/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/FrameProcessor.h"

namespace android {
namespace camera2 {

FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
                               sp<Camera2Client> client) :
    FrameProcessorBase(device),
    mClient(client),
    mLastFrameNumberOfFaces(0),
    mLast3AFrameNumber(-1) {

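    // Determine whether the HAL delivers 3A notify callbacks itself; if it
    // does not, this processor synthesizes them from capture results.
    // Note: this assumes the device is still alive while the client that
    // owns this processor is being constructed.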
    sp<CameraDeviceBase> d = device.promote();
    mSynthesize3ANotify = !(d->willNotify3A());

    {
        SharedParameters::Lock l(client->getParameters());

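        // HAL3.2+ devices report how many partial results to expect per
        // frame (partial results are in use only when more than one is
        // sent); older devices advertise partial-result support through
        // the android.quirks.partialResult quirk instead.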
        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
            mUsePartialResult = (mNumPartialResults > 1);
        } else {
            mUsePartialResult = l.mParameters.quirks.partialResults;
        }

        // Initialize starting 3A state
        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
        // Check if lens is fixed-focus
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
        } else {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        }
        m3aState.awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
        m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
        m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    }
}

FrameProcessor::~FrameProcessor() {
}

bool FrameProcessor::processSingleFrame(CaptureResult &frame,
                                        const sp<CameraDeviceBase> &device) {

    sp<Camera2Client> client = mClient.promote();
    if (!client.get()) {
        return false;
    }

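    // Decide whether this result is a partial one: HAL3.2+ reports an
    // explicit partial result count in the result extras, while older
    // HALs flag partials via the android.quirks.partialResult tag.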
    bool isPartialResult = false;
    if (mUsePartialResult) {
        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
            isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
        } else {
            camera_metadata_entry_t entry;
            entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
            if (entry.count > 0 &&
                    entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
                isPartialResult = true;
            }
        }
    }

    if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
        return false;
    }

    if (mSynthesize3ANotify) {
        process3aState(frame, client);
    }

    return FrameProcessorBase::processSingleFrame(frame, device);
}

status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
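        // Each face rectangle is four int32 values (left, top, right,
        // bottom), so the entry count divided by four gives the face count.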
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        entry = frame.find(ANDROID_SCALER_CROP_REGION);
        if (entry.count < 4) {
            ALOGE("%s: Camera %d: Unable to read crop region (count = %zu)",
                    __FUNCTION__, client->getCameraId(), entry.count);
            return res;
        }

        Parameters::CropRegion scalerCrop = {
            static_cast<float>(entry.data.i32[0]),
            static_cast<float>(entry.data.i32[1]),
            static_cast<float>(entry.data.i32[2]),
            static_cast<float>(entry.data.i32[3])};

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

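            // Map the face coordinates from sensor active-array space into
            // the normalized [-1000, 1000] space that the camera1 (API1)
            // face detection callback expects, accounting for the current
            // scaler crop region. A score of 0 above marks an invalid
            // (skipped) face entry.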
            face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 0], scalerCrop);
            face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 1], scalerCrop);
            face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 2], scalerCrop);
            face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 3], scalerCrop);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 0], scalerCrop);
                face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 1], scalerCrop);
                face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 2], scalerCrop);
                face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 3], scalerCrop);
                face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 4], scalerCrop);
                face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 5], scalerCrop);
            } else {
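                // Landmarks are only available in FULL mode; report the
                // out-of-range sentinel (-2000) so callers can tell the
                // landmark positions are not supported for this face.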
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}

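/**
 * Synthesize 3A (AE/AF/AWB) notify callbacks from capture result metadata.
 * The states for one frame may be spread across several partial results,
 * so partially-assembled states are parked in mPending3AStates until every
 * field has arrived; only then are the client notifications sent.
 */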
status_t FrameProcessor::process3aState(const CaptureResult &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    const CameraMetadata &metadata = frame.mMetadata;
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No frame count in result metadata",
                __FUNCTION__, cameraId);
        return BAD_VALUE;
    }
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);

        // Remove the entry if there is one for this frame number in mPending3AStates.
        mPending3AStates.removeItem(frameNumber);
        return OK;
    }

    AlgState pendingState;

    ssize_t index = mPending3AStates.indexOfKey(frameNumber);
    if (index != NAME_NOT_FOUND) {
        pendingState = mPending3AStates.valueAt(index);
    }

    // Update 3A states from the result.
    bool gotAllStates = true;

    // TODO: Also use AE mode, AE trigger ID
    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
            &pendingState.afMode, frameNumber, cameraId);

    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
            &pendingState.awbMode, frameNumber, cameraId);

    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
            &pendingState.aeState, frameNumber, cameraId);

    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
            &pendingState.afState, frameNumber, cameraId);

    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
            &pendingState.awbState, frameNumber, cameraId);

    if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
        pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
        pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
    } else {
        gotAllStates &= updatePendingState<int32_t>(metadata,
                ANDROID_CONTROL_AF_TRIGGER_ID, &pendingState.afTriggerId, frameNumber, cameraId);

        gotAllStates &= updatePendingState<int32_t>(metadata,
                ANDROID_CONTROL_AE_PRECAPTURE_ID, &pendingState.aeTriggerId, frameNumber, cameraId);
    }

    if (!gotAllStates) {
        // If not all states are received, put the pending state to mPending3AStates.
        if (index == NAME_NOT_FOUND) {
            mPending3AStates.add(frameNumber, pendingState);
        } else {
            mPending3AStates.replaceValueAt(index, pendingState);
        }
        return NOT_ENOUGH_DATA;
    }

    // Once all 3A states are received, notify the client about 3A changes.
    if (pendingState.aeState != m3aState.aeState) {
        ALOGV("%s: Camera %d: AE state %d->%d",
                __FUNCTION__, cameraId,
                m3aState.aeState, pendingState.aeState);
        client->notifyAutoExposure(pendingState.aeState, pendingState.aeTriggerId);
    }

    if (pendingState.afState != m3aState.afState ||
        pendingState.afMode != m3aState.afMode ||
        pendingState.afTriggerId != m3aState.afTriggerId) {
        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                __FUNCTION__, cameraId,
                m3aState.afState, pendingState.afState,
                m3aState.afMode, pendingState.afMode,
                m3aState.afTriggerId, pendingState.afTriggerId);
        client->notifyAutoFocus(pendingState.afState, pendingState.afTriggerId);
    }
    if (pendingState.awbState != m3aState.awbState ||
        pendingState.awbMode != m3aState.awbMode) {
        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                __FUNCTION__, cameraId,
                m3aState.awbState, pendingState.awbState,
                m3aState.awbMode, pendingState.awbMode);
        client->notifyAutoWhitebalance(pendingState.awbState,
                pendingState.aeTriggerId);
    }

    if (index != NAME_NOT_FOUND) {
        mPending3AStates.removeItemsAt(index);
    }

    m3aState = pendingState;
    mLast3AFrameNumber = frameNumber;

    return OK;
}

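/**
 * Copy a single 3A value out of the result metadata into *value, but only
 * if it hasn't already been filled in by an earlier partial result
 * (NOT_SET marks an empty slot). Returns true once the value is known,
 * false if the tag is still missing from this frame's results so far.
 */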
template<typename Src, typename T>
bool FrameProcessor::updatePendingState(const CameraMetadata& result, int32_t tag,
        T* value, int32_t frameNumber, int cameraId) {
    camera_metadata_ro_entry_t entry;
    if (value == NULL) {
        ALOGE("%s: Camera %d: Value to write to is NULL",
                __FUNCTION__, cameraId);
        return false;
    }

    // Already got the value for this tag.
    if (*value != static_cast<T>(NOT_SET)) {
        return true;
    }

    entry = result.find(tag);
    if (entry.count == 0) {
        ALOGV("%s: Camera %d: No %s provided by HAL for frame %d in this result!",
                __FUNCTION__, cameraId,
                get_camera_metadata_tag_name(tag), frameNumber);
        return false;
    } else {
        switch (sizeof(Src)) {
            case sizeof(uint8_t):
                *value = static_cast<T>(entry.data.u8[0]);
                break;
            case sizeof(int32_t):
                *value = static_cast<T>(entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Camera %d: Unsupported source",
                        __FUNCTION__, cameraId);
                return false;
        }
    }
    return true;
}


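/**
 * Deliver the face-detection metadata to the app through the camera1
 * preview-metadata data callback. The const_cast is needed because the
 * legacy dataCallback interface takes a non-const pointer even though
 * the metadata is not modified here.
 */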
void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
                                     const camera_frame_metadata &metadata) {

    camera_frame_metadata *metadata_ptr =
        const_cast<camera_frame_metadata*>(&metadata);

    /**
     * Filter out repeated 0-face callbacks,
     * but not when the last frame was >0
     */
    if (metadata.number_of_faces != 0 ||
        mLastFrameNumberOfFaces != metadata.number_of_faces) {

        Camera2Client::SharedCameraCallbacks::Lock
            l(client->mSharedCameraCallbacks);
        if (l.mRemoteCallback != NULL) {
            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                                            NULL,
                                            metadata_ptr);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

} // namespace camera2
} // namespace android