/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputUtils"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0  // Per-frame verbose logging

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

// Convenience macros for transitioning to the error state
#define SET_ERR(fmt, ...) states.setErrIntf.setErrorState(   \
    "%s: " fmt, __FUNCTION__,                         \
    ##__VA_ARGS__)
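
// Editorial note: SET_ERR expands to a printf-style call into the error-state interface,
// prefixed with the calling function's name. An illustrative call site (taken from usage
// later in this file):
//
//     SET_ERR("No timestamp provided by HAL for frame %d!", frameNumber);
//
// This transitions the device into the error state; per the comment in notifyError() below,
// setErrorState() also notifies the registered listener/application.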

#include <inttypes.h>

#include <utils/Log.h>
#include <utils/SortedVector.h>
#include <utils/Trace.h>

#include <android/hardware/ICameraService.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>

#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>

#include <camera/CameraUtils.h>
#include <camera/StringUtils.h>
#include <camera_metadata_hidden.h>
#include <com_android_internal_camera_flags.h>

#include "device3/Camera3OutputUtils.h"
#include "utils/SessionConfigurationUtils.h"

#include "system/camera_metadata.h"

using namespace android::camera3;
using namespace android::camera3::SessionConfigurationUtils;
using namespace android::hardware::camera;
namespace flags = com::android::internal::camera::flags;

namespace android {
namespace camera3 {

status_t fixupMonochromeTags(
        CaptureOutputStates& states,
        const CameraMetadata& deviceInfo,
        CameraMetadata& resultMetadata) {
    status_t res = OK;
    if (!states.needFixupMonoChrome) {
        return res;
    }

    // Remove tags that are not applicable to monochrome camera.
    int32_t tagsToRemove[] = {
           ANDROID_SENSOR_GREEN_SPLIT,
           ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
           ANDROID_COLOR_CORRECTION_MODE,
           ANDROID_COLOR_CORRECTION_TRANSFORM,
           ANDROID_COLOR_CORRECTION_GAINS,
    };
    for (auto tag : tagsToRemove) {
        res = resultMetadata.erase(tag);
        if (res != OK) {
            ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
            return res;
        }
    }

    // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
    camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
    for (size_t i = 1; i < blEntry.count; i++) {
        blEntry.data.f[i] = blEntry.data.f[0];
    }

    // ANDROID_SENSOR_NOISE_PROFILE
    camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
    if (npEntry.count > 0 && npEntry.count % 2 == 0) {
        double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
        res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
        if (res != OK) {
             ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    // ANDROID_STATISTICS_LENS_SHADING_MAP
    camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
    camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
    if (lsSizeEntry.count == 2 && lsEntry.count > 0
            && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
        for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
            lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
            lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
            lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
        }
    }

    // ANDROID_TONEMAP_CURVE_BLUE
    // ANDROID_TONEMAP_CURVE_GREEN
    // ANDROID_TONEMAP_CURVE_RED
    camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
    camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
    camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
    if (tcbEntry.count > 0
            && tcbEntry.count == tcgEntry.count
            && tcbEntry.count == tcrEntry.count) {
        for (size_t i = 0; i < tcbEntry.count; i++) {
            tcbEntry.data.f[i] = tcrEntry.data.f[i];
            tcgEntry.data.f[i] = tcrEntry.data.f[i];
        }
    }

    return res;
}
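
// Worked example (editorial, illustrative only) for the lens-shading-map fix-up above: the
// map carries four gain factors per grid point, conventionally ordered [R, G_even, G_odd, B],
// and the loop copies the R gain into the remaining three channels for a monochrome sensor:
//
//     float gains[4] = {1.2f, 1.0f, 1.1f, 1.3f};   // one grid point, as reported by the HAL
//     // after fixupMonochromeTags():  {1.2f, 1.2f, 1.2f, 1.2f}
//
// The tonemap curves are treated the same way: the red curve is copied over the green and
// blue curves so that all three channels report identical values.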

status_t fixupAutoframingTags(CameraMetadata& resultMetadata) {
    status_t res = OK;
    camera_metadata_entry autoframingEntry =
            resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING);
    if (autoframingEntry.count == 0) {
        const uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_OFF;
        res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING, &defaultAutoframingEntry, 1);
        if (res != OK) {
            ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING: %s (%d)",
                  __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    camera_metadata_entry autoframingStateEntry =
            resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING_STATE);
    if (autoframingStateEntry.count == 0) {
        const uint8_t defaultAutoframingStateEntry = ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE;
        res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING_STATE,
                                    &defaultAutoframingStateEntry, 1);
        if (res != OK) {
            ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING_STATE: %s (%d)",
                  __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    return res;
}

status_t fixupManualFlashStrengthControlTags(CameraMetadata& resultMetadata) {
    status_t res = OK;
    camera_metadata_entry strengthLevelEntry =
            resultMetadata.find(ANDROID_FLASH_STRENGTH_LEVEL);
    if (strengthLevelEntry.count == 0) {
        const int32_t defaultStrengthLevelEntry = ANDROID_FLASH_STRENGTH_LEVEL;
        res = resultMetadata.update(ANDROID_FLASH_STRENGTH_LEVEL, &defaultStrengthLevelEntry, 1);
        if (res != OK) {
            ALOGE("%s: Failed to update ANDROID_FLASH_STRENGTH_LEVEL: %s (%d)",
                  __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    return res;
}

void correctMeteringRegions(camera_metadata_t *meta) {
    if (meta == nullptr) return;

    uint32_t meteringRegionKeys[] = {
            ANDROID_CONTROL_AE_REGIONS,
            ANDROID_CONTROL_AWB_REGIONS,
            ANDROID_CONTROL_AF_REGIONS };

    for (uint32_t key : meteringRegionKeys) {
        camera_metadata_entry_t entry;
        int res = find_camera_metadata_entry(meta, key, &entry);
        if (res != OK) continue;

        for (size_t i = 0; i < entry.count; i += 5) {
            // Each region is a 5-tuple (left, top, right, bottom, weight); index relative
            // to the start of the current region rather than always checking the first one.
            if (entry.data.i32[i] > entry.data.i32[i + 2]) {
                ALOGW("%s: Invalid metering region (%d): left: %d, right: %d",
                        __FUNCTION__, key, entry.data.i32[i], entry.data.i32[i + 2]);
                entry.data.i32[i + 2] = entry.data.i32[i];
            }
            if (entry.data.i32[i + 1] > entry.data.i32[i + 3]) {
                ALOGW("%s: Invalid metering region (%d): top: %d, bottom: %d",
                        __FUNCTION__, key, entry.data.i32[i + 1], entry.data.i32[i + 3]);
                entry.data.i32[i + 3] = entry.data.i32[i + 1];
            }
        }
    }
}
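
// Worked example (editorial, illustrative only): AE/AWB/AF regions are encoded as 5-tuples
// of (left, top, right, bottom, weight). A region reported with swapped horizontal bounds,
//
//     int32_t region[5] = {100, 50, 40, 200, 1};   // left > right
//
// is clamped by correctMeteringRegions() to {100, 50, 100, 200, 1}, so that downstream
// consumers never see a rectangle with negative extent.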

void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
    if (result == nullptr) return;

    camera_metadata_t *meta = const_cast<camera_metadata_t *>(
            result->mMetadata.getAndLock());
    set_camera_metadata_vendor_id(meta, states.vendorTagId);
    correctMeteringRegions(meta);
    result->mMetadata.unlock(meta);

    if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
            (int32_t*)&frameNumber, 1) != OK) {
        SET_ERR("Failed to set frame number %d in metadata", frameNumber);
        return;
    }

    if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
        SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
        return;
    }

    // Update vendor tag id for physical metadata
    for (auto& physicalMetadata : result->mPhysicalMetadatas) {
        camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
                physicalMetadata.mPhysicalCameraMetadata.getAndLock());
        set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
        correctMeteringRegions(pmeta);
        physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
    }

    // Valid result, insert into queue
    std::list<CaptureResult>::iterator queuedResult =
            states.resultQueue.insert(states.resultQueue.end(), CaptureResult(*result));
    ALOGV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
           ", burstId = %" PRId32, __FUNCTION__,
           queuedResult->mResultExtras.requestId,
           queuedResult->mResultExtras.frameNumber,
           queuedResult->mResultExtras.burstId);

    states.resultSignal.notify_one();
}
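
// Editorial note: insertResultLocked() assumes the caller already holds states.outputLock
// (as sendPartialCaptureResult() and sendCaptureResult() below do). A consumer draining
// states.resultQueue would typically block on states.resultSignal; a minimal sketch of such
// a wait loop, assuming resultSignal is a std::condition_variable paired with outputLock
// (the actual consumer is expected to live in Camera3Device's result-wait path, not here):
//
//     std::unique_lock<std::mutex> lk(states.outputLock);
//     states.resultSignal.wait(lk, [&states] { return !states.resultQueue.empty(); });
//     CaptureResult next = std::move(states.resultQueue.front());
//     states.resultQueue.pop_front();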


void sendPartialCaptureResult(CaptureOutputStates& states,
        const camera_metadata_t * partialResult,
        const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
    ATRACE_CALL();
    std::lock_guard<std::mutex> l(states.outputLock);

    CaptureResult captureResult;
    captureResult.mResultExtras = resultExtras;
    captureResult.mMetadata = partialResult;

    // Fix up result metadata for monochrome camera.
    status_t res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
        return;
    }

    // Update partial result by removing keys remapped by DistortionCorrection, ZoomRatio,
    // and RotationAndCrop mappers.
    std::set<uint32_t> keysToRemove;

    auto iter = states.distortionMappers.find(states.cameraId);
    if (iter != states.distortionMappers.end()) {
        const auto& remappedKeys = iter->second.getRemappedKeys();
        keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
    }

    const auto& remappedKeys = states.zoomRatioMappers[states.cameraId].getRemappedKeys();
    keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());

    auto mapper = states.rotateAndCropMappers.find(states.cameraId);
    if (mapper != states.rotateAndCropMappers.end()) {
        const auto& remappedKeys = mapper->second.getRemappedKeys();
        keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
    }

    for (uint32_t key : keysToRemove) {
        captureResult.mMetadata.erase(key);
    }

    // Send partial result
    if (captureResult.mMetadata.entryCount() > 0) {
        insertResultLocked(states, &captureResult, frameNumber);
    }
}
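
// Rationale (editorial): partial results are stripped of any keys that the distortion,
// zoom-ratio, and rotate-and-crop mappers rewrite, because those mappers only run on the
// final, complete result in sendCaptureResult() below. Forwarding the raw HAL values in a
// partial result could briefly expose coordinates in the wrong (pre-mapping) coordinate
// system to the client.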

void sendCaptureResult(
        CaptureOutputStates& states,
        CameraMetadata &pendingMetadata,
        CaptureResultExtras &resultExtras,
        CameraMetadata &collectedPartialResult,
        uint32_t frameNumber,
        bool reprocess, bool zslStillCapture, bool rotateAndCropAuto,
        const std::set<std::string>& cameraIdsWithZoom,
        const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
    ATRACE_CALL();
    if (pendingMetadata.isEmpty())
        return;

    std::lock_guard<std::mutex> l(states.outputLock);

    // TODO: need to track errors for tighter bounds on expected frame number
    if (reprocess) {
        if (frameNumber < states.nextReprocResultFrameNum) {
            SET_ERR("Out-of-order reprocess capture result metadata submitted! "
                "(got frame number %d, expecting %d)",
                frameNumber, states.nextReprocResultFrameNum);
            return;
        }
        states.nextReprocResultFrameNum = frameNumber + 1;
    } else if (zslStillCapture) {
        if (frameNumber < states.nextZslResultFrameNum) {
            SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
                "(got frame number %d, expecting %d)",
                frameNumber, states.nextZslResultFrameNum);
            return;
        }
        states.nextZslResultFrameNum = frameNumber + 1;
    } else {
        if (frameNumber < states.nextResultFrameNum) {
            SET_ERR("Out-of-order capture result metadata submitted! "
                    "(got frame number %d, expecting %d)",
                    frameNumber, states.nextResultFrameNum);
            return;
        }
        states.nextResultFrameNum = frameNumber + 1;
    }

    CaptureResult captureResult;
    captureResult.mResultExtras = resultExtras;
    captureResult.mMetadata = pendingMetadata;
    captureResult.mPhysicalMetadatas = physicalMetadatas;

    // Append any previous partials to form a complete result
    if (states.usePartialResult && !collectedPartialResult.isEmpty()) {
        captureResult.mMetadata.append(collectedPartialResult);
    }

    captureResult.mMetadata.sort();

    // Check that there's a timestamp in the result metadata
    camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
    if (timestamp.count == 0) {
        SET_ERR("No timestamp provided by HAL for frame %d!",
                frameNumber);
        return;
    }
    nsecs_t sensorTimestamp = timestamp.data.i64[0];

    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        camera_metadata_entry timestamp =
                physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
        if (timestamp.count == 0) {
            SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
                    physicalMetadata.mPhysicalCameraId.c_str(), frameNumber);
            return;
        }
    }

    // Fix up some result metadata to account for HAL-level distortion correction
    status_t res = OK;
    auto iter = states.distortionMappers.find(states.cameraId);
    if (iter != states.distortionMappers.end()) {
        res = iter->second.correctCaptureResult(&captureResult.mMetadata);
        if (res != OK) {
            SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
                    frameNumber, strerror(-res), res);
            return;
        }
    }

    // Fix up result metadata to account for zoom ratio availabilities between
    // HAL and app.
    bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId) == cameraIdsWithZoom.end();
    res = states.zoomRatioMappers[states.cameraId].updateCaptureResult(
            &captureResult.mMetadata, zoomRatioIs1);
    if (res != OK) {
        SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
                frameNumber, strerror(-res), res);
        return;
    }

    // Fix up result metadata to account for rotateAndCrop in AUTO mode
    if (rotateAndCropAuto) {
        auto mapper = states.rotateAndCropMappers.find(states.cameraId);
        if (mapper != states.rotateAndCropMappers.end()) {
            res = mapper->second.updateCaptureResult(
                    &captureResult.mMetadata);
            if (res != OK) {
                SET_ERR("Unable to correct capture result rotate-and-crop for frame %d: %s (%d)",
                        frameNumber, strerror(-res), res);
                return;
            }
        }
    }

    // Fix up manual flash strength control metadata
    res = fixupManualFlashStrengthControlTags(captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to set flash strength level defaults in result metadata: %s (%d)",
                strerror(-res), res);
        return;
    }
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        res = fixupManualFlashStrengthControlTags(physicalMetadata.mPhysicalCameraMetadata);
        if (res != OK) {
            SET_ERR("Failed to set flash strength level defaults in physical result"
                    " metadata: %s (%d)", strerror(-res), res);
            return;
        }
    }

    // Fix up autoframing metadata
    res = fixupAutoframingTags(captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to set autoframing defaults in result metadata: %s (%d)",
                strerror(-res), res);
        return;
    }
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        res = fixupAutoframingTags(physicalMetadata.mPhysicalCameraMetadata);
        if (res != OK) {
            SET_ERR("Failed to set autoframing defaults in physical result metadata: %s (%d)",
                    strerror(-res), res);
            return;
        }
    }

    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        const std::string cameraId = physicalMetadata.mPhysicalCameraId;
        auto mapper = states.distortionMappers.find(cameraId);
        if (mapper != states.distortionMappers.end()) {
            res = mapper->second.correctCaptureResult(
                    &physicalMetadata.mPhysicalCameraMetadata);
            if (res != OK) {
                SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
                        frameNumber, strerror(-res), res);
                return;
            }
        }

        zoomRatioIs1 = cameraIdsWithZoom.find(cameraId) == cameraIdsWithZoom.end();
        res = states.zoomRatioMappers[cameraId].updateCaptureResult(
                &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
        if (res != OK) {
            SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
                    "frame %d: %s(%d)", cameraId.c_str(), frameNumber, strerror(-res), res);
            return;
        }
    }

    // Fix up result metadata for monochrome camera.
    res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
        return;
    }
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        const std::string &cameraId = physicalMetadata.mPhysicalCameraId;
        res = fixupMonochromeTags(states,
                states.physicalDeviceInfoMap.at(cameraId),
                physicalMetadata.mPhysicalCameraMetadata);
        if (res != OK) {
            SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
            return;
        }
    }

    std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
    for (auto& m : physicalMetadatas) {
        monitoredPhysicalMetadata.emplace(m.mPhysicalCameraId,
                CameraMetadata(m.mPhysicalCameraMetadata));
    }
    states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
            frameNumber, sensorTimestamp, captureResult.mMetadata,
            monitoredPhysicalMetadata);

    insertResultLocked(states, &captureResult, frameNumber);
}
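
// Summary (editorial) of the fix-up pipeline applied by sendCaptureResult() above, in order,
// to the logical result and, where applicable, to each physical camera result:
//   1. distortion-correction remapping (distortionMappers)
//   2. zoom-ratio remapping (zoomRatioMappers), with a flag indicating whether the request
//      effectively used a zoom ratio of 1 for that camera id
//   3. rotate-and-crop remapping, only for requests submitted in ROTATE_AND_CROP_AUTO mode
//   4. manual flash strength defaults (fixupManualFlashStrengthControlTags)
//   5. autoframing defaults (fixupAutoframingTags)
//   6. monochrome tag fix-ups (fixupMonochromeTags)
// The result is then recorded by the tag monitor and queued via insertResultLocked().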

void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
    ATRACE_CALL();
    InFlightRequestMap& inflightMap = states.inflightMap;
    nsecs_t duration = inflightMap.valueAt(idx).maxExpectedDuration;
    inflightMap.removeItemsAt(idx, 1);

    states.inflightIntf.onInflightEntryRemovedLocked(duration);
}

void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx,
        std::vector<BufferToReturn> *returnableBuffers) {
    InFlightRequestMap& inflightMap = states.inflightMap;
    const InFlightRequest &request = inflightMap.valueAt(idx);
    const uint32_t frameNumber = inflightMap.keyAt(idx);
    SessionStatsBuilder& sessionStatsBuilder = states.sessionStatsBuilder;

    nsecs_t sensorTimestamp = request.sensorTimestamp;
    nsecs_t shutterTimestamp = request.shutterTimestamp;

    // Check if it's okay to remove the request from InFlightMap:
    // In the case of a successful request:
    //      all input and output buffers, all result metadata, shutter callback
    //      arrived.
    // In the case of an unsuccessful request:
    //      all input and output buffers, as well as request/result error notifications, arrived.
    if (request.numBuffersLeft == 0 &&
            (request.skipResultMetadata ||
            (request.haveResultMetadata && shutterTimestamp != 0))) {
        if (request.stillCapture) {
            ATRACE_ASYNC_END("still capture", frameNumber);
        }

        ATRACE_ASYNC_END("frame capture", frameNumber);

        // Validation check - if sensor timestamp matches shutter timestamp in the
        // case of request having callback.
        if (request.hasCallback && request.requestStatus == OK &&
                sensorTimestamp != shutterTimestamp) {
            SET_ERR("sensor timestamp (%" PRId64
                ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
                sensorTimestamp, frameNumber, shutterTimestamp);
        }

        // for an unsuccessful request, it may have pending output buffers to
        // return.
        assert(request.requestStatus != OK ||
               request.pendingOutputBuffers.size() == 0);

        collectReturnableOutputBuffers(
            states.useHalBufManager, states.halBufManagedStreamIds,
            states.listener,
            request.pendingOutputBuffers.array(),
            request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
            /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
            /*out*/ returnableBuffers,
            /*timestampIncreasing*/true,
            request.outputSurfaces, request.resultExtras,
            request.errorBufStrategy, request.transform);

        // Note down the just completed frame number
        if (request.hasInputBuffer) {
            states.lastCompletedReprocessFrameNumber = frameNumber;
        } else if (request.zslCapture && request.stillCapture) {
            states.lastCompletedZslFrameNumber = frameNumber;
        } else {
            states.lastCompletedRegularFrameNumber = frameNumber;
        }

        sessionStatsBuilder.incResultCounter(request.skipResultMetadata);

        removeInFlightMapEntryLocked(states, idx);
        ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
    }

    states.inflightIntf.checkInflightMapLengthLocked();
}

// Erase the subset of physicalCameraIds that contains id
bool erasePhysicalCameraIdSet(
        std::set<std::set<std::string>>& physicalCameraIds, const std::string& id) {
    bool found = false;
    for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
        if (iter->count(id) == 1) {
            physicalCameraIds.erase(iter);
            found = true;
            break;
        }
    }
    return found;
}
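
// Illustrative example (editorial, not part of the original file): physicalCameraIds appears
// to hold one id-set per expected physical result, and erasing by a single id removes the
// entire subset that contains it:
//
//     std::set<std::set<std::string>> ids = {{"2", "3"}, {"4"}};
//     bool found = erasePhysicalCameraIdSet(ids, "3");
//     // found == true, ids == {{"4"}}
//
// Callers treat a false return as an unexpected (unknown or duplicate) physical result.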

const std::set<std::string>& getCameraIdsWithZoomLocked(
        const InFlightRequestMap& inflightMap, const CameraMetadata& metadata,
        const std::set<std::string>& cameraIdsWithZoom) {
    camera_metadata_ro_entry overrideEntry =
            metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
    camera_metadata_ro_entry frameNumberEntry =
            metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
    if (overrideEntry.count != 1
            || overrideEntry.data.i32[0] != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM
            || frameNumberEntry.count != 1) {
        // No valid overriding frame number, skip
        return cameraIdsWithZoom;
    }

    uint32_t overridingFrameNumber = frameNumberEntry.data.i32[0];
    ssize_t idx = inflightMap.indexOfKey(overridingFrameNumber);
    if (idx < 0) {
        ALOGE("%s: Failed to find pending request #%d in inflight map",
                __FUNCTION__, overridingFrameNumber);
        return cameraIdsWithZoom;
    }

    const InFlightRequest &r = inflightMap.valueFor(overridingFrameNumber);
    return r.cameraIdsWithZoom;
}

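// processCaptureResult() is the entry point for capture results returned by the HAL. At a
// high level it:
//   - validates the result (non-empty; partial_result consistency),
//   - looks up the matching in-flight request by frame number,
//   - tracks active-physical-camera switches and, when possible, updates the stream
//     transform for requests still in flight,
//   - appends partial metadata and forwards partial results to the client,
//   - accounts for returned output/input buffers, holding output buffers back until the
//     shutter notification arrives,
//   - sends the final capture result once both the complete metadata and the shutter
//     timestamp are available, and
//   - returns the input buffer (for reprocess requests) to its stream.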
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
    ATRACE_CALL();

    status_t res;

    uint32_t frameNumber = result->frame_number;
    if (result->result == NULL && result->num_output_buffers == 0 &&
            result->input_buffer == NULL) {
        SET_ERR("No result data provided by HAL for frame %d",
                frameNumber);
        return;
    }

    if (!states.usePartialResult &&
            result->result != NULL &&
            result->partial_result != 1) {
        SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
                " if partial result is not supported",
                frameNumber, result->partial_result);
        return;
    }

    bool isPartialResult = false;
    CameraMetadata collectedPartialResult;
    bool hasInputBufferInRequest = false;

    // Get shutter timestamp and resultExtras from list of in-flight requests,
    // where it was added by the shutter notification for this frame. If the
    // shutter timestamp isn't received yet, append the output buffers to the
    // in-flight request and they will be returned when the shutter timestamp
    // arrives. Update the in-flight status and remove the in-flight entry if
    // all result data and shutter timestamp have been received.
    std::vector<BufferToReturn> returnableBuffers{};
    nsecs_t shutterTimestamp = 0;
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        ssize_t idx = states.inflightMap.indexOfKey(frameNumber);
        if (idx == NAME_NOT_FOUND) {
            SET_ERR("Unknown frame number for capture result: %d",
                    frameNumber);
            return;
        }
        InFlightRequest &request = states.inflightMap.editValueAt(idx);
        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
                ", frameNumber = %" PRId64 ", burstId = %" PRId32
                ", partialResultCount = %d/%d, hasCallback = %d, num_output_buffers %d"
                ", usePartialResult = %d",
                __FUNCTION__, request.resultExtras.requestId,
                request.resultExtras.frameNumber, request.resultExtras.burstId,
                result->partial_result, states.numPartialResults,
                request.hasCallback, result->num_output_buffers,
                states.usePartialResult);
        // Always update the partial count to the latest one if it's not 0
        // (buffers only). When framework aggregates adjacent partial results
        // into one, the latest partial count will be used.
        if (result->partial_result != 0)
            request.resultExtras.partialResultCount = result->partial_result;

        if (result->result != nullptr) {
            camera_metadata_ro_entry entry;
            auto ret = find_camera_metadata_ro_entry(result->result,
                    ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
            if ((ret == OK) && (entry.count > 0)) {
                std::string physicalId(reinterpret_cast<const char *>(entry.data.u8));
                if (!states.activePhysicalId.empty() && physicalId != states.activePhysicalId) {
                    states.listener->notifyPhysicalCameraChange(physicalId);
                }
                states.activePhysicalId = physicalId;
                using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
                if (!states.legacyClient &&
                        states.rotationOverride == ROTATION_OVERRIDE_NONE) {
                    auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
                    if (deviceInfo != states.physicalDeviceInfoMap.end()) {
                        auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
                        if (orientation.count > 0) {
                            int32_t transform;
                            ret = CameraUtils::getRotationTransform(deviceInfo->second,
                                    OutputConfiguration::MIRROR_MODE_AUTO, &transform);
                            if (ret == OK) {
                                // It is possible for camera providers to return the capture
                                // results after the processed frames. In such a scenario, we
                                // will not be able to set the output transformation before the
                                // frames return to the consumer for the current capture request,
                                // but we can still try to configure it for any future requests
                                // that are still in flight. The assumption is that the physical
                                // device id remains the same for the duration of the pending queue.
                                for (size_t i = 0; i < states.inflightMap.size(); i++) {
                                    auto &r = states.inflightMap.editValueAt(i);
                                    if (r.requestTimeNs >= request.requestTimeNs) {
                                        r.transform = transform;
                                    }
                                }
                            } else {
                                ALOGE("%s: Failed to calculate current stream transformation: %s "
                                        "(%d)", __FUNCTION__, strerror(-ret), ret);
                            }
                        } else {
                            ALOGE("%s: Physical device orientation absent!", __FUNCTION__);
                        }
                    } else {
                        ALOGE("%s: Physical device not found in device info map!",
                                __FUNCTION__);
                    }
                }
            }
        }

        // Check if this result carries only partial metadata
        if (states.usePartialResult && result->result != NULL) {
            if (result->partial_result > states.numPartialResults || result->partial_result < 1) {
                SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
                        " the range of [1, %d] when metadata is included in the result",
                        frameNumber, result->partial_result, states.numPartialResults);
                return;
            }
            isPartialResult = (result->partial_result < states.numPartialResults);
            if (isPartialResult && result->num_physcam_metadata) {
                SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
                        " physical camera result", frameNumber);
                return;
            }
            if (isPartialResult) {
                request.collectedPartialResult.append(result->result);
            }

            if (isPartialResult && request.hasCallback) {
                // Send partial capture result
                sendPartialCaptureResult(states, result->result, request.resultExtras,
                        frameNumber);
            }
        }

        shutterTimestamp = request.shutterTimestamp;
        hasInputBufferInRequest = request.hasInputBuffer;

        // Did we get the (final) result metadata for this capture?
        if (result->result != NULL && !isPartialResult) {
            if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
                SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
                        request.physicalCameraIds.size(), result->num_physcam_metadata);
                return;
            }
            if (request.haveResultMetadata) {
                SET_ERR("Called multiple times with metadata for frame %d",
                        frameNumber);
                return;
            }
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                const std::string physicalId = result->physcam_ids[i];
                bool validPhysicalCameraMetadata =
                        erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
                if (!validPhysicalCameraMetadata) {
                    SET_ERR("Unexpected total result for frame %d camera %s",
                            frameNumber, physicalId.c_str());
                    return;
                }
            }
            if (states.usePartialResult &&
                    !request.collectedPartialResult.isEmpty()) {
                collectedPartialResult.acquire(
                    request.collectedPartialResult);
            }
            request.haveResultMetadata = true;
            request.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
        }

        uint32_t numBuffersReturned = result->num_output_buffers;
        if (result->input_buffer != NULL) {
            if (hasInputBufferInRequest) {
                numBuffersReturned += 1;
            } else {
                ALOGW("%s: Input buffer should be NULL if there is no input"
                        " buffer sent in the request",
                        __FUNCTION__);
            }
        }
        request.numBuffersLeft -= numBuffersReturned;
        if (request.numBuffersLeft < 0) {
            SET_ERR("Too many buffers returned for frame %d",
                    frameNumber);
            return;
        }

        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(result->result,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res == OK && entry.count == 1) {
            request.sensorTimestamp = entry.data.i64[0];
        }

        // If shutter event isn't received yet, do not return the pending output
        // buffers.
        request.pendingOutputBuffers.appendArray(result->output_buffers,
                result->num_output_buffers);
        if (shutterTimestamp != 0) {
            collectAndRemovePendingOutputBuffers(
                states.useHalBufManager, states.halBufManagedStreamIds,
                states.listener,
                request, states.sessionStatsBuilder,
                /*out*/ &returnableBuffers);
        }

        if (result->result != NULL && !isPartialResult) {
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                CameraMetadata physicalMetadata;
                physicalMetadata.append(result->physcam_metadata[i]);
                request.physicalMetadatas.push_back({result->physcam_ids[i],
                        physicalMetadata});
            }
            if (shutterTimestamp == 0) {
                request.pendingMetadata = result->result;
                request.collectedPartialResult = collectedPartialResult;
            } else if (request.hasCallback) {
                CameraMetadata metadata;
                metadata = result->result;
                auto cameraIdsWithZoom = getCameraIdsWithZoomLocked(
                        states.inflightMap, metadata, request.cameraIdsWithZoom);
                sendCaptureResult(states, metadata, request.resultExtras,
                    collectedPartialResult, frameNumber,
                    hasInputBufferInRequest, request.zslCapture && request.stillCapture,
                    request.rotateAndCropAuto, cameraIdsWithZoom,
                    request.physicalMetadatas);
            }
        }
        removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
        if (!flags::return_buffers_outside_locks()) {
            finishReturningOutputBuffers(returnableBuffers,
                states.listener, states.sessionStatsBuilder);
        }
    } // scope for states.inFlightLock

    if (flags::return_buffers_outside_locks()) {
        finishReturningOutputBuffers(returnableBuffers,
                states.listener, states.sessionStatsBuilder);
    }

    if (result->input_buffer != NULL) {
        if (hasInputBufferInRequest) {
            Camera3Stream *stream =
                Camera3Stream::cast(result->input_buffer->stream);
            res = stream->returnInputBuffer(*(result->input_buffer));
            // Note: stream may be deallocated at this point, if this buffer was the
            // last reference to it.
            if (res != OK) {
                ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
                      " its stream: %s (%d)", __FUNCTION__,
                      frameNumber, strerror(-res), res);
            }
        } else {
            ALOGW("%s: Input buffer should be NULL if there is no input"
                    " buffer sent in the request, skipping input buffer return.",
                    __FUNCTION__);
        }
    }
}

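// Buffer return is split into two phases so that the potentially blocking
// Camera3Stream::returnBuffer() calls can be made without holding the in-flight or output
// locks (gated by flags::return_buffers_outside_locks()):
//   1. collectReturnableOutputBuffers() runs under the caller's lock, emits any required
//      ERROR_BUFFER notifications, and records what has to be returned as BufferToReturn
//      entries.
//   2. finishReturningOutputBuffers() later drains those entries, returning each buffer to
//      its stream and updating the session statistics.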
void collectReturnableOutputBuffers(
        bool useHalBufManager,
        const std::set<int32_t> &halBufferManagedStreams,
        sp<NotificationListener> listener,
        const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
        nsecs_t timestamp, nsecs_t readoutTimestamp, bool requested,
        nsecs_t requestTimeNs, SessionStatsBuilder& sessionStatsBuilder,
        /*out*/ std::vector<BufferToReturn> *returnableBuffers,
        bool timestampIncreasing, const SurfaceMap& outputSurfaces,
        const CaptureResultExtras &resultExtras,
        ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {
    for (size_t i = 0; i < numBuffers; i++)
    {
        Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
        int streamId = stream->getId();

        // Call notify(ERROR_BUFFER) if necessary.
        if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
                errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
            if (listener != nullptr) {
                CaptureResultExtras extras = resultExtras;
                extras.errorStreamId = streamId;
                listener->notifyError(
                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                        extras);
            }
        }

        if (outputBuffers[i].buffer == nullptr) {
            if (!useHalBufManager &&
                    !(flags::session_hal_buf_manager() &&
                            contains(halBufferManagedStreams, streamId))) {
                // With the HAL buffer management API, the HAL sometimes has to return buffers
                // that do not yet have an output buffer handle filled in. Doing so is illegal,
                // however, if the HAL buffer management API is not in use.
                ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
            } else {
                if (requested) {
                    sessionStatsBuilder.incCounter(streamId, /*dropped*/true, 0);
                }
            }
            continue;
        }

        const auto& it = outputSurfaces.find(streamId);

        // Do not return the buffer if the buffer status is error, and the error
        // buffer strategy is CACHE.
        if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
                errorBufStrategy != ERROR_BUF_CACHE) {
            if (it != outputSurfaces.end()) {
                returnableBuffers->emplace_back(stream,
                        outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
                        it->second, resultExtras,
                        transform, requested ? requestTimeNs : 0);
            } else {
                returnableBuffers->emplace_back(stream,
                        outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
                        std::vector<size_t> (), resultExtras,
                        transform, requested ? requestTimeNs : 0 );
            }
        }
    }
}

void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
        sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder) {
    for (auto& b : returnableBuffers) {
        const int streamId = b.stream->getId();

        status_t res = b.stream->returnBuffer(b.buffer, b.timestamp,
                b.readoutTimestamp, b.timestampIncreasing,
                b.surfaceIds, b.resultExtras.frameNumber, b.transform);

        // Note: stream may be deallocated at this point, if this buffer was
        // the last reference to it.
        bool dropped = false;
        if (res == NO_INIT || res == DEAD_OBJECT) {
            ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
            sessionStatsBuilder.stopCounter(streamId);
        } else if (res != OK) {
            ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
            dropped = true;
        } else {
            if (b.buffer.status == CAMERA_BUFFER_STATUS_ERROR || b.timestamp == 0) {
                dropped = true;
            }
        }
        if (b.requestTimeNs > 0) {
            nsecs_t bufferTimeNs = systemTime();
            int32_t captureLatencyMs = ns2ms(bufferTimeNs - b.requestTimeNs);
            sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
        }

        // Long processing consumers can cause returnBuffer timeout for shared stream
        // If that happens, cancel the buffer and send a buffer error to client
        if (b.surfaceIds.size() > 0 && res == TIMED_OUT &&
                b.buffer.status == CAMERA_BUFFER_STATUS_OK) {
            // cancel the buffer
            camera_stream_buffer_t sb = b.buffer;
            sb.status = CAMERA_BUFFER_STATUS_ERROR;
            b.stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
                    b.timestampIncreasing, std::vector<size_t> (),
                    b.resultExtras.frameNumber, b.transform);

            if (listener != nullptr) {
                CaptureResultExtras extras = b.resultExtras;
                extras.errorStreamId = streamId;
                listener->notifyError(
                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                        extras);
            }
        }

    }
}

void collectAndRemovePendingOutputBuffers(bool useHalBufManager,
        const std::set<int32_t> &halBufferManagedStreams,
        sp<NotificationListener> listener, InFlightRequest& request,
        SessionStatsBuilder& sessionStatsBuilder,
        std::vector<BufferToReturn> *returnableBuffers) {
    bool timestampIncreasing =
            !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
    nsecs_t readoutTimestamp = request.resultExtras.hasReadoutTimestamp ?
            request.resultExtras.readoutTimestamp : 0;
    collectReturnableOutputBuffers(useHalBufManager, halBufferManagedStreams, listener,
            request.pendingOutputBuffers.array(),
            request.pendingOutputBuffers.size(),
            request.shutterTimestamp, readoutTimestamp,
            /*requested*/true, request.requestTimeNs, sessionStatsBuilder,
            /*out*/ returnableBuffers,
            timestampIncreasing,
            request.outputSurfaces, request.resultExtras,
            request.errorBufStrategy, request.transform);

    // Remove error buffers that are not cached.
    for (auto iter = request.pendingOutputBuffers.begin();
            iter != request.pendingOutputBuffers.end(); ) {
        if (request.errorBufStrategy != ERROR_BUF_CACHE ||
                iter->status != CAMERA_BUFFER_STATUS_ERROR) {
            iter = request.pendingOutputBuffers.erase(iter);
        } else {
            iter++;
        }
    }
}
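
// Editorial note: buffers in the CAMERA_BUFFER_STATUS_ERROR state are deliberately kept in
// pendingOutputBuffers while errorBufStrategy is ERROR_BUF_CACHE; they are only collected
// and returned once the strategy switches to ERROR_BUF_RETURN_NOTIFY (after the final result
// metadata arrives in processCaptureResult()), at which point collectReturnableOutputBuffers()
// also emits the matching ERROR_BUFFER notification.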
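// notifyShutter() handles the HAL's shutter notification for a frame: it verifies that
// shutter notifications arrive in order (tracked separately for regular, reprocess, and ZSL
// still captures), records the sensor/readout timestamps on the in-flight request,
// propagates frame-duration changes to the output streams, invokes the client's shutter
// callback, and then releases any result metadata and output buffers that were held back
// waiting for the shutter timestamp.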
void notifyShutter(CaptureOutputStates& states, const camera_shutter_msg_t &msg) {
    ATRACE_CALL();
    ssize_t idx;

    std::vector<BufferToReturn> returnableBuffers{};
    CaptureResultExtras pendingNotificationResultExtras{};

    // Set timestamp for the request in the in-flight tracking
    // and get the request ID to send upstream
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        InFlightRequestMap& inflightMap = states.inflightMap;
        idx = inflightMap.indexOfKey(msg.frame_number);
        if (idx >= 0) {
            InFlightRequest &r = inflightMap.editValueAt(idx);

            // Verify ordering of shutter notifications
            {
                std::lock_guard<std::mutex> l(states.outputLock);
                // TODO: need to track errors for tighter bounds on expected frame number.
                if (r.hasInputBuffer) {
                    if (msg.frame_number < states.nextReprocShutterFrameNum) {
                        SET_ERR("Reprocess shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextReprocShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextReprocShutterFrameNum = msg.frame_number + 1;
                } else if (r.zslCapture && r.stillCapture) {
                    if (msg.frame_number < states.nextZslShutterFrameNum) {
                        SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextZslShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextZslShutterFrameNum = msg.frame_number + 1;
                } else {
                    if (msg.frame_number < states.nextShutterFrameNum) {
                        SET_ERR("Shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextShutterFrameNum = msg.frame_number + 1;
                }
            }

            r.shutterTimestamp = msg.timestamp;
            if (msg.readout_timestamp_valid) {
                r.resultExtras.hasReadoutTimestamp = true;
                r.resultExtras.readoutTimestamp = msg.readout_timestamp;
            }
            if (r.minExpectedDuration != states.minFrameDuration ||
                    r.isFixedFps != states.isFixedFps) {
                for (size_t i = 0; i < states.outputStreams.size(); i++) {
                    auto outputStream = states.outputStreams[i];
                    outputStream->onMinDurationChanged(r.minExpectedDuration, r.isFixedFps);
                }
                states.minFrameDuration = r.minExpectedDuration;
                states.isFixedFps = r.isFixedFps;
            }
            if (r.hasCallback) {
                ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
                    states.cameraId.c_str(), __FUNCTION__,
                    msg.frame_number, r.resultExtras.requestId, msg.timestamp);
                // Call listener, if any
                if (states.listener != nullptr) {
                    r.resultExtras.lastCompletedRegularFrameNumber =
                            states.lastCompletedRegularFrameNumber;
                    r.resultExtras.lastCompletedReprocessFrameNumber =
                            states.lastCompletedReprocessFrameNumber;
                    r.resultExtras.lastCompletedZslFrameNumber =
                            states.lastCompletedZslFrameNumber;
                    if (flags::return_buffers_outside_locks()) {
                        pendingNotificationResultExtras = r.resultExtras;
                    } else {
                        states.listener->notifyShutter(r.resultExtras, msg.timestamp);
                    }
                }
                // send pending result and buffers; this queues them up for delivery later
                const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
                        inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
                sendCaptureResult(states,
                    r.pendingMetadata, r.resultExtras,
                    r.collectedPartialResult, msg.frame_number,
                    r.hasInputBuffer, r.zslCapture && r.stillCapture,
                    r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
            }
            collectAndRemovePendingOutputBuffers(
                    states.useHalBufManager, states.halBufManagedStreamIds,
                    states.listener, r, states.sessionStatsBuilder, &returnableBuffers);

            if (!flags::return_buffers_outside_locks()) {
                finishReturningOutputBuffers(returnableBuffers,
                        states.listener, states.sessionStatsBuilder);
            }

            removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);

        }
    }
    if (idx < 0) {
        SET_ERR("Shutter notification for non-existent frame number %d",
                msg.frame_number);
    }
    // Call notifyShutter outside of in-flight mutex
    if (flags::return_buffers_outside_locks() && pendingNotificationResultExtras.isValid()) {
        states.listener->notifyShutter(pendingNotificationResultExtras, msg.timestamp);
    }

    // With no locks held, finish returning buffers to streams, which may take a while since
    // binder calls are involved
    if (flags::return_buffers_outside_locks()) {
        finishReturningOutputBuffers(returnableBuffers,
                states.listener, states.sessionStatsBuilder);
    }

}
1133 
notifyError(CaptureOutputStates & states,const camera_error_msg_t & msg)1134 void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
1135     ATRACE_CALL();
1136     // Map camera HAL error codes to ICameraDeviceCallback error codes
1137     // Index into this with the HAL error code
1138     static const int32_t halErrorMap[CAMERA_MSG_NUM_ERRORS] = {
1139         // 0 = Unused error code
1140         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
1141         // 1 = CAMERA_MSG_ERROR_DEVICE
1142         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
1143         // 2 = CAMERA_MSG_ERROR_REQUEST
1144         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
1145         // 3 = CAMERA_MSG_ERROR_RESULT
1146         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
1147         // 4 = CAMERA_MSG_ERROR_BUFFER
1148         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
1149     };
1150 
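    // Out-of-range HAL error codes are reported as a generic invalid error.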
    int32_t errorCode =
            ((msg.error_code >= 0) &&
                    (msg.error_code < CAMERA_MSG_NUM_ERRORS)) ?
            halErrorMap[msg.error_code] :
            hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;

    int streamId = 0;
    std::string physicalCameraId;
    if (msg.error_stream != nullptr) {
        Camera3Stream *stream =
                Camera3Stream::cast(msg.error_stream);
        streamId = stream->getId();
        physicalCameraId = stream->physicalCameraId();
    }
    ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
            states.cameraId.c_str(), __FUNCTION__, msg.frame_number,
            streamId, msg.error_code);

    CaptureResultExtras resultExtras;
    switch (errorCode) {
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
            // SET_ERR calls into listener to notify application
            SET_ERR("Camera HAL reported serious device error");
            break;
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
        {
            std::vector<BufferToReturn> returnableBuffers{};
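            // Collect returnable buffers while holding the in-flight lock; when the
            // return_buffers_outside_locks flag is set they are returned to their
            // streams only after the lock is released.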
            {
                std::lock_guard<std::mutex> l(states.inflightLock);
                ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
                if (idx >= 0) {
                    InFlightRequest &r = states.inflightMap.editValueAt(idx);
                    r.requestStatus = msg.error_code;
                    resultExtras = r.resultExtras;
                    bool physicalDeviceResultError = false;
                    if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
                            errorCode) {
                        if (physicalCameraId.size() > 0) {
                            bool validPhysicalCameraId =
                                    erasePhysicalCameraIdSet(r.physicalCameraIds, physicalCameraId);
                            if (!validPhysicalCameraId) {
                                ALOGE("%s: Reported result failure for physical camera device: %s "
                                        "which is not part of the respective request!",
                                        __FUNCTION__, physicalCameraId.c_str());
                                break;
                            }
                            resultExtras.errorPhysicalCameraId = physicalCameraId;
                            physicalDeviceResultError = true;
                        }
                    }

                    if (!physicalDeviceResultError) {
                        r.skipResultMetadata = true;
                        if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT
                                == errorCode) {
                            r.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
                        } else {
                            // errorCode is ERROR_CAMERA_REQUEST
                            r.errorBufStrategy = ERROR_BUF_RETURN;
                        }

                        // Check whether all buffers have been returned; if so, remove
                        // the in-flight request.
                        removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
                        if (!flags::return_buffers_outside_locks()) {
                            finishReturningOutputBuffers(returnableBuffers,
                                    states.listener, states.sessionStatsBuilder);
                        }

                    }
                } else {
                    resultExtras.frameNumber = msg.frame_number;
                    ALOGE("Camera %s: %s: cannot find in-flight request on "
                            "frame %" PRId64 " error", states.cameraId.c_str(), __FUNCTION__,
                            resultExtras.frameNumber);
                }
            }

            if (flags::return_buffers_outside_locks()) {
                finishReturningOutputBuffers(returnableBuffers,
                        states.listener, states.sessionStatsBuilder);
            }

            resultExtras.errorStreamId = streamId;
            if (states.listener != nullptr) {
                states.listener->notifyError(errorCode, resultExtras);
            } else {
                ALOGE("Camera %s: %s: no listener available",
                        states.cameraId.c_str(), __FUNCTION__);
            }
            break;
        }
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
            // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
            // callback to the app. Rather, use STATUS_ERROR of image buffers.
            break;
        default:
            // SET_ERR calls notifyError
            SET_ERR("Unknown error message from HAL: %d", msg.error_code);
            break;
    }
}

void notify(CaptureOutputStates& states, const camera_notify_msg *msg) {
    switch (msg->type) {
        case CAMERA_MSG_ERROR: {
            notifyError(states, msg->message.error);
            break;
        }
        case CAMERA_MSG_SHUTTER: {
            notifyShutter(states, msg->message.shutter);
            break;
        }
        default:
            SET_ERR("Unknown notify message from HAL: %d",
                    msg->type);
    }
}

void flushInflightRequests(FlushInflightReqStates& states) {
    ATRACE_CALL();
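    // Buffers collected from the in-flight map; depending on the
    // return_buffers_outside_locks flag they are returned either while holding the
    // in-flight lock or after it is released.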
    std::vector<BufferToReturn> returnableBuffers{};
    { // First return buffers cached in inFlightMap
        std::lock_guard<std::mutex> l(states.inflightLock);
        for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
            const InFlightRequest &request = states.inflightMap.valueAt(idx);
            collectReturnableOutputBuffers(
                states.useHalBufManager, states.halBufManagedStreamIds,
                states.listener,
                request.pendingOutputBuffers.array(),
                request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
                /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
                /*out*/ &returnableBuffers,
                /*timestampIncreasing*/true, request.outputSurfaces, request.resultExtras,
                request.errorBufStrategy);
            if (!flags::return_buffers_outside_locks()) {
                finishReturningOutputBuffers(returnableBuffers,
                        states.listener, states.sessionStatsBuilder);
            }
            ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
                    " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                    states.inflightMap.keyAt(idx), request.shutterTimestamp,
                    request.haveResultMetadata ? "true" : "false",
                    request.numBuffersLeft);
        }

        states.inflightMap.clear();
        states.inflightIntf.onInflightMapFlushedLocked();
    }
    if (flags::return_buffers_outside_locks()) {
        finishReturningOutputBuffers(returnableBuffers,
                states.listener, states.sessionStatsBuilder);
    }

    // Then return all inflight buffers not returned by HAL
    std::vector<std::pair<int32_t, int32_t>> inflightKeys;
    states.flushBufferIntf.getInflightBufferKeys(&inflightKeys);

    // Inflight buffers for HAL buffer manager
    std::vector<uint64_t> inflightRequestBufferKeys;
    states.flushBufferIntf.getInflightRequestBufferKeys(&inflightRequestBufferKeys);

    // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
    // frameNumber will be -1 for buffers from HAL buffer manager
    std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
    inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());

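    // Pop each buffer tracked per (frameNumber, streamId); entries that cannot be found
    // are logged and skipped.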
    for (auto& pair : inflightKeys) {
        int32_t frameNumber = pair.first;
        int32_t streamId = pair.second;
        buffer_handle_t* buffer;
        status_t res = states.bufferRecordsIntf.popInflightBuffer(frameNumber, streamId, &buffer);
        if (res != OK) {
            ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
                    __FUNCTION__, frameNumber, streamId);
            continue;
        }
        inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
    }

    for (auto& bufferId : inflightRequestBufferKeys) {
        int32_t streamId = -1;
        buffer_handle_t* buffer = nullptr;
        status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
                bufferId, &buffer, &streamId);
        if (res != OK) {
            ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
            continue;
        }
        inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
    }

    std::vector<sp<Camera3StreamInterface>> streams = states.flushBufferIntf.getAllStreams();

    for (auto& tuple : inflightBuffers) {
        status_t res = OK;
        int32_t streamId = std::get<0>(tuple);
        int32_t frameNumber = std::get<1>(tuple);
        buffer_handle_t* buffer = std::get<2>(tuple);

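        // Hand the buffer back to its stream in error state, with no acquire or
        // release fences attached.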
        camera_stream_buffer_t streamBuffer;
        streamBuffer.buffer = buffer;
        streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
        streamBuffer.acquire_fence = -1;
        streamBuffer.release_fence = -1;

        for (auto& stream : streams) {
            if (streamId == stream->getId()) {
                // Return buffer to deleted stream
                camera_stream* halStream = stream->asHalStream();
                streamBuffer.stream = halStream;
                switch (halStream->stream_type) {
                    case CAMERA_STREAM_OUTPUT:
                        res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
                                /*readoutTimestamp*/0, /*timestampIncreasing*/true,
                                std::vector<size_t> (), frameNumber);
                        if (res != OK) {
                            ALOGE("%s: Can't return output buffer for frame %d to"
                                  " stream %d: %s (%d)",  __FUNCTION__,
                                  frameNumber, streamId, strerror(-res), res);
                        }
                        break;
                    case CAMERA_STREAM_INPUT:
                        res = stream->returnInputBuffer(streamBuffer);
                        if (res != OK) {
                            ALOGE("%s: Can't return input buffer for frame %d to"
                                  " stream %d: %s (%d)",  __FUNCTION__,
                                  frameNumber, streamId, strerror(-res), res);
                        }
                        break;
                    default: // Bi-directional stream is deprecated
                        ALOGE("%s: stream %d has unknown stream type %d",
                                __FUNCTION__, streamId, halStream->stream_type);
                        break;
                }
                break;
            }
        }
    }
}

} // camera3
} // namespace android