/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2Client"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <inttypes.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <cutils/properties.h>
#include <gui/Surface.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>

#include "api1/Camera2Client.h"

#include "api1/client2/StreamingProcessor.h"
#include "api1/client2/JpegProcessor.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/CallbackProcessor.h"
#include "api1/client2/ZslProcessor.h"

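// Extra-verbose logging helpers: the call sites compile in unconditionally,
// but messages are only emitted when the camera service's gLogLevel is raised
// at runtime.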
#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
#define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);

namespace android {
using namespace camera2;

static int getCallingPid() {
    return IPCThreadState::self()->getCallingPid();
}

// Interface used by CameraService

Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
        const sp<hardware::ICameraClient>& cameraClient,
        const String16& clientPackageName,
        int cameraId,
        int cameraFacing,
        int clientPid,
        uid_t clientUid,
        int servicePid,
        bool legacyMode):
        Camera2ClientBase(cameraService, cameraClient, clientPackageName,
                String8::format("%d", cameraId), cameraFacing,
                clientPid, clientUid, servicePid),
        mParameters(cameraId, cameraFacing)
{
    ATRACE_CALL();

    SharedParameters::Lock l(mParameters);
    l.mParameters.state = Parameters::DISCONNECTED;

    mLegacyMode = legacyMode;
}

status_t Camera2Client::initialize(sp<CameraProviderManager> manager) {
    return initializeImpl(manager);
}

template<typename TProviderPtr>
status_t Camera2Client::initializeImpl(TProviderPtr providerPtr)
{
    ATRACE_CALL();
    ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
    status_t res;

    res = Camera2ClientBase::initialize(providerPtr);
    if (res != OK) {
        return res;
    }

    {
        SharedParameters::Lock l(mParameters);

        res = l.mParameters.initialize(&(mDevice->info()), mDeviceVersion);
        if (res != OK) {
            ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return NO_INIT;
        }
    }

    String8 threadName;

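    // The frame, capture-sequencer, JPEG, ZSL, and callback processors each
    // run on their own thread, named "C2-<id>-...", so the per-camera
    // pipelines are easy to identify in traces and thread listings.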
    mStreamingProcessor = new StreamingProcessor(this);
    threadName = String8::format("C2-%d-StreamProc",
            mCameraId);

    mFrameProcessor = new FrameProcessor(mDevice, this);
    threadName = String8::format("C2-%d-FrameProc",
            mCameraId);
    mFrameProcessor->run(threadName.string());

    mCaptureSequencer = new CaptureSequencer(this);
    threadName = String8::format("C2-%d-CaptureSeq",
            mCameraId);
    mCaptureSequencer->run(threadName.string());

    mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
    threadName = String8::format("C2-%d-JpegProc",
            mCameraId);
    mJpegProcessor->run(threadName.string());

    mZslProcessor = new ZslProcessor(this, mCaptureSequencer);

    threadName = String8::format("C2-%d-ZslProc",
            mCameraId);
    mZslProcessor->run(threadName.string());

    mCallbackProcessor = new CallbackProcessor(this);
    threadName = String8::format("C2-%d-CallbkProc",
            mCameraId);
    mCallbackProcessor->run(threadName.string());

    if (gLogLevel >= 1) {
        SharedParameters::Lock l(mParameters);
        ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
                mCameraId);
        ALOGD("%s", l.mParameters.paramsFlattened.string());
    }

    return OK;
}

Camera2Client::~Camera2Client() {
    ATRACE_CALL();
    ALOGV("~Camera2Client");

    mDestructionStarted = true;

    disconnect();

    ALOGI("Camera %d: Closed", mCameraId);
}

status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
    return BasicClient::dump(fd, args);
}

status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
    String8 result;
    result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
            (getRemoteCallback() != NULL ?
                    (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
            mClientPid);
    result.append(" State: ");
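// Expands to a switch case that appends the enum constant's name to the dump.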
#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;

    const Parameters& p = mParameters.unsafeAccess();

    result.append(Parameters::getStateName(p.state));

    result.append("\n Current parameters:\n");
    result.appendFormat(" Preview size: %d x %d\n",
            p.previewWidth, p.previewHeight);
    result.appendFormat(" Preview FPS range: %d - %d\n",
            p.previewFpsRange[0], p.previewFpsRange[1]);
    result.appendFormat(" Preview HAL pixel format: 0x%x\n",
            p.previewFormat);
    result.appendFormat(" Preview transform: %x\n",
            p.previewTransform);
    result.appendFormat(" Picture size: %d x %d\n",
            p.pictureWidth, p.pictureHeight);
    result.appendFormat(" Jpeg thumbnail size: %d x %d\n",
            p.jpegThumbSize[0], p.jpegThumbSize[1]);
    result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n",
            p.jpegQuality, p.jpegThumbQuality);
    result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation);
    result.appendFormat(" GPS tags %s\n",
            p.gpsEnabled ? "enabled" : "disabled");
    if (p.gpsEnabled) {
        result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n",
                p.gpsCoordinates[0], p.gpsCoordinates[1],
                p.gpsCoordinates[2]);
        result.appendFormat(" GPS timestamp: %" PRId64 "\n",
                p.gpsTimestamp);
        result.appendFormat(" GPS processing method: %s\n",
                p.gpsProcessingMethod.string());
    }

    result.append(" White balance mode: ");
    switch (p.wbMode) {
        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
        default: result.append("UNKNOWN\n");
    }

    result.append(" Effect mode: ");
    switch (p.effectMode) {
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
        default: result.append("UNKNOWN\n");
    }

    result.append(" Antibanding mode: ");
    switch (p.antibandingMode) {
        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
        default: result.append("UNKNOWN\n");
    }

    result.append(" Scene mode: ");
    switch (p.sceneMode) {
        case ANDROID_CONTROL_SCENE_MODE_DISABLED:
            result.append("AUTO\n"); break;
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
        CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
        default: result.append("UNKNOWN\n");
    }

    result.append(" Flash mode: ");
    switch (p.flashMode) {
        CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
        CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
        CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON)
        CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
        CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
        CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
        default: result.append("UNKNOWN\n");
    }

    result.append(" Focus mode: ");
    switch (p.focusMode) {
        CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
        CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
        CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO)
        CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE)
        CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF)
        CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
        CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
        CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
        default: result.append("UNKNOWN\n");
    }

    result.append(" Focus state: ");
    switch (p.focusState) {
        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
        CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
        default: result.append("UNKNOWN\n");
    }

    result.append(" Focusing areas:\n");
    for (size_t i = 0; i < p.focusingAreas.size(); i++) {
        result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
                p.focusingAreas[i].left,
                p.focusingAreas[i].top,
                p.focusingAreas[i].right,
                p.focusingAreas[i].bottom,
                p.focusingAreas[i].weight);
    }

    result.appendFormat(" Exposure compensation index: %d\n",
            p.exposureCompensation);

    result.appendFormat(" AE lock %s, AWB lock %s\n",
            p.autoExposureLock ? "enabled" : "disabled",
            p.autoWhiteBalanceLock ? "enabled" : "disabled");

    result.appendFormat(" Metering areas:\n");
    for (size_t i = 0; i < p.meteringAreas.size(); i++) {
        result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
                p.meteringAreas[i].left,
                p.meteringAreas[i].top,
                p.meteringAreas[i].right,
                p.meteringAreas[i].bottom,
                p.meteringAreas[i].weight);
    }

    result.appendFormat(" Zoom index: %d\n", p.zoom);
    result.appendFormat(" Video size: %d x %d\n", p.videoWidth,
            p.videoHeight);

    result.appendFormat(" Recording hint is %s\n",
            p.recordingHint ? "set" : "not set");

    result.appendFormat(" Video stabilization is %s\n",
            p.videoStabilization ? "enabled" : "disabled");

    result.appendFormat(" Selected still capture FPS range: %d - %d\n",
            p.fastInfo.bestStillCaptureFpsRange[0],
            p.fastInfo.bestStillCaptureFpsRange[1]);

    result.appendFormat(" Use zero shutter lag: %s\n",
            p.useZeroShutterLag() ? "yes" : "no");

    result.append(" Current streams:\n");
    result.appendFormat(" Preview stream ID: %d\n",
            getPreviewStreamId());
    result.appendFormat(" Capture stream ID: %d\n",
            getCaptureStreamId());
    result.appendFormat(" Recording stream ID: %d\n",
            getRecordingStreamId());

    result.append(" Quirks for this camera:\n");
    bool haveQuirk = false;
    if (p.quirks.triggerAfWithAuto) {
        result.appendFormat(" triggerAfWithAuto\n");
        haveQuirk = true;
    }
    if (p.quirks.useZslFormat) {
        result.appendFormat(" useZslFormat\n");
        haveQuirk = true;
    }
    if (p.quirks.meteringCropRegion) {
        result.appendFormat(" meteringCropRegion\n");
        haveQuirk = true;
    }
    if (p.quirks.partialResults) {
        result.appendFormat(" usePartialResult\n");
        haveQuirk = true;
    }
    if (!haveQuirk) {
        result.appendFormat(" none\n");
    }

    write(fd, result.string(), result.size());

    mStreamingProcessor->dump(fd, args);

    mCaptureSequencer->dump(fd, args);

    mFrameProcessor->dump(fd, args);

    mZslProcessor->dump(fd, args);

    return dumpDevice(fd, args);
#undef CASE_APPEND_ENUM
}

// ICamera interface

binder::Status Camera2Client::disconnect() {
    ATRACE_CALL();
    Mutex::Autolock icl(mBinderSerializationLock);

    binder::Status res = binder::Status::ok();
    // Allow both client and the cameraserver to disconnect at all times
    int callingPid = getCallingPid();
    if (callingPid != mClientPid && callingPid != mServicePid) return res;

    if (mDevice == 0) return res;

    ALOGV("Camera %d: Shutting down", mCameraId);

    /**
     * disconnect() cannot call any methods that might need to promote a
     * wp<Camera2Client>, since disconnect can be called from the destructor, at
     * which point all such promotions will fail.
     */

    stopPreviewL();

    {
        SharedParameters::Lock l(mParameters);
        if (l.mParameters.state == Parameters::DISCONNECTED) return res;
        l.mParameters.state = Parameters::DISCONNECTED;
    }

    mFrameProcessor->requestExit();
    mCaptureSequencer->requestExit();
    mJpegProcessor->requestExit();
    mZslProcessor->requestExit();
    mCallbackProcessor->requestExit();

    ALOGV("Camera %d: Waiting for threads", mCameraId);

    {
        // Don't wait with lock held, in case the other threads need to
        // complete callbacks that re-enter Camera2Client
        mBinderSerializationLock.unlock();

        mFrameProcessor->join();
        mCaptureSequencer->join();
        mJpegProcessor->join();
        mZslProcessor->join();
        mCallbackProcessor->join();

        mBinderSerializationLock.lock();
    }

    ALOGV("Camera %d: Deleting streams", mCameraId);

    mStreamingProcessor->deletePreviewStream();
    mStreamingProcessor->deleteRecordingStream();
    mJpegProcessor->deleteStream();
    mCallbackProcessor->deleteStream();
    mZslProcessor->deleteStream();

    ALOGV("Camera %d: Disconnecting device", mCameraId);

    mDevice->disconnect();

    mDevice.clear();

    CameraService::Client::disconnect();

    return res;
}

status_t Camera2Client::connect(const sp<hardware::ICameraClient>& client) {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);

    if (mClientPid != 0 && getCallingPid() != mClientPid) {
        ALOGE("%s: Camera %d: Connection attempt from pid %d; "
                "current locked to pid %d", __FUNCTION__,
                mCameraId, getCallingPid(), mClientPid);
        return BAD_VALUE;
    }

    mClientPid = getCallingPid();

    mRemoteCallback = client;
    mSharedCameraCallbacks = client;

    return OK;
}

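// lock()/unlock() implement the legacy ICamera ownership hand-off: the client
// process that owns the camera can unlock it so another process (typically the
// media server during recording) can operate the same connection, and lock it
// back afterwards.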
status_t Camera2Client::lock() {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);
    ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);

    if (mClientPid == 0) {
        mClientPid = getCallingPid();
        return OK;
    }

    if (mClientPid != getCallingPid()) {
        ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
                __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
        return EBUSY;
    }

    return OK;
}

status_t Camera2Client::unlock() {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);
    ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);

    if (mClientPid == getCallingPid()) {
        SharedParameters::Lock l(mParameters);
        if (l.mParameters.state == Parameters::RECORD ||
                l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
            ALOGD("Not allowed to unlock camera during recording.");
            return INVALID_OPERATION;
        }
        mClientPid = 0;
        mRemoteCallback.clear();
        mSharedCameraCallbacks.clear();
        return OK;
    }

    ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
    return EBUSY;
}

status_t Camera2Client::setPreviewTarget(
        const sp<IGraphicBufferProducer>& bufferProducer) {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    sp<IBinder> binder;
    sp<Surface> window;
    if (bufferProducer != 0) {
        binder = IInterface::asBinder(bufferProducer);
        // Using controlledByApp flag to ensure that the buffer queue remains in
        // async mode for the old camera API, where many applications depend
        // on that behavior.
        window = new Surface(bufferProducer, /*controlledByApp*/ true);
    }
    return setPreviewWindowL(binder, window);
}

status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
        const sp<Surface>& window) {
    ATRACE_CALL();
    status_t res;

    if (binder == mPreviewSurface) {
        ALOGV("%s: Camera %d: New window is same as old window",
                __FUNCTION__, mCameraId);
        return NO_ERROR;
    }

    Parameters::State state;
    {
        SharedParameters::Lock l(mParameters);
        state = l.mParameters.state;
    }
    switch (state) {
        case Parameters::DISCONNECTED:
        case Parameters::RECORD:
        case Parameters::STILL_CAPTURE:
        case Parameters::VIDEO_SNAPSHOT:
            ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
                    __FUNCTION__, mCameraId,
                    Parameters::getStateName(state));
            return INVALID_OPERATION;
        case Parameters::STOPPED:
        case Parameters::WAITING_FOR_PREVIEW_WINDOW:
            // OK
            break;
        case Parameters::PREVIEW:
            // Already running preview - need to stop and create a new stream
            res = stopStream();
            if (res != OK) {
                ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
                return res;
            }
            state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
            break;
    }

    mPreviewSurface = binder;
    res = mStreamingProcessor->setPreviewWindow(window);
    if (res != OK) {
        ALOGE("%s: Unable to set new preview window: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) {
        SharedParameters::Lock l(mParameters);
        l.mParameters.state = state;
        return startPreviewL(l.mParameters, false);
    }

    return OK;
}

void Camera2Client::setPreviewCallbackFlag(int flag) {
    ATRACE_CALL();
    ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
    Mutex::Autolock icl(mBinderSerializationLock);

    if ( checkPid(__FUNCTION__) != OK) return;

    SharedParameters::Lock l(mParameters);
    setPreviewCallbackFlagL(l.mParameters, flag);
}

void Camera2Client::setPreviewCallbackFlagL(Parameters &params, int flag) {
    status_t res = OK;

    switch(params.state) {
        case Parameters::STOPPED:
        case Parameters::WAITING_FOR_PREVIEW_WINDOW:
        case Parameters::PREVIEW:
        case Parameters::STILL_CAPTURE:
            // OK
            break;
        default:
            if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
                ALOGE("%s: Camera %d: Can't use preview callbacks "
                        "in state %d", __FUNCTION__, mCameraId, params.state);
                return;
            }
    }

    if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
        ALOGV("%s: setting oneshot", __FUNCTION__);
        params.previewCallbackOneShot = true;
    }
    if (params.previewCallbackFlags != (uint32_t)flag) {

        if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
            // Disable any existing preview callback window when enabling
            // preview callback flags
            res = mCallbackProcessor->setCallbackWindow(NULL);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to clear preview callback surface:"
                        " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
                return;
            }
            params.previewCallbackSurface = false;
        }

        params.previewCallbackFlags = flag;

        if (params.state == Parameters::PREVIEW) {
            res = startPreviewL(params, true);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to refresh request in state %s",
                        __FUNCTION__, mCameraId,
                        Parameters::getStateName(params.state));
            }
        }
    }
}

status_t Camera2Client::setPreviewCallbackTarget(
        const sp<IGraphicBufferProducer>& callbackProducer) {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    sp<Surface> window;
    if (callbackProducer != 0) {
        window = new Surface(callbackProducer);
    }

    res = mCallbackProcessor->setCallbackWindow(window);
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
        return res;
    }

    SharedParameters::Lock l(mParameters);

    if (window != NULL) {
        // Disable traditional callbacks when a valid callback target is given
        l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
        l.mParameters.previewCallbackOneShot = false;
        l.mParameters.previewCallbackSurface = true;
    } else {
        // Disable callback target if given a NULL interface.
        l.mParameters.previewCallbackSurface = false;
    }

    switch(l.mParameters.state) {
        case Parameters::PREVIEW:
            res = startPreviewL(l.mParameters, true);
            break;
        case Parameters::RECORD:
        case Parameters::VIDEO_SNAPSHOT:
            res = startRecordingL(l.mParameters, true);
            break;
        default:
            break;
    }
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to refresh request in state %s",
                __FUNCTION__, mCameraId,
                Parameters::getStateName(l.mParameters.state));
    }

    return OK;
}


status_t Camera2Client::startPreview() {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
    SharedParameters::Lock l(mParameters);
    return startPreviewL(l.mParameters, false);
}

status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
    ATRACE_CALL();
    status_t res;

    ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);

    if ( (params.state == Parameters::PREVIEW ||
            params.state == Parameters::RECORD ||
            params.state == Parameters::VIDEO_SNAPSHOT)
            && !restart) {
        // Succeed attempt to re-enter a streaming state
        ALOGI("%s: Camera %d: Preview already active, ignoring restart",
                __FUNCTION__, mCameraId);
        return OK;
    }
    if (params.state > Parameters::PREVIEW && !restart) {
        ALOGE("%s: Can't start preview in state %s",
                __FUNCTION__,
                Parameters::getStateName(params.state));
        return INVALID_OPERATION;
    }

    if (!mStreamingProcessor->haveValidPreviewWindow()) {
        params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
        return OK;
    }
    params.state = Parameters::STOPPED;
    int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();

    res = mStreamingProcessor->updatePreviewStream(params);
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
        return res;
    }

    bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;

    // We could wait to create the JPEG output stream until first actual use
    // (first takePicture call). However, this would substantially increase the
    // first capture latency on HAL3 devices.
    // So create it unconditionally at preview start. As a drawback,
    // this increases gralloc memory consumption for applications that don't
    // ever take a picture. Do not enter this mode when jpeg stream will slow
    // down preview.
    // TODO: Find a better compromise, though this likely would involve HAL
    // changes.
    int lastJpegStreamId = mJpegProcessor->getStreamId();
    // If jpeg stream will slow down preview, make sure we remove it before starting preview
    if (params.slowJpegMode) {
        mJpegProcessor->deleteStream();
    } else {
        res = updateProcessorStream(mJpegProcessor, params);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't pre-configure still image "
                    "stream: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }
    bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;

    Vector<int32_t> outputStreams;
    bool callbacksEnabled = (params.previewCallbackFlags &
            CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
            params.previewCallbackSurface;

    if (callbacksEnabled) {
        // Can't have recording stream hanging around when enabling callbacks,
        // since it exceeds the max stream count on some devices.
        if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
            ALOGV("%s: Camera %d: Clearing out recording stream before "
                    "creating callback stream", __FUNCTION__, mCameraId);
            res = mStreamingProcessor->stopStream();
            if (res != OK) {
                ALOGE("%s: Camera %d: Can't stop streaming to delete "
                        "recording stream", __FUNCTION__, mCameraId);
                return res;
            }
            res = mStreamingProcessor->deleteRecordingStream();
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete recording stream before "
                        "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId,
                        strerror(-res), res);
                return res;
            }
        }

        res = mCallbackProcessor->updateStream(params);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
        outputStreams.push(getCallbackStreamId());
    } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
        /**
         * Delete the unused callback stream when preview stream is changed and
         * preview is not enabled. Don't need stop preview stream as preview is in
         * STOPPED state now.
         */
        ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId);
        res = mCallbackProcessor->deleteStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }

    if (params.useZeroShutterLag() &&
            getRecordingStreamId() == NO_STREAM) {
        res = updateProcessorStream(mZslProcessor, params);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }

        if (jpegStreamChanged) {
            ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
                    __FUNCTION__, mCameraId);
            mZslProcessor->clearZslQueue();
        }
        outputStreams.push(getZslStreamId());
    } else {
        mZslProcessor->deleteStream();
    }

    outputStreams.push(getPreviewStreamId());

    if (!params.recordingHint) {
        if (!restart) {
            res = mStreamingProcessor->updatePreviewRequest(params);
            if (res != OK) {
                ALOGE("%s: Camera %d: Can't set up preview request: "
                        "%s (%d)", __FUNCTION__, mCameraId,
                        strerror(-res), res);
                return res;
            }
        }
        res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW,
                outputStreams);
    } else {
        if (!restart) {
            res = mStreamingProcessor->updateRecordingRequest(params);
            if (res != OK) {
                ALOGE("%s: Camera %d: Can't set up preview request with "
                        "record hint: %s (%d)", __FUNCTION__, mCameraId,
                        strerror(-res), res);
                return res;
            }
        }
        res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
                outputStreams);
    }
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
        return res;
    }

    params.state = Parameters::PREVIEW;
    return OK;
}

void Camera2Client::stopPreview() {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return;
    stopPreviewL();
}

void Camera2Client::stopPreviewL() {
    ATRACE_CALL();
    status_t res;
    const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
    Parameters::State state;
    {
        SharedParameters::Lock l(mParameters);
        state = l.mParameters.state;
    }

    switch (state) {
        case Parameters::DISCONNECTED:
            // Nothing to do.
            break;
        case Parameters::STOPPED:
        case Parameters::VIDEO_SNAPSHOT:
        case Parameters::STILL_CAPTURE:
            mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout);
            // no break
        case Parameters::RECORD:
        case Parameters::PREVIEW:
            syncWithDevice();
            res = stopStream();
            if (res != OK) {
                ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
                        __FUNCTION__, mCameraId, strerror(-res), res);
            }

            // Flush all in-process captures and buffer in order to stop
            // preview faster.
            res = mDevice->flush();
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
                        __FUNCTION__, mCameraId, strerror(-res), res);
            }

            res = mDevice->waitUntilDrained();
            if (res != OK) {
                ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
                        __FUNCTION__, mCameraId, strerror(-res), res);
            }
            // Clean up recording stream
            res = mStreamingProcessor->deleteRecordingStream();
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete recording stream before "
                        "stop preview: %s (%d)",
                        __FUNCTION__, mCameraId, strerror(-res), res);
            }
            // no break
        case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
            SharedParameters::Lock l(mParameters);
            l.mParameters.state = Parameters::STOPPED;
            commandStopFaceDetectionL(l.mParameters);
            break;
        }
        default:
            ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId,
                    state);
    }
}

bool Camera2Client::previewEnabled() {
    ATRACE_CALL();
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return false;

    SharedParameters::Lock l(mParameters);
    return l.mParameters.state == Parameters::PREVIEW;
}

status_t Camera2Client::setVideoBufferMode(int32_t videoBufferMode) {
    ATRACE_CALL();
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    SharedParameters::Lock l(mParameters);
    switch (l.mParameters.state) {
        case Parameters::RECORD:
        case Parameters::VIDEO_SNAPSHOT:
            ALOGE("%s: Camera %d: Can't be called in state %s",
                    __FUNCTION__, mCameraId,
                    Parameters::getStateName(l.mParameters.state));
            return INVALID_OPERATION;
        default:
            // OK
            break;
    }

    if (videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        ALOGE("%s: %d: Only video buffer queue is supported", __FUNCTION__, __LINE__);
        return BAD_VALUE;
    }

    l.mParameters.videoBufferMode = videoBufferMode;

    return OK;
}

status_t Camera2Client::startRecording() {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
    SharedParameters::Lock l(mParameters);

    return startRecordingL(l.mParameters, false);
}

status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
    status_t res = OK;

    ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);

    switch (params.state) {
        case Parameters::STOPPED:
            res = startPreviewL(params, false);
            if (res != OK) return res;
            // Make sure first preview request is submitted to the HAL device to avoid
            // two consecutive set of configure_streams being called into the HAL.
            // TODO: Refactor this to avoid initial preview configuration.
            syncWithDevice();
            break;
        case Parameters::PREVIEW:
            // Ready to go
            break;
        case Parameters::RECORD:
        case Parameters::VIDEO_SNAPSHOT:
            // OK to call this when recording is already on, just skip unless
            // we're looking to restart
            if (!restart) return OK;
            break;
        default:
            ALOGE("%s: Camera %d: Can't start recording in state %s",
                    __FUNCTION__, mCameraId,
                    Parameters::getStateName(params.state));
            return INVALID_OPERATION;
    };

    if (params.videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        ALOGE("%s: Camera %d: Recording only supported buffer queue mode, but "
                "mode %d is requested!", __FUNCTION__, mCameraId, params.videoBufferMode);
        return INVALID_OPERATION;
    }

    if (!mStreamingProcessor->haveValidRecordingWindow()) {
        ALOGE("%s: No valid recording window", __FUNCTION__);
        return INVALID_OPERATION;
    }

    if (!restart) {
        sCameraService->playSound(CameraService::SOUND_RECORDING_START);
        res = mStreamingProcessor->updateRecordingRequest(params);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }

    // Not all devices can support a preview callback stream and a recording
    // stream at the same time, so assume none of them can.
    if (mCallbackProcessor->getStreamId() != NO_STREAM) {
        ALOGV("%s: Camera %d: Clearing out callback stream before "
                "creating recording stream", __FUNCTION__, mCameraId);
        res = mStreamingProcessor->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
                    __FUNCTION__, mCameraId);
            return res;
        }
        res = mCallbackProcessor->deleteStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to delete callback stream before "
                    "record: %s (%d)", __FUNCTION__, mCameraId,
                    strerror(-res), res);
            return res;
        }
    }

    // Clean up ZSL before transitioning into recording
    if (mZslProcessor->getStreamId() != NO_STREAM) {
        ALOGV("%s: Camera %d: Clearing out zsl stream before "
                "creating recording stream", __FUNCTION__, mCameraId);
        res = mStreamingProcessor->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
                    __FUNCTION__, mCameraId);
            return res;
        }
        res = mDevice->waitUntilDrained();
        if (res != OK) {
            ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
        }
        res = mZslProcessor->clearZslQueue();
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't clear zsl queue",
                    __FUNCTION__, mCameraId);
            return res;
        }
        res = mZslProcessor->deleteStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to delete zsl stream before "
                    "record: %s (%d)", __FUNCTION__, mCameraId,
                    strerror(-res), res);
            return res;
        }
    }

    // Disable callbacks if they're enabled; can't record and use callbacks,
    // and we can't fail record start without stagefright asserting.
    params.previewCallbackFlags = 0;

    // May need to reconfigure video snapshot JPEG sizes
    // during recording startup, so need a more complex sequence here to
    // ensure an early stream reconfiguration doesn't happen
    bool recordingStreamNeedsUpdate;
    res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
    if (res != OK) {
        ALOGE("%s: Camera %d: Can't query recording stream",
                __FUNCTION__, mCameraId);
        return res;
    }

    if (recordingStreamNeedsUpdate) {
        // Need to stop stream here so updateProcessorStream won't trigger configureStream
        // Right now camera device cannot handle configureStream failure gracefully
        // when device is streaming
        res = mStreamingProcessor->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't stop streaming to update record "
                    "stream", __FUNCTION__, mCameraId);
            return res;
        }
        res = mDevice->waitUntilDrained();
        if (res != OK) {
            ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
                    "%s (%d)", __FUNCTION__, mCameraId,
                    strerror(-res), res);
        }

        res = updateProcessorStream<
                StreamingProcessor,
                &StreamingProcessor::updateRecordingStream>(
                    mStreamingProcessor,
                    params);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to update recording stream: "
                    "%s (%d)", __FUNCTION__, mCameraId,
                    strerror(-res), res);
            return res;
        }
    }

    Vector<int32_t> outputStreams;
    outputStreams.push(getPreviewStreamId());
    outputStreams.push(getRecordingStreamId());

    res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
            outputStreams);

    // startStream might trigger a configureStream call and device might fail
    // configureStream due to jpeg size > video size. Try again with jpeg size overridden
    // to video size.
    if (res == BAD_VALUE) {
        overrideVideoSnapshotSize(params);
        res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
                outputStreams);
    }

    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
        return res;
    }

    if (params.state < Parameters::RECORD) {
        params.state = Parameters::RECORD;
    }

    return OK;
}

void Camera2Client::stopRecording() {
    ATRACE_CALL();
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock icl(mBinderSerializationLock);
    SharedParameters::Lock l(mParameters);

    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return;

    switch (l.mParameters.state) {
        case Parameters::RECORD:
            // OK to stop
            break;
        case Parameters::STOPPED:
        case Parameters::PREVIEW:
        case Parameters::STILL_CAPTURE:
        case Parameters::VIDEO_SNAPSHOT:
        default:
            ALOGE("%s: Camera %d: Can't stop recording in state %s",
                    __FUNCTION__, mCameraId,
                    Parameters::getStateName(l.mParameters.state));
            return;
    };

    sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);

    // Remove recording stream because the video target may be abandoned soon.
    res = stopStream();
    if (res != OK) {
        ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
    }

    res = mDevice->waitUntilDrained();
    if (res != OK) {
        ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
    }
    // Clean up recording stream
    res = mStreamingProcessor->deleteRecordingStream();
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to delete recording stream before "
                "stop preview: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
    }
    l.mParameters.recoverOverriddenJpegSize();

    // Restart preview
    res = startPreviewL(l.mParameters, true);
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to return to preview",
                __FUNCTION__, mCameraId);
    }
}

bool Camera2Client::recordingEnabled() {
    ATRACE_CALL();
    Mutex::Autolock icl(mBinderSerializationLock);

    if ( checkPid(__FUNCTION__) != OK) return false;

    return recordingEnabledL();
}

bool Camera2Client::recordingEnabledL() {
    ATRACE_CALL();
    SharedParameters::Lock l(mParameters);

    return (l.mParameters.state == Parameters::RECORD
            || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
}

void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
    (void)mem;
    ATRACE_CALL();
    ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}

void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
    (void)handle;
    ATRACE_CALL();
    ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}

void Camera2Client::releaseRecordingFrameHandleBatch(
        const std::vector<native_handle_t*>& handles) {
    (void)handles;
    ATRACE_CALL();
    ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
}

status_t Camera2Client::autoFocus() {
    ATRACE_CALL();
    Mutex::Autolock icl(mBinderSerializationLock);
    ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    int triggerId;
    bool notifyImmediately = false;
    bool notifySuccess = false;
    {
        SharedParameters::Lock l(mParameters);
        if (l.mParameters.state < Parameters::PREVIEW) {
            ALOGE("%s: Camera %d: Call autoFocus when preview is inactive (state = %d).",
                    __FUNCTION__, mCameraId, l.mParameters.state);
            return INVALID_OPERATION;
        }

        /**
         * If the camera does not support auto-focus, it is a no-op and
         * onAutoFocus(boolean, Camera) callback will be called immediately
         * with a fake value of success set to true.
         *
         * Similarly, if focus mode is set to INFINITY, there's no reason to
         * bother the HAL.
         */
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
                l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
            notifyImmediately = true;
            notifySuccess = true;
        }
        /**
         * If we're in CAF mode, and AF has already been locked, just fire back
         * the callback right away; the HAL would not send a notification since
         * no state change would happen on a AF trigger.
         */
        if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE ||
                l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) &&
                l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) {
            notifyImmediately = true;
            notifySuccess = true;
        }
        /**
         * Send immediate notification back to client
         */
        if (notifyImmediately) {
            SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
            if (l.mRemoteCallback != 0) {
                l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
                        notifySuccess ? 1 : 0, 0);
            }
            return OK;
        }
        /**
         * Handle quirk mode for AF in scene modes
         */
        if (l.mParameters.quirks.triggerAfWithAuto &&
                l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
                l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
                !l.mParameters.focusingAreas[0].isEmpty()) {
            ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
                    __FUNCTION__, l.mParameters.focusMode);
            l.mParameters.shadowFocusMode = l.mParameters.focusMode;
            l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO;
            updateRequests(l.mParameters);
        }

        l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
        triggerId = l.mParameters.currentAfTriggerId;
    }
    ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);

    syncWithDevice();

    mDevice->triggerAutofocus(triggerId);

    return OK;
}

status_t Camera2Client::cancelAutoFocus() {
    ATRACE_CALL();
    Mutex::Autolock icl(mBinderSerializationLock);
    ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    int triggerId;
    {
        SharedParameters::Lock l(mParameters);
        // Canceling does nothing in FIXED or INFINITY modes
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
                l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
            return OK;
        }

        // An active AF trigger is canceled
        if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
            ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
        }

        triggerId = ++l.mParameters.afTriggerCounter;

        // When using triggerAfWithAuto quirk, may need to reset focus mode to
        // the real state at this point. No need to cancel explicitly if
        // changing the AF mode.
        if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) {
            ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__,
                    l.mParameters.shadowFocusMode);
            l.mParameters.focusMode = l.mParameters.shadowFocusMode;
            l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID;
            updateRequests(l.mParameters);

            return OK;
        }
        if (l.mParameters.allowZslMode) {
            mZslProcessor->clearZslQueue();
        }
    }
    syncWithDevice();

    mDevice->triggerCancelAutofocus(triggerId);

    return OK;
}

status_t Camera2Client::takePicture(int msgType) {
    ATRACE_CALL();
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    int takePictureCounter;
    {
        SharedParameters::Lock l(mParameters);
        switch (l.mParameters.state) {
            case Parameters::DISCONNECTED:
            case Parameters::STOPPED:
            case Parameters::WAITING_FOR_PREVIEW_WINDOW:
                ALOGE("%s: Camera %d: Cannot take picture without preview enabled",
                        __FUNCTION__, mCameraId);
                return INVALID_OPERATION;
            case Parameters::PREVIEW:
                // Good to go for takePicture
                res = commandStopFaceDetectionL(l.mParameters);
                if (res != OK) {
                    ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
                            __FUNCTION__, mCameraId);
                    return res;
                }
                l.mParameters.state = Parameters::STILL_CAPTURE;

                // Remove recording stream to prevent video snapshot jpeg logic kicking in
                if (l.mParameters.isJpegSizeOverridden() &&
                        mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
                    res = mStreamingProcessor->togglePauseStream(/*pause*/true);
                    if (res != OK) {
                        ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
                                __FUNCTION__, mCameraId, strerror(-res), res);
                    }
                    res = mDevice->waitUntilDrained();
                    if (res != OK) {
                        ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
                                __FUNCTION__, mCameraId, strerror(-res), res);
                    }
                    // Clean up recording stream
                    res = mStreamingProcessor->deleteRecordingStream();
                    if (res != OK) {
                        ALOGE("%s: Camera %d: Unable to delete recording stream before "
                                "stop preview: %s (%d)",
                                __FUNCTION__, mCameraId, strerror(-res), res);
                    }
                    res = mStreamingProcessor->togglePauseStream(/*pause*/false);
                    if (res != OK) {
                        ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
                                __FUNCTION__, mCameraId, strerror(-res), res);
                    }
                    l.mParameters.recoverOverriddenJpegSize();
                }
                break;
            case Parameters::RECORD:
                // Good to go for video snapshot
                l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
                break;
            case Parameters::STILL_CAPTURE:
            case Parameters::VIDEO_SNAPSHOT:
                ALOGE("%s: Camera %d: Already taking a picture",
                        __FUNCTION__, mCameraId);
                return INVALID_OPERATION;
        }

        ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
        int lastJpegStreamId = mJpegProcessor->getStreamId();
        // slowJpegMode will create jpeg stream in CaptureSequencer before capturing
        if (!l.mParameters.slowJpegMode) {
            res = updateProcessorStream(mJpegProcessor, l.mParameters);
        }

        // If video snapshot fail to configureStream, try override video snapshot size to
        // video size
        if (res == BAD_VALUE && l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
            overrideVideoSnapshotSize(l.mParameters);
            res = updateProcessorStream(mJpegProcessor, l.mParameters);
        }
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
        takePictureCounter = ++l.mParameters.takePictureCounter;

        // Clear ZSL buffer queue when Jpeg size is changed.
        bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
        if (l.mParameters.allowZslMode && jpegStreamChanged) {
            ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
                    __FUNCTION__, mCameraId);
            mZslProcessor->clearZslQueue();
        }
    }

    ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);

    // Need HAL to have correct settings before (possibly) triggering precapture
    syncWithDevice();

    res = mCaptureSequencer->startCapture(msgType);
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
    }

    return res;
}

status_t Camera2Client::setParameters(const String8& params) {
    ATRACE_CALL();
    ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    SharedParameters::Lock l(mParameters);

    Parameters::focusMode_t focusModeBefore = l.mParameters.focusMode;
    res = l.mParameters.set(params);
    if (res != OK) return res;
    Parameters::focusMode_t focusModeAfter = l.mParameters.focusMode;

    if (l.mParameters.allowZslMode && focusModeAfter != focusModeBefore) {
        mZslProcessor->clearZslQueue();
    }

    res = updateRequests(l.mParameters);

    return res;
}

String8 Camera2Client::getParameters() const {
    ATRACE_CALL();
    ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
    Mutex::Autolock icl(mBinderSerializationLock);
    // The camera service can unconditionally get the parameters at all times
    if (getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();

    SharedParameters::ReadLock l(mParameters);

    return l.mParameters.get();
}

status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
    ATRACE_CALL();
    Mutex::Autolock icl(mBinderSerializationLock);
    status_t res;
    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;

    ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId,
            cmd, arg1, arg2);

    switch (cmd) {
        case CAMERA_CMD_START_SMOOTH_ZOOM:
            return commandStartSmoothZoomL();
        case CAMERA_CMD_STOP_SMOOTH_ZOOM:
            return commandStopSmoothZoomL();
        case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
            return commandSetDisplayOrientationL(arg1);
        case CAMERA_CMD_ENABLE_SHUTTER_SOUND:
            return commandEnableShutterSoundL(arg1 == 1);
        case CAMERA_CMD_PLAY_RECORDING_SOUND:
            return commandPlayRecordingSoundL();
        case CAMERA_CMD_START_FACE_DETECTION:
            return commandStartFaceDetectionL(arg1);
        case CAMERA_CMD_STOP_FACE_DETECTION: {
            SharedParameters::Lock l(mParameters);
            return commandStopFaceDetectionL(l.mParameters);
        }
        case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
            return commandEnableFocusMoveMsgL(arg1 == 1);
        case CAMERA_CMD_PING:
            return commandPingL();
        case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
        case CAMERA_CMD_SET_VIDEO_FORMAT:
            ALOGE("%s: command %d (arguments %d, %d) is not supported.",
                    __FUNCTION__, cmd, arg1, arg2);
            return BAD_VALUE;
        default:
            ALOGE("%s: Unknown command %d (arguments %d, %d)",
                    __FUNCTION__, cmd, arg1, arg2);
            return BAD_VALUE;
    }
}
1576
commandStartSmoothZoomL()1577 status_t Camera2Client::commandStartSmoothZoomL() {
1578 ALOGE("%s: Unimplemented!", __FUNCTION__);
1579 return OK;
1580 }
1581
commandStopSmoothZoomL()1582 status_t Camera2Client::commandStopSmoothZoomL() {
1583 ALOGE("%s: Unimplemented!", __FUNCTION__);
1584 return OK;
1585 }
1586
commandSetDisplayOrientationL(int degrees)1587 status_t Camera2Client::commandSetDisplayOrientationL(int degrees) {
1588 int transform = Parameters::degToTransform(degrees,
1589 mCameraFacing == CAMERA_FACING_FRONT);
1590 if (transform == -1) {
1591 ALOGE("%s: Camera %d: Error setting %d as display orientation value",
1592 __FUNCTION__, mCameraId, degrees);
1593 return BAD_VALUE;
1594 }
1595 SharedParameters::Lock l(mParameters);
1596 if (transform != l.mParameters.previewTransform &&
1597 getPreviewStreamId() != NO_STREAM) {
1598 mDevice->setStreamTransform(getPreviewStreamId(), transform);
1599 }
1600 l.mParameters.previewTransform = transform;
1601 return OK;
1602 }
1603
commandEnableShutterSoundL(bool enable)1604 status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
1605 SharedParameters::Lock l(mParameters);
1606 if (enable) {
1607 l.mParameters.playShutterSound = true;
1608 return OK;
1609 }
1610
1611 // the camera2 api legacy mode can unconditionally disable the shutter sound
1612 if (mLegacyMode) {
1613 ALOGV("%s: Disable shutter sound in legacy mode", __FUNCTION__);
1614 l.mParameters.playShutterSound = false;
1615 return OK;
1616 }
1617
1618 // Disabling shutter sound may not be allowed. In that case only
1619 // allow the mediaserver process to disable the sound.
1620 char value[PROPERTY_VALUE_MAX];
1621 property_get("ro.camera.sound.forced", value, "0");
1622 if (strncmp(value, "0", 2) != 0) {
1623 // Disabling shutter sound is not allowed. Deny if the current
1624 // process is not mediaserver.
1625 if (getCallingPid() != getpid()) {
1626 ALOGE("Failed to disable shutter sound. Permission denied (pid %d)",
1627 getCallingPid());
1628 return PERMISSION_DENIED;
1629 }
1630 }
1631
1632 l.mParameters.playShutterSound = false;
1633 return OK;
1634 }
1635
commandPlayRecordingSoundL()1636 status_t Camera2Client::commandPlayRecordingSoundL() {
1637 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1638 return OK;
1639 }
1640
commandStartFaceDetectionL(int)1641 status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
1642 ALOGV("%s: Camera %d: Starting face detection",
1643 __FUNCTION__, mCameraId);
1644 status_t res;
1645 SharedParameters::Lock l(mParameters);
1646 switch (l.mParameters.state) {
1647 case Parameters::DISCONNECTED:
1648 case Parameters::STOPPED:
1649 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1650 case Parameters::STILL_CAPTURE:
1651 ALOGE("%s: Camera %d: Cannot start face detection without preview active",
1652 __FUNCTION__, mCameraId);
1653 return INVALID_OPERATION;
1654 case Parameters::PREVIEW:
1655 case Parameters::RECORD:
1656 case Parameters::VIDEO_SNAPSHOT:
1657 // Good to go for starting face detect
1658 break;
1659 }
1660 // Ignoring type
1661 if (l.mParameters.fastInfo.bestFaceDetectMode ==
1662 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
1663 ALOGE("%s: Camera %d: Face detection not supported",
1664 __FUNCTION__, mCameraId);
1665 return BAD_VALUE;
1666 }
1667 if (l.mParameters.enableFaceDetect) return OK;
1668
1669 l.mParameters.enableFaceDetect = true;
1670
1671 res = updateRequests(l.mParameters);
1672
1673 return res;
1674 }
1675
1676 status_t Camera2Client::commandStopFaceDetectionL(Parameters &params) {
1677 status_t res = OK;
1678 ALOGV("%s: Camera %d: Stopping face detection",
1679 __FUNCTION__, mCameraId);
1680
1681 if (!params.enableFaceDetect) return OK;
1682
1683 params.enableFaceDetect = false;
1684
1685 if (params.state == Parameters::PREVIEW
1686 || params.state == Parameters::RECORD
1687 || params.state == Parameters::VIDEO_SNAPSHOT) {
1688 res = updateRequests(params);
1689 }
1690
1691 return res;
1692 }
1693
1694 status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) {
1695 SharedParameters::Lock l(mParameters);
1696 l.mParameters.enableFocusMoveMessages = enable;
1697
1698 return OK;
1699 }
1700
1701 status_t Camera2Client::commandPingL() {
1702 // Always ping back if access is proper and device is alive
1703 SharedParameters::Lock l(mParameters);
1704 if (l.mParameters.state != Parameters::DISCONNECTED) {
1705 return OK;
1706 } else {
1707 return NO_INIT;
1708 }
1709 }
1710
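// Translate camera2 ICameraDeviceCallbacks error codes into the API1
// CAMERA_MSG_ERROR codes. Per-request, per-result, and per-buffer errors are
// treated as recoverable and are not forwarded to the client.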
1711 void Camera2Client::notifyError(int32_t errorCode,
1712 const CaptureResultExtras& resultExtras) {
1713 int32_t err = CAMERA_ERROR_UNKNOWN;
1714 switch(errorCode) {
1715 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
1716 err = CAMERA_ERROR_RELEASED;
1717 break;
1718 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
1719 err = CAMERA_ERROR_UNKNOWN;
1720 break;
1721 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
1722 err = CAMERA_ERROR_SERVER_DIED;
1723 break;
1724 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
1725 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
1726 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1727 ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
1728 __FUNCTION__, errorCode, resultExtras.requestId);
1729 return;
1730 default:
1731 err = CAMERA_ERROR_UNKNOWN;
1732 break;
1733 }
1734
1735 ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
1736 resultExtras.requestId);
1737
1738 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1739 if (l.mRemoteCallback != nullptr) {
1740 l.mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, err, 0);
1741 }
1742 }
1743
1744
1745 /** Device-related methods */
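// Convert HAL AF state machine transitions into the API1 CAMERA_MSG_FOCUS and
// CAMERA_MSG_FOCUS_MOVE callbacks, filtering out notifications that do not
// belong to the most recent autofocus trigger.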
1746 void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
1747 ALOGV("%s: Autofocus state now %d, last trigger %d",
1748 __FUNCTION__, newState, triggerId);
1749 bool sendCompletedMessage = false;
1750 bool sendMovingMessage = false;
1751
1752 bool success = false;
1753 bool afInMotion = false;
1754 {
1755 SharedParameters::Lock l(mParameters);
1756 // Trace end of AF state
1757 char tmp[32];
1758 if (l.mParameters.afStateCounter > 0) {
1759 camera_metadata_enum_snprint(
1760 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1761 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
1762 }
1763
1764 // Update state
1765 l.mParameters.focusState = newState;
1766 l.mParameters.afStateCounter++;
1767
1768 // Trace start of AF state
1769
1770 camera_metadata_enum_snprint(
1771 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1772 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
1773
1774 switch (l.mParameters.focusMode) {
1775 case Parameters::FOCUS_MODE_AUTO:
1776 case Parameters::FOCUS_MODE_MACRO:
1777 // Don't send notifications upstream if they're not for the current AF
1778 // trigger. For example, if cancel was called in between, or if we
1779 // already sent a notification about this AF call.
1780 if (triggerId != l.mParameters.currentAfTriggerId) break;
1781 switch (newState) {
1782 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1783 success = true;
1784 // no break
1785 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1786 sendCompletedMessage = true;
1787 l.mParameters.currentAfTriggerId = -1;
1788 break;
1789 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1790 // Just starting focusing, ignore
1791 break;
1792 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1793 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1794 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1795 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1796 default:
1797 // Unexpected in AUTO/MACRO mode
1798 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
1799 __FUNCTION__, newState);
1800 break;
1801 }
1802 break;
1803 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
1804 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
1805 switch (newState) {
1806 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1807 success = true;
1808 // no break
1809 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1810 // Don't send notifications upstream if they're not for
1811 // the current AF trigger. For example, if cancel was
1812 // called in between, or if we already sent a
1813 // notification about this AF call.
1814                     // Send both an 'AF done' callback and an 'AF move' callback
1815 if (triggerId != l.mParameters.currentAfTriggerId) break;
1816 sendCompletedMessage = true;
1817 afInMotion = false;
1818 if (l.mParameters.enableFocusMoveMessages &&
1819 l.mParameters.afInMotion) {
1820 sendMovingMessage = true;
1821 }
1822 l.mParameters.currentAfTriggerId = -1;
1823 break;
1824 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1825                     // Cancel was called, or we switched state; only need to
1826                     // notify upstream if AF was currently moving
1827 afInMotion = false;
1828 if (l.mParameters.enableFocusMoveMessages &&
1829 l.mParameters.afInMotion) {
1830 sendMovingMessage = true;
1831 }
1832 break;
1833 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1834 // Start passive scan, inform upstream
1835 afInMotion = true;
1836 // no break
1837 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1838 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1839 // Stop passive scan, inform upstream
1840 if (l.mParameters.enableFocusMoveMessages) {
1841 sendMovingMessage = true;
1842 }
1843 break;
1844 }
1845 l.mParameters.afInMotion = afInMotion;
1846 break;
1847 case Parameters::FOCUS_MODE_EDOF:
1848 case Parameters::FOCUS_MODE_INFINITY:
1849 case Parameters::FOCUS_MODE_FIXED:
1850 default:
1851 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
1852 ALOGE("%s: Unexpected AF state change %d "
1853 "(ID %d) in focus mode %d",
1854 __FUNCTION__, newState, triggerId,
1855 l.mParameters.focusMode);
1856 }
1857 }
1858 }
1859 if (sendMovingMessage) {
1860 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1861 if (l.mRemoteCallback != 0) {
1862 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
1863 afInMotion ? 1 : 0, 0);
1864 }
1865 }
1866 if (sendCompletedMessage) {
1867 ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
1868 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1869 if (l.mRemoteCallback != 0) {
1870 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1871 success ? 1 : 0, 0);
1872 }
1873 }
1874 }
1875
1876 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
1877 ALOGV("%s: Autoexposure state now %d, last trigger %d",
1878 __FUNCTION__, newState, triggerId);
1879 mCaptureSequencer->notifyAutoExposure(newState, triggerId);
1880 }
1881
1882 void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
1883 nsecs_t timestamp) {
1884 (void)resultExtras;
1885 (void)timestamp;
1886
1887 ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
1888 __FUNCTION__, resultExtras.requestId, timestamp);
1889 mCaptureSequencer->notifyShutter(resultExtras, timestamp);
1890
1891 Camera2ClientBase::notifyShutter(resultExtras, timestamp);
1892 }
1893
1894 camera2::SharedParameters& Camera2Client::getParameters() {
1895 return mParameters;
1896 }
1897
1898 int Camera2Client::getPreviewStreamId() const {
1899 return mStreamingProcessor->getPreviewStreamId();
1900 }
1901
1902 int Camera2Client::getCaptureStreamId() const {
1903 return mJpegProcessor->getStreamId();
1904 }
1905
1906 int Camera2Client::getCallbackStreamId() const {
1907 return mCallbackProcessor->getStreamId();
1908 }
1909
1910 int Camera2Client::getRecordingStreamId() const {
1911 return mStreamingProcessor->getRecordingStreamId();
1912 }
1913
1914 int Camera2Client::getZslStreamId() const {
1915 return mZslProcessor->getStreamId();
1916 }
1917
1918 status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId,
1919 const wp<camera2::FrameProcessor::FilteredListener>& listener, bool sendPartials) {
1920 return mFrameProcessor->registerListener(minId, maxId, listener, sendPartials);
1921 }
1922
1923 status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId,
1924 const wp<camera2::FrameProcessor::FilteredListener>& listener) {
1925 return mFrameProcessor->removeListener(minId, maxId, listener);
1926 }
1927
1928 status_t Camera2Client::stopStream() {
1929 return mStreamingProcessor->stopStream();
1930 }
1931
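// Create the JPEG capture stream on demand. Returns INVALID_OPERATION if one
// already exists; otherwise streaming is paused and the device drained before
// the JPEG stream is configured.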
1932 status_t Camera2Client::createJpegStreamL(Parameters &params) {
1933 status_t res = OK;
1934 int lastJpegStreamId = mJpegProcessor->getStreamId();
1935 if (lastJpegStreamId != NO_STREAM) {
1936 return INVALID_OPERATION;
1937 }
1938
1939 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1940 if (res != OK) {
1941 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1942 __FUNCTION__, mCameraId, strerror(-res), res);
1943 return res;
1944 }
1945
1946 res = mDevice->flush();
1947 if (res != OK) {
1948         ALOGE("%s: Camera %d: Unable to flush device: %s (%d)",
1949 __FUNCTION__, mCameraId, strerror(-res), res);
1950 return res;
1951 }
1952
1953 // Ideally we don't need this, but current camera device
1954 // status tracking mechanism demands it.
1955 res = mDevice->waitUntilDrained();
1956 if (res != OK) {
1957 ALOGE("%s: Camera %d: Waiting device drain failed: %s (%d)",
1958 __FUNCTION__, mCameraId, strerror(-res), res);
1959 }
1960
1961 res = updateProcessorStream(mJpegProcessor, params);
1962 return res;
1963 }
1964
1965 const int32_t Camera2Client::kPreviewRequestIdStart;
1966 const int32_t Camera2Client::kPreviewRequestIdEnd;
1967 const int32_t Camera2Client::kRecordingRequestIdStart;
1968 const int32_t Camera2Client::kRecordingRequestIdEnd;
1969 const int32_t Camera2Client::kCaptureRequestIdStart;
1970 const int32_t Camera2Client::kCaptureRequestIdEnd;
1971
1972 /** Utility methods */
1973
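// Rebuild the preview and recording requests from the given parameters and
// restart whichever stream is currently active so the changes take effect. The
// streaming request IDs are incremented first, so results produced with the
// updated settings carry a new request ID.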
1974 status_t Camera2Client::updateRequests(Parameters &params) {
1975 status_t res;
1976
1977 ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state);
1978
1979 res = mStreamingProcessor->incrementStreamingIds();
1980 if (res != OK) {
1981 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)",
1982 __FUNCTION__, mCameraId, strerror(-res), res);
1983 return res;
1984 }
1985
1986 res = mStreamingProcessor->updatePreviewRequest(params);
1987 if (res != OK) {
1988 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)",
1989 __FUNCTION__, mCameraId, strerror(-res), res);
1990 return res;
1991 }
1992 res = mStreamingProcessor->updateRecordingRequest(params);
1993 if (res != OK) {
1994 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
1995 __FUNCTION__, mCameraId, strerror(-res), res);
1996 return res;
1997 }
1998
1999 if (params.state == Parameters::PREVIEW) {
2000 res = startPreviewL(params, true);
2001 if (res != OK) {
2002 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)",
2003 __FUNCTION__, mCameraId, strerror(-res), res);
2004 return res;
2005 }
2006 } else if (params.state == Parameters::RECORD ||
2007 params.state == Parameters::VIDEO_SNAPSHOT) {
2008 res = startRecordingL(params, true);
2009 if (res != OK) {
2010 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
2011 __FUNCTION__, mCameraId, strerror(-res), res);
2012 return res;
2013 }
2014 }
2015 return res;
2016 }
2017
2018
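// Compute the expected size in bytes of a preview callback buffer for the
// given dimensions and HAL pixel format. Only YV12 takes the row stride into
// account; the other formats assume tightly packed rows.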
2019 size_t Camera2Client::calculateBufferSize(int width, int height,
2020 int format, int stride) {
2021 switch (format) {
2022 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16
2023 return width * height * 2;
2024 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21
2025 return width * height * 3 / 2;
2026 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2
2027 return width * height * 2;
2028 case HAL_PIXEL_FORMAT_YV12: { // YV12
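            // YV12 layout: a full-resolution Y plane using the given row
            // stride, followed by two half-resolution chroma planes whose
            // stride is the Y stride / 2, rounded up to a multiple of 16.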
2029 size_t ySize = stride * height;
2030 size_t uvStride = (stride / 2 + 0xF) & ~0xF;
2031 size_t uvSize = uvStride * height / 2;
2032 return ySize + uvSize * 2;
2033 }
2034 case HAL_PIXEL_FORMAT_RGB_565:
2035 return width * height * 2;
2036 case HAL_PIXEL_FORMAT_RGBA_8888:
2037 return width * height * 4;
2038 case HAL_PIXEL_FORMAT_RAW16:
2039 return width * height * 2;
2040 default:
2041 ALOGE("%s: Unknown preview format: %x",
2042 __FUNCTION__, format);
2043 return 0;
2044 }
2045 }
2046
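// Wait (up to kMaxSyncTimeout) until the device has received the currently
// active streaming request, so that subsequent operations observe the latest
// streaming settings. Returns immediately if nothing is streaming.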
2047 status_t Camera2Client::syncWithDevice() {
2048 ATRACE_CALL();
2049 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms
2050 status_t res;
2051
2052 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2053 if (activeRequestId == 0) return OK;
2054
2055 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout);
2056 if (res == TIMED_OUT) {
2057 ALOGE("%s: Camera %d: Timed out waiting sync with HAL",
2058 __FUNCTION__, mCameraId);
2059 } else if (res != OK) {
2060 ALOGE("%s: Camera %d: Error while waiting to sync with HAL",
2061 __FUNCTION__, mCameraId);
2062 }
2063 return res;
2064 }
2065
2066 template <typename ProcessorT>
2067 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2068 camera2::Parameters params) {
2069 // No default template arguments until C++11, so we need this overload
2070 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>(
2071 processor, params);
2072 }
2073
2074 template <typename ProcessorT,
2075 status_t (ProcessorT::*updateStreamF)(const Parameters &)>
2076 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2077 Parameters params) {
2078 status_t res;
2079
2080 // Get raw pointer since sp<T> doesn't have operator->*
2081 ProcessorT *processorPtr = processor.get();
2082 res = (processorPtr->*updateStreamF)(params);
2083
2084 /**
2085 * Can't update the stream if it's busy?
2086 *
2087 * Then we need to stop the device (by temporarily clearing the request
2088 * queue) and then try again. Resume streaming once we're done.
2089 */
2090 if (res == -EBUSY) {
2091 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__,
2092 mCameraId);
2093 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2094 if (res != OK) {
2095 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2096 __FUNCTION__, mCameraId, strerror(-res), res);
2097 }
2098
2099 res = mDevice->waitUntilDrained();
2100 if (res != OK) {
2101 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
2102 __FUNCTION__, mCameraId, strerror(-res), res);
2103 }
2104
2105 res = (processorPtr->*updateStreamF)(params);
2106 if (res != OK) {
2107             ALOGE("%s: Camera %d: Failed to update processing stream "
2108                     "despite having halted streaming first: %s (%d)",
2109 __FUNCTION__, mCameraId, strerror(-res), res);
2110 }
2111
2112 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
2113 if (res != OK) {
2114 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
2115 __FUNCTION__, mCameraId, strerror(-res), res);
2116 }
2117 }
2118
2119 return res;
2120 }
2121
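// Force the JPEG (still capture) stream to match the current video size, so
// video snapshots taken while recording use the recording resolution.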
2122 status_t Camera2Client::overrideVideoSnapshotSize(Parameters &params) {
2123 ALOGV("%s: Camera %d: configure still size to video size before recording"
2124 , __FUNCTION__, mCameraId);
2125 params.overrideJpegSizeByVideoSize();
2126 status_t res = updateProcessorStream(mJpegProcessor, params);
2127 if (res != OK) {
2128 ALOGE("%s: Camera %d: Can't override video snapshot size to video size: %s (%d)",
2129 __FUNCTION__, mCameraId, strerror(-res), res);
2130 }
2131 return res;
2132 }
2133
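// Attach (or replace) the Surface used as the recording target. Only valid
// while stopped, waiting for a preview window, or previewing. The window's
// pixel format and dataspace are queried and cached in the parameters for the
// recording stream configuration.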
2134 status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
2135 ATRACE_CALL();
2136 ALOGV("%s: E", __FUNCTION__);
2137 Mutex::Autolock icl(mBinderSerializationLock);
2138 status_t res;
2139 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
2140
2141 sp<IBinder> binder = IInterface::asBinder(bufferProducer);
2142 if (binder == mVideoSurface) {
2143 ALOGV("%s: Camera %d: New video window is same as old video window",
2144 __FUNCTION__, mCameraId);
2145 return NO_ERROR;
2146 }
2147
2148 sp<Surface> window;
2149 int format;
2150 android_dataspace dataSpace;
2151
2152 if (bufferProducer != nullptr) {
2153 // Using controlledByApp flag to ensure that the buffer queue remains in
2154 // async mode for the old camera API, where many applications depend
2155 // on that behavior.
2156 window = new Surface(bufferProducer, /*controlledByApp*/ true);
2157
2158 ANativeWindow *anw = window.get();
2159
2160 if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
2161 ALOGE("%s: Failed to query Surface format", __FUNCTION__);
2162 return res;
2163 }
2164
2165 if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
2166 reinterpret_cast<int*>(&dataSpace))) != OK) {
2167 ALOGE("%s: Failed to query Surface dataSpace", __FUNCTION__);
2168 return res;
2169 }
2170 }
2171
2172 Parameters::State state;
2173 {
2174 SharedParameters::Lock l(mParameters);
2175 state = l.mParameters.state;
2176 }
2177
2178 switch (state) {
2179 case Parameters::STOPPED:
2180 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
2181 case Parameters::PREVIEW:
2182 // OK
2183 break;
2184 case Parameters::DISCONNECTED:
2185 case Parameters::RECORD:
2186 case Parameters::STILL_CAPTURE:
2187 case Parameters::VIDEO_SNAPSHOT:
2188 default:
2189 ALOGE("%s: Camera %d: Cannot set video target while in state %s",
2190 __FUNCTION__, mCameraId,
2191 Parameters::getStateName(state));
2192 return INVALID_OPERATION;
2193 }
2194
2195 mVideoSurface = binder;
2196 res = mStreamingProcessor->setRecordingWindow(window);
2197 if (res != OK) {
2198 ALOGE("%s: Unable to set new recording window: %s (%d)",
2199 __FUNCTION__, strerror(-res), res);
2200 return res;
2201 }
2202
2203 {
2204 SharedParameters::Lock l(mParameters);
2205 l.mParameters.videoFormat = format;
2206 l.mParameters.videoDataSpace = dataSpace;
2207 }
2208
2209 return OK;
2210 }
2211
2212 const char* Camera2Client::kAutofocusLabel = "autofocus";
2213 const char* Camera2Client::kTakepictureLabel = "take_picture";
2214
2215 } // namespace android
2216