/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Contains implementation of an abstract class EmulatedCameraDevice that
 * defines functionality expected from an emulated physical camera device:
 *  - Obtaining and setting camera parameters
 *  - Capturing frames
 *  - Streaming video
 *  - etc.
 */

#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera_Device"
#include "EmulatedCameraDevice.h"
#include <cutils/log.h>
#include <sys/select.h>
#include <algorithm>
#include <cmath>
#include "EmulatedCamera.h"

namespace android {

const float GAMMA_CORRECTION = 2.2f;

EmulatedCameraDevice::EmulatedCameraDevice(EmulatedCamera* camera_hal)
    : mObjectLock(),
      mCurFrameTimestamp(0),
      mCameraHAL(camera_hal),
      mCurrentFrame(NULL),
      mExposureCompensation(1.0f),
      mWhiteBalanceScale(NULL),
      mIsFocusing(false),
      mSupportedWhiteBalanceScale(),
      mState(ECDS_CONSTRUCTED) {}

EmulatedCameraDevice::~EmulatedCameraDevice() {
  ALOGV("EmulatedCameraDevice destructor");
  if (mCurrentFrame != NULL) {
    delete[] mCurrentFrame;
  }
  for (size_t i = 0; i < mSupportedWhiteBalanceScale.size(); ++i) {
    if (mSupportedWhiteBalanceScale.valueAt(i) != NULL) {
      delete[] mSupportedWhiteBalanceScale.valueAt(i);
    }
  }
}

/****************************************************************************
 * Emulated camera device public API
 ***************************************************************************/

status_t EmulatedCameraDevice::Initialize() {
  if (isInitialized()) {
    ALOGW("%s: Emulated camera device is already initialized: mState = %d",
          __FUNCTION__, mState);
    return NO_ERROR;
  }

  /* Instantiate worker thread object. */
  mWorkerThread = new WorkerThread(this);
  if (getWorkerThread() == NULL) {
    ALOGE("%s: Unable to instantiate worker thread object", __FUNCTION__);
    return ENOMEM;
  }

  mState = ECDS_INITIALIZED;

  return NO_ERROR;
}

status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst) {
  ALOGV("%s", __FUNCTION__);

  if (!isStarted()) {
    ALOGE("%s: Device is not started", __FUNCTION__);
    return EINVAL;
  }

  /* Frames will be delivered from the thread routine. */
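  /* The worker thread repeatedly calls inWorkerThread(), which a derived
   * device overrides to capture and report a single frame; with 'one_burst'
   * set the thread is expected to stop after delivering the first frame.
   */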
  const status_t res = startWorkerThread(one_burst);
  ALOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
  return res;
}

status_t EmulatedCameraDevice::stopDeliveringFrames() {
  ALOGV("%s", __FUNCTION__);

  if (!isStarted()) {
    ALOGW("%s: Device is not started", __FUNCTION__);
    return NO_ERROR;
  }

  const status_t res = stopWorkerThread();
  ALOGE_IF(res != NO_ERROR, "%s: stopWorkerThread failed", __FUNCTION__);
  return res;
}

void EmulatedCameraDevice::setExposureCompensation(const float ev) {
  ALOGV("%s", __FUNCTION__);

  if (!isStarted()) {
    ALOGW("%s: Fake camera device is not started.", __FUNCTION__);
  }

  mExposureCompensation = std::pow(2.0f, ev / GAMMA_CORRECTION);
  ALOGV("New exposure compensation is %f", mExposureCompensation);
}

void EmulatedCameraDevice::initializeWhiteBalanceModes(const char* mode,
                                                       const float r_scale,
                                                       const float b_scale) {
  ALOGV("%s with %s, %f, %f", __FUNCTION__, mode, r_scale, b_scale);
  float* value = new float[3];
  value[0] = r_scale;
  value[1] = 1.0f;
  value[2] = b_scale;
  mSupportedWhiteBalanceScale.add(String8(mode), value);
}

void EmulatedCameraDevice::setWhiteBalanceMode(const char* mode) {
  ALOGV("%s with white balance %s", __FUNCTION__, mode);
  mWhiteBalanceScale = mSupportedWhiteBalanceScale.valueFor(String8(mode));
}

void EmulatedCameraDevice::startAutoFocus() { mIsFocusing = true; }

/* Computes the pixel value after adjusting the white balance to the current
 * one. The inputs are the y, u and v channels of the pixel, and the adjusted
 * values are stored in place. The adjustment is done in RGB space.
 */
void EmulatedCameraDevice::changeWhiteBalance(uint8_t& y, uint8_t& u,
                                              uint8_t& v) const {
  float r_scale = mWhiteBalanceScale[0];
  float b_scale = mWhiteBalanceScale[2];
  int r = static_cast<float>(YUV2R(y, u, v)) / r_scale;
  int g = YUV2G(y, u, v);
  int b = static_cast<float>(YUV2B(y, u, v)) / b_scale;

  y = RGB2Y(r, g, b);
  u = RGB2U(r, g, b);
  v = RGB2V(r, g, b);
}

void EmulatedCameraDevice::simulateAutoFocus() {
  if (mIsFocusing) {
    ALOGV("%s: Simulating auto-focus", __FUNCTION__);
    mCameraHAL->onCameraFocusAcquired();
    mIsFocusing = false;
  }
}

status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer) {
  if (!isStarted()) {
    ALOGE("%s: Device is not started", __FUNCTION__);
    return EINVAL;
  }
  if (mCurrentFrame == NULL || buffer == NULL) {
    ALOGE("%s: No framebuffer", __FUNCTION__);
    return EINVAL;
  }

  /* In emulation the framebuffer is never RGB. */
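  /* Every conversion below produces RGB32 output (4 bytes per pixel), so
   * 'buffer' must be able to hold at least mFrameWidth * mFrameHeight * 4
   * bytes.
   */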
  switch (mPixelFormat) {
    case V4L2_PIX_FMT_YVU420:
      YV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
      return NO_ERROR;
    case V4L2_PIX_FMT_YUV420:
      YU12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
      return NO_ERROR;
    case V4L2_PIX_FMT_NV21:
      NV21ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
      return NO_ERROR;
    case V4L2_PIX_FMT_NV12:
      NV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
      return NO_ERROR;

    default:
      ALOGE("%s: Unknown pixel format %.4s", __FUNCTION__,
            reinterpret_cast<const char*>(&mPixelFormat));
      return EINVAL;
  }
}

/****************************************************************************
 * Emulated camera device private API
 ***************************************************************************/

status_t EmulatedCameraDevice::commonStartDevice(int width, int height,
                                                 uint32_t pix_fmt, int fps) {
  /* Validate pixel format, and calculate framebuffer size at the same time. */
  switch (pix_fmt) {
    case V4L2_PIX_FMT_YVU420:
    case V4L2_PIX_FMT_YUV420:
    case V4L2_PIX_FMT_NV21:
    case V4L2_PIX_FMT_NV12:
      mFrameBufferSize = (width * height * 12) / 8;
      break;

    default:
      ALOGE("%s: Unknown pixel format %.4s", __FUNCTION__,
            reinterpret_cast<const char*>(&pix_fmt));
      return EINVAL;
  }

  /* Cache framebuffer info. */
  mFrameWidth = width;
  mFrameHeight = height;
  mPixelFormat = pix_fmt;
  mTotalPixels = width * height;
  mTargetFps = fps;

  /* Allocate framebuffer. */
  mCurrentFrame = new uint8_t[mFrameBufferSize];
  if (mCurrentFrame == NULL) {
    ALOGE("%s: Unable to allocate framebuffer", __FUNCTION__);
    return ENOMEM;
  }
  ALOGV("%s: Allocated %p %zu bytes for %d pixels in %.4s[%dx%d] frame",
        __FUNCTION__, mCurrentFrame, mFrameBufferSize, mTotalPixels,
        reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth,
        mFrameHeight);
  return NO_ERROR;
}

void EmulatedCameraDevice::commonStopDevice() {
  mFrameWidth = mFrameHeight = mTotalPixels = 0;
  mPixelFormat = 0;
  mTargetFps = 0;

  if (mCurrentFrame != NULL) {
    delete[] mCurrentFrame;
    mCurrentFrame = NULL;
  }
}

const CameraParameters* EmulatedCameraDevice::getCameraParameters() {
  return mCameraHAL->getCameraParameters();
}

/****************************************************************************
 * Worker thread management.
 ***************************************************************************/

status_t EmulatedCameraDevice::startWorkerThread(bool one_burst) {
  ALOGV("%s", __FUNCTION__);

  if (!isInitialized()) {
    ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
    return EINVAL;
  }

  const status_t res = getWorkerThread()->startThread(one_burst);
  ALOGE_IF(res != NO_ERROR, "%s: Unable to start worker thread", __FUNCTION__);
  return res;
}

status_t EmulatedCameraDevice::stopWorkerThread() {
  ALOGV("%s", __FUNCTION__);

  if (!isInitialized()) {
    ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
    return EINVAL;
  }

  const status_t res = getWorkerThread()->stopThread();
  ALOGE_IF(res != NO_ERROR, "%s: Unable to stop worker thread", __FUNCTION__);
  return res;
}
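
/* A device-specific subclass overrides inWorkerThread() to produce one frame
 * per call; the default implementation below only ends the thread loop. A
 * minimal, hypothetical sketch of such an override (it assumes a FakeDevice
 * subclass and an EmulatedCamera::onNextFrameAvailable() HAL callback, as the
 * fake camera device uses):
 *
 *   bool FakeDevice::inWorkerThread() {
 *     // Wait for the next frame slot, or for a stop request.
 *     if (getWorkerThread()->Select(-1, 1000000 / mTargetFps) ==
 *         WorkerThread::EXIT_THREAD) {
 *       return false;  // Ends the worker thread loop.
 *     }
 *     mCurFrameTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
 *     // ... fill mCurrentFrame with mFrameBufferSize bytes of pixel data ...
 *     mCameraHAL->onNextFrameAvailable(mCurrentFrame, mCurFrameTimestamp,
 *                                      this);
 *     return true;
 *   }
 */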
bool EmulatedCameraDevice::inWorkerThread() {
  /* This will end the thread loop, and will terminate the thread. Derived
   * classes must override this method. */
  return false;
}

/****************************************************************************
 * Worker thread implementation.
 ***************************************************************************/

status_t EmulatedCameraDevice::WorkerThread::readyToRun() {
  ALOGV("Starting emulated camera device worker thread...");

  ALOGW_IF(mThreadControl >= 0 || mControlFD >= 0,
           "%s: Thread control FDs are opened", __FUNCTION__);
  /* Create a pair of FDs that would be used to control the thread. */
  int thread_fds[2];
  status_t ret;
  Mutex::Autolock lock(mCameraDevice->mObjectLock);
  if (pipe(thread_fds) == 0) {
    mThreadControl = thread_fds[1];
    mControlFD = thread_fds[0];
    ALOGV("Emulated device's worker thread has been started.");
    ret = NO_ERROR;
  } else {
    ALOGE("%s: Unable to create thread control FDs: %d -> %s", __FUNCTION__,
          errno, strerror(errno));
    ret = errno;
  }

  mSetup.signal();
  return ret;
}

status_t EmulatedCameraDevice::WorkerThread::stopThread() {
  ALOGV("Stopping emulated camera device's worker thread...");

  status_t res = EINVAL;

  // Limit the scope of the Autolock
  {
    // If thread is running and readyToRun() has not finished running,
    // then wait until it is done.
    Mutex::Autolock lock(mCameraDevice->mObjectLock);
#if VSOC_PLATFORM_SDK_AFTER(J_MR2)
    if (isRunning() && (mThreadControl < 0 || mControlFD < 0)) {
#else
    if (getTid() != -1 && (mThreadControl < 0 || mControlFD < 0)) {
#endif
      mSetup.wait(mCameraDevice->mObjectLock);
    }
  }

  if (mThreadControl >= 0) {
    /* Send "stop" message to the thread loop. */
    const ControlMessage msg = THREAD_STOP;
    const int wres =
        TEMP_FAILURE_RETRY(write(mThreadControl, &msg, sizeof(msg)));
    if (wres == sizeof(msg)) {
      /* Stop the thread, and wait till it's terminated. */
      res = requestExitAndWait();
      if (res == NO_ERROR) {
        /* Close control FDs. */
        if (mThreadControl >= 0) {
          close(mThreadControl);
          mThreadControl = -1;
        }
        if (mControlFD >= 0) {
          close(mControlFD);
          mControlFD = -1;
        }
        ALOGV("Emulated camera device's worker thread has been stopped.");
      } else {
        ALOGE("%s: requestExitAndWait failed: %d -> %s", __FUNCTION__, res,
              strerror(-res));
      }
    } else {
      ALOGE("%s: Unable to send THREAD_STOP message: %d -> %s", __FUNCTION__,
            errno, strerror(errno));
      res = errno ? errno : EINVAL;
    }
  } else {
    ALOGE("%s: Thread control FDs are not opened", __FUNCTION__);
  }

  return res;
}
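
/* Waits for an event on the control pipe and, when 'fd' is non-negative, on
 * the device file descriptor as well, for at most 'timeout' microseconds
 * (a zero timeout blocks indefinitely). Gives the device a chance to finish
 * a simulated auto-focus cycle before waiting.
 */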
EmulatedCameraDevice::WorkerThread::SelectRes
EmulatedCameraDevice::WorkerThread::Select(int fd, int timeout) {
  fd_set fds[1];
  struct timeval tv, *tvp = NULL;

  mCameraDevice->simulateAutoFocus();

  const int fd_num = (fd >= 0) ? std::max(fd, mControlFD) + 1 : mControlFD + 1;
  FD_ZERO(fds);
  FD_SET(mControlFD, fds);
  if (fd >= 0) {
    FD_SET(fd, fds);
  }
  if (timeout) {
    tv.tv_sec = timeout / 1000000;
    tv.tv_usec = timeout % 1000000;
    tvp = &tv;
  }
  int res = TEMP_FAILURE_RETRY(select(fd_num, fds, NULL, NULL, tvp));
  if (res < 0) {
    ALOGE("%s: select returned %d and failed: %d -> %s", __FUNCTION__, res,
          errno, strerror(errno));
    return ERROR;
  } else if (res == 0) {
    /* Timeout. */
    return TIMEOUT;
  } else if (FD_ISSET(mControlFD, fds)) {
    /* A control event. Let's read the message. */
    ControlMessage msg;
    res = TEMP_FAILURE_RETRY(read(mControlFD, &msg, sizeof(msg)));
    if (res != sizeof(msg)) {
      ALOGE("%s: Unexpected message size %d, or an error %d -> %s",
            __FUNCTION__, res, errno, strerror(errno));
      return ERROR;
    }
    /* THREAD_STOP is the only message expected here. */
    if (msg == THREAD_STOP) {
      ALOGV("%s: THREAD_STOP message is received", __FUNCTION__);
      return EXIT_THREAD;
    } else {
      ALOGE("Unknown worker thread message %d", msg);
      return ERROR;
    }
  } else {
    /* Must be an FD. */
    ALOGW_IF(fd < 0 || !FD_ISSET(fd, fds), "%s: Undefined 'select' result",
             __FUNCTION__);
    return READY;
  }
}

}; /* namespace android */