CameraSourceTimeLapse.cpp revision 28934a90e168291f6c77c56e8a05f272e5151bbd
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
#include "OMX_Video.h"
#include <limits.h>

namespace android {

// Factory: constructs a CameraSourceTimeLapse and verifies that the
// underlying CameraSource initialized correctly. Returns NULL (deleting
// the partially-constructed object) if initCheck() fails.
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, cameraId,
                videoSize, videoFrameRate, surface,
                timeBetweenTimeLapseFrameCaptureUs);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

// Decides between two capture strategies:
// - If the camera supports the requested video/preview size, use the video
//   camera path (mUseStillCameraForTimeLapse = false).
// - Otherwise fall back to taking still pictures at the closest supported
//   picture size, cropping each picture down to the video size.
CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs)
    : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (trySettingVideoSize(videoSize.width, videoSize.height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the fastest rate at which the still camera can take pictures.
        mUseStillCameraForTimeLapse = true;
        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
        mNeedCropping = computeCropRectangleOffset();
        // Still pictures are larger than the video size; advertise the
        // cropped (video) dimensions downstream via the metadata.
        mMeta->setInt32(kKeyWidth, videoSize.width);
        mMeta->setInt32(kKeyHeight, videoSize.height);
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}

// Puts the source into "quick stop" mode so a pending read() is not left
// blocked waiting for the next time-lapse frame: the still-camera thread is
// woken immediately, or the video path is forced to accept the next frame.
void CameraSourceTimeLapse::startQuickReadReturns() {
    Mutex::Autolock autoLock(mQuickStopLock);
    LOGV("Enabling quick read returns");

    // Enable quick stop mode.
    mQuickStop = true;

    if (mUseStillCameraForTimeLapse) {
        // wake up the thread right away.
        mTakePictureCondition.signal();
    } else {
        // Force dataCallbackTimestamp() coming from the video camera to not
        // skip the next frame as we want read() to get a frame right away.
        mForceRead = true;
    }
}

// Attempts to configure the camera for the requested width x height.
// Prefers the dedicated video-size parameter when the camera reports
// supported video sizes; otherwise falls back to the preview size.
// Returns true on success, false if the size is unsupported or
// setParameters() fails. Binder calling identity is cleared around the
// camera IPC so the calls are made with this process's identity.
bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
    LOGV("trySettingVideoSize: %dx%d", width, height);
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        LOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            LOGE("Failed to set preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

// Picks the smallest supported still-picture size that is at least
// width x height in both dimensions, storing it in mPictureWidth /
// mPictureHeight. Returns false if no supported picture size is large
// enough (in which case mPictureWidth/mPictureHeight are left unset).
bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
    LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPictureSizes(supportedSizes);

    // Smallest area wins among sizes that cover the requested dimensions.
    int32_t minPictureSize = INT_MAX;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth >= width) && (pictureHeight >= height)) {
            int32_t pictureSize = pictureWidth*pictureHeight;
            if (pictureSize < minPictureSize) {
                minPictureSize = pictureSize;
                mPictureWidth = pictureWidth;
                mPictureHeight = pictureHeight;
            }
        }
    }
    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
    return (minPictureSize != INT_MAX);
}

// Computes the top-left offset of the centered video-sized crop rectangle
// within the still picture. Returns true iff cropping is actually needed
// (picture size differs from video size).
bool CameraSourceTimeLapse::computeCropRectangleOffset() {
    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
        return false;
    }

    // setPictureSizeToClosestSupported() guarantees picture >= video in
    // both dimensions; anything else is a programming error.
    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));

    int32_t widthDifference = mPictureWidth - mVideoWidth;
    int32_t heightDifference = mPictureHeight - mVideoHeight;

    // Center the crop rectangle.
    mCropRectStartX = widthDifference/2;
    mCropRectStartY = heightDifference/2;

    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);

    return true;
}

// In quick-stop mode the copy handed out by read() is owned by this class,
// not by CameraSource, so release it here instead of forwarding it.
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

// Deep-copies sourceBuffer's payload into a newly allocated MediaBuffer
// (returned through *newBuffer) and stamps it with frameTime. The caller
// owns the new buffer.
void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

// Snapshots the last frame returned by read() into mLastReadBufferCopy so
// that subsequent reads in quick-stop mode can keep returning it.
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

// Normal path: delegate to CameraSource::read(). Once quick-stop is active,
// the last frame is copied and every later read() re-returns that copy
// (with an extra ref) along with the cached status.
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked. Make a copy of
        // the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

// pthread entry trampoline for the still-camera capture loop.
// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadTimeLapseEntry();
    return NULL;
}

// Still-camera capture loop: wait for the camera to become idle, take a
// picture, then sleep for mTimeBetweenTimeLapseFrameCaptureUs (or until
// woken by startQuickReadReturns()). Exits when recording stops or
// quick-stop is requested.
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        {
            Mutex::Autolock autoLock(mCameraIdleLock);
            if (!mCameraIdle) {
                mCameraIdleCondition.wait(mCameraIdleLock);
            }
            CHECK(mCameraIdle);
            mCameraIdle = false;
        }

        // Even if mQuickStop == true we need to take one more picture
        // as a read() may be blocked, waiting for a frame to get available.
        // After this takePicture, if mQuickStop == true, we can safely exit
        // this thread as read() will make a copy of this last frame and keep
        // returning it in the quick stop mode.
        Mutex::Autolock autoLock(mQuickStopLock);
        CHECK_EQ(OK, mCamera->takePicture());
        if (mQuickStop) {
            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
            return;
        }
        mTakePictureCondition.waitRelative(mQuickStopLock,
                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
    }
    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
}

// Still-camera mode: configure the picture size, silence the shutter,
// play the recording cue, and spawn the joinable capture-loop thread.
// Video-camera mode: simply start recording.
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;
        mStopWaitingForIdleCamera = false;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
        IPCThreadState::self()->restoreCallingIdentity(token);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}

// Stops recording. In still-camera mode this joins the capture thread and
// then waits for the final in-flight takePicture() to finish before
// detaching the listener. Also drops the quick-stop frame copy, if any.
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // Last takePicture may still be underway. Wait for the camera to get
        // idle.
        Mutex::Autolock autoLock(mCameraIdleLock);
        mStopWaitingForIdleCamera = true;
        if (!mCameraIdle) {
            mCameraIdleCondition.wait(mCameraIdleLock);
        }
        CHECK(mCameraIdle);
        mCamera->setListener(NULL);

        // play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

// Recording frames only exist on the video-camera path; still-camera
// frames are process-local copies and need no release back to the camera.
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (!mUseStillCameraForTimeLapse) {
        mCamera->releaseRecordingFrame(frame);
    }
}

// Deep-copies an IMemory's contents into freshly allocated shared memory
// so the camera's buffer can be returned while we keep the data.
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

// Allocates IMemory of final type MemoryBase with the given size.
366sp<IMemory> allocateIMemory(size_t size) { 367 sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size); 368 sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size); 369 return newMemory; 370} 371 372// static 373void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) { 374 CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); 375 source->threadStartPreview(); 376 return NULL; 377} 378 379void CameraSourceTimeLapse::threadStartPreview() { 380 CHECK_EQ(OK, mCamera->startPreview()); 381 Mutex::Autolock autoLock(mCameraIdleLock); 382 mCameraIdle = true; 383 mCameraIdleCondition.signal(); 384} 385 386void CameraSourceTimeLapse::restartPreview() { 387 // Start this in a different thread, so that the dataCallback can return 388 LOGV("restartPreview"); 389 pthread_attr_t attr; 390 pthread_attr_init(&attr); 391 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED); 392 393 pthread_t threadPreview; 394 pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this); 395 pthread_attr_destroy(&attr); 396} 397 398sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) { 399 // find the YUV format 400 int32_t srcFormat; 401 CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat)); 402 YUVImage::YUVFormat yuvFormat; 403 if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) { 404 yuvFormat = YUVImage::YUV420SemiPlanar; 405 } else { 406 CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar); 407 yuvFormat = YUVImage::YUV420Planar; 408 } 409 410 // allocate memory for cropped image and setup a canvas using it. 
411 sp<IMemory> croppedImageMemory = allocateIMemory( 412 YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight)); 413 YUVImage yuvImageCropped(yuvFormat, 414 mVideoWidth, mVideoHeight, 415 (uint8_t *)croppedImageMemory->pointer()); 416 YUVCanvas yuvCanvasCrop(yuvImageCropped); 417 418 YUVImage yuvImageSource(yuvFormat, 419 mPictureWidth, mPictureHeight, 420 (uint8_t *)source_data->pointer()); 421 yuvCanvasCrop.CopyImageRect( 422 Rect(mCropRectStartX, mCropRectStartY, 423 mCropRectStartX + mVideoWidth, 424 mCropRectStartY + mVideoHeight), 425 0, 0, 426 yuvImageSource); 427 428 return croppedImageMemory; 429} 430 431void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) { 432 if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) { 433 // takePicture will complete after this callback, so restart preview. 434 restartPreview(); 435 return; 436 } 437 if (msgType != CAMERA_MSG_RAW_IMAGE) { 438 return; 439 } 440 441 LOGV("dataCallback for timelapse still frame"); 442 CHECK_EQ(true, mUseStillCameraForTimeLapse); 443 444 int64_t timestampUs; 445 if (mNumFramesReceived == 0) { 446 timestampUs = mStartTimeUs; 447 } else { 448 timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; 449 } 450 451 if (mNeedCropping) { 452 sp<IMemory> croppedImageData = cropYUVImage(data); 453 dataCallbackTimestamp(timestampUs, msgType, croppedImageData); 454 } else { 455 sp<IMemory> dataCopy = createIMemoryCopy(data); 456 dataCallbackTimestamp(timestampUs, msgType, dataCopy); 457 } 458} 459 460bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { 461 if (mSkipCurrentFrame) { 462 mSkipCurrentFrame = false; 463 return true; 464 } else { 465 return false; 466 } 467} 468 469bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) { 470 if (!mUseStillCameraForTimeLapse) { 471 if (mLastTimeLapseFrameRealTimestampUs == 0) { 472 // First time lapse frame. 
Initialize mLastTimeLapseFrameRealTimestampUs 473 // to current time (timestampUs) and save frame data. 474 LOGV("dataCallbackTimestamp timelapse: initial frame"); 475 476 mLastTimeLapseFrameRealTimestampUs = *timestampUs; 477 return false; 478 } 479 480 { 481 Mutex::Autolock autoLock(mQuickStopLock); 482 483 // mForceRead may be set to true by startQuickReadReturns(). In that 484 // case don't skip this frame. 485 if (mForceRead) { 486 LOGV("dataCallbackTimestamp timelapse: forced read"); 487 mForceRead = false; 488 *timestampUs = mLastFrameTimestampUs; 489 return false; 490 } 491 } 492 493 if (*timestampUs < 494 (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) { 495 // Skip all frames from last encoded frame until 496 // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed. 497 // Tell the camera to release its recording frame and return. 498 LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame"); 499 return true; 500 } else { 501 // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time: 502 // - Reset mLastTimeLapseFrameRealTimestampUs to current time. 503 // - Artificially modify timestampUs to be one frame time (1/framerate) ahead 504 // of the last encoded frame's time stamp. 505 LOGV("dataCallbackTimestamp timelapse: got timelapse frame"); 506 507 mLastTimeLapseFrameRealTimestampUs = *timestampUs; 508 *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; 509 return false; 510 } 511 } 512 return false; 513} 514 515void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, 516 const sp<IMemory> &data) { 517 if (!mUseStillCameraForTimeLapse) { 518 mSkipCurrentFrame = skipFrameAndModifyTimeStamp(×tampUs); 519 } else { 520 Mutex::Autolock autoLock(mCameraIdleLock); 521 // If we are using the still camera and stop() has been called, it may 522 // be waiting for the camera to get idle. In that case return 523 // immediately. 
Calling CameraSource::dataCallbackTimestamp() will lead 524 // to a deadlock since it tries to access CameraSource::mLock which in 525 // this case is held by CameraSource::stop() currently waiting for the 526 // camera to get idle. And camera will not get idle until this call 527 // returns. 528 if (mStopWaitingForIdleCamera) { 529 return; 530 } 531 } 532 CameraSource::dataCallbackTimestamp(timestampUs, msgType, data); 533} 534 535} // namespace android 536