CameraSourceTimeLapse.cpp revision 5c9523154d106b555db6c41f85ab205a4f189b02
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
#include "OMX_Video.h"
#include <limits.h>

namespace android {

// static
// Factory: constructs a CameraSourceTimeLapse and verifies that the
// underlying CameraSource initialized correctly. On initCheck() failure the
// partially constructed object is deleted and NULL is returned.
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, cameraId,
                videoSize, videoFrameRate, surface,
                timeBetweenTimeLapseFrameCaptureUs);

    // NOTE(review): plain operator new never returns NULL (it throws/aborts
    // on failure), so this outer check is effectively dead code; kept as-is.
    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

// Chooses between the two capture strategies:
// - If the camera supports a preview size equal to the requested video size,
//   use the video-camera path (frames arrive via dataCallbackTimestamp() and
//   intermediate frames are skipped).
// - Otherwise use the still camera: pick the smallest supported picture size
//   that covers the video size, and crop each picture down to the video size
//   (mNeedCropping / computeCropRectangleOffset()).
CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs)
    : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      // Output timestamps advance by exactly one video frame duration
      // (1/framerate seconds, expressed in microseconds) per kept frame.
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGV("starting time lapse mode");
    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (trySettingPreviewSize(videoSize.width, videoSize.height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the fastest rate at which the still camera can take pictures.
        mUseStillCameraForTimeLapse = true;
        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
        mNeedCropping = computeCropRectangleOffset();
        // The still picture may be larger than the video frame; advertise the
        // (post-crop) video dimensions, not the picture dimensions.
        mMeta->setInt32(kKeyWidth, videoSize.width);
        mMeta->setInt32(kKeyHeight, videoSize.height);
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}

// Switches the source into "quick stop" mode: subsequent read() calls keep
// returning a copy of the last delivered frame instead of blocking for a new
// capture, so that a pending stop can complete promptly.
void CameraSourceTimeLapse::startQuickReadReturns() {
    Mutex::Autolock autoLock(mQuickStopLock);
    LOGV("Enabling quick read returns");

    // Enable quick stop mode.
    mQuickStop = true;

    if (mUseStillCameraForTimeLapse) {
        // wake up the picture-taking thread right away so it can observe
        // mQuickStop and exit.
        mTakePictureCondition.signal();
    } else {
        // Force dataCallbackTimestamp() coming from the video camera to not skip the
        // next frame as we want read() to get a frame right away.
        mForceRead = true;
    }
}

// Returns true (and applies the setting) iff the requested video size is an
// exactly matching supported preview size of the camera.
bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
    // Talk to the camera service under our own identity, not the caller's.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPreviewSizes(supportedSizes);

    bool previewSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            previewSizeSupported = true;
        }
    }

    if (previewSizeSupported) {
        LOGV("Video size (%d, %d) is a supported preview size", width, height);
        params.setPreviewSize(width, height);
        CHECK(mCamera->setParameters(params.flatten()));
        return true;
    }

    return false;
}

// Picks the supported picture size with the smallest area that still covers
// the requested width and height, storing it in mPictureWidth/mPictureHeight.
// Returns false if no supported picture size is large enough.
bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPictureSizes(supportedSizes);

    int32_t minPictureSize = INT_MAX;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        // Only consider sizes that can fully contain the video frame.
        if ((pictureWidth >= width) && (pictureHeight >= height)) {
            int32_t pictureSize = pictureWidth*pictureHeight;
            if (pictureSize < minPictureSize) {
                minPictureSize = pictureSize;
                mPictureWidth = pictureWidth;
                mPictureHeight = pictureHeight;
            }
        }
    }
    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
    return (minPictureSize != INT_MAX);
}

// Computes where the video-sized crop rectangle sits inside the (larger)
// still picture: centered in both dimensions. Returns true iff cropping is
// actually needed (picture size differs from video size).
bool CameraSourceTimeLapse::computeCropRectangleOffset() {
    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
        return false;
    }

    // NOTE(review): setPictureSizeToClosestSupported() only guarantees >= in
    // each dimension; a picture matching the video size in exactly one
    // dimension would trip this strict-inequality CHECK — confirm whether
    // that combination can occur in practice.
    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));

    int32_t widthDifference = mPictureWidth - mVideoWidth;
    int32_t heightDifference = mPictureHeight - mVideoHeight;

    mCropRectStartX = widthDifference/2;
    mCropRectStartY = heightDifference/2;

    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);

    return true;
}

// Buffer-return hook. In quick-stop mode the cached last-frame copy belongs
// to this class (not to CameraSource), so it is released here directly;
// every other buffer is handed back to the base class.
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

// File-local helper: deep-copies sourceBuffer into a newly allocated
// MediaBuffer (*newBuffer) and stamps the copy with the given frame time.
void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

// Caches a copy of the most recently read buffer in mLastReadBufferCopy so
// quick-stop read()s can keep returning it. The extra add_ref() and observer
// registration route the copy's eventual release through
// signalBufferReturned() above.
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

// Normal mode delegates to CameraSource::read(). Once quick stop is active
// and a frame has been cached, every subsequent read() returns another
// reference to that same cached frame together with the last real read
// status.
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked. Make a copy of
        // the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

// static
// pthread trampoline into threadTimeLapseEntry().
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadTimeLapseEntry();
    return NULL;
}

// Still-camera capture loop: waits for the camera to become idle, takes one
// picture, then waits mTimeBetweenTimeLapseFrameCaptureUs before the next
// shot (interruptible early via mTakePictureCondition). Exits when mStarted
// goes false or quick-stop mode is entered.
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        {
            Mutex::Autolock autoLock(mCameraIdleLock);
            if (!mCameraIdle) {
                mCameraIdleCondition.wait(mCameraIdleLock);
            }
            CHECK(mCameraIdle);
            mCameraIdle = false;
        }

        // Even if mQuickStop == true we need to take one more picture
        // as a read() may be blocked, waiting for a frame to get available.
        // After this takePicture, if mQuickStop == true, we can safely exit
        // this thread as read() will make a copy of this last frame and keep
        // returning it in the quick stop mode.
        Mutex::Autolock autoLock(mQuickStopLock);
        CHECK_EQ(OK, mCamera->takePicture());
        if (mQuickStop) {
            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
            return;
        }
        // waitRelative takes nanoseconds, hence the * 1000; it releases
        // mQuickStopLock while waiting so startQuickReadReturns() can signal
        // an early wakeup.
        mTakePictureCondition.waitRelative(mQuickStopLock,
                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
    }
    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
}

// Starts capture. Still-camera mode configures the picture size, marks the
// camera idle, and spawns the joinable picture-taking thread; video-camera
// mode simply starts recording.
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();
        IPCThreadState::self()->restoreCallingIdentity(token);

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;
        mStopWaitingForIdleCamera = false;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}

// Stops capture. Still-camera mode joins the picture-taking thread and then
// waits for the final in-flight takePicture to finish before detaching the
// listener; both modes drop the cached quick-stop frame copy if one exists.
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // Last takePicture may still be underway. Wait for the camera to get
        // idle.
        Mutex::Autolock autoLock(mCameraIdleLock);
        mStopWaitingForIdleCamera = true;
        if (!mCameraIdle) {
            mCameraIdleCondition.wait(mCameraIdleLock);
        }
        CHECK(mCameraIdle);
        mCamera->setListener(NULL);

        // play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

// Video-camera frames must be handed back to the camera; still-camera frames
// are our own copies (see dataCallback), so nothing needs releasing then.
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (!mUseStillCameraForTimeLapse) {
        mCamera->releaseRecordingFrame(frame);
    }
}

// Deep-copies an IMemory into a freshly allocated MemoryBase so the camera's
// own buffer can be released independently of our copy.
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

// Allocates IMemory of final type MemoryBase with the given size.
sp<IMemory> allocateIMemory(size_t size) {
    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
    return newMemory;
}

// static
// pthread trampoline into threadStartPreview().
void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadStartPreview();
    return NULL;
}

// Restarts the preview and then marks the camera idle, waking anyone blocked
// on mCameraIdleCondition (the picture-taking loop or stopCameraRecording).
void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    Mutex::Autolock autoLock(mCameraIdleLock);
    mCameraIdle = true;
    mCameraIdleCondition.signal();
}

// Kicks off a preview restart on a detached thread.
void CameraSourceTimeLapse::restartPreview() {
    // Start this in a different thread, so that the dataCallback can return
    LOGV("restartPreview");
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

    pthread_t threadPreview;
    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
    pthread_attr_destroy(&attr);
}

// Center-crops a still picture (mPictureWidth x mPictureHeight) down to the
// video size using the offsets from computeCropRectangleOffset(). Handles
// the two YUV420 layouts advertised in mMeta: planar and semi-planar.
// Returns a freshly allocated IMemory holding the cropped image.
sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
    // find the YUV format
    int32_t srcFormat;
    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
    YUVImage::YUVFormat yuvFormat;
    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        yuvFormat = YUVImage::YUV420SemiPlanar;
    } else {
        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
        yuvFormat = YUVImage::YUV420Planar;
    }

    // allocate memory for cropped image and setup a canvas using it.
    sp<IMemory> croppedImageMemory = allocateIMemory(
            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
    YUVImage yuvImageCropped(yuvFormat,
            mVideoWidth, mVideoHeight,
            (uint8_t *)croppedImageMemory->pointer());
    YUVCanvas yuvCanvasCrop(yuvImageCropped);

    YUVImage yuvImageSource(yuvFormat,
            mPictureWidth, mPictureHeight,
            (uint8_t *)source_data->pointer());
    yuvCanvasCrop.CopyImageRect(
            Rect(mCropRectStartX, mCropRectStartY,
                mCropRectStartX + mVideoWidth,
                mCropRectStartY + mVideoHeight),
            0, 0,
            yuvImageSource);

    return croppedImageMemory;
}

// Still-camera callback. A COMPRESSED_IMAGE message only signals that
// takePicture() is finishing, so restart the preview; a RAW_IMAGE message
// carries the frame data, which is cropped (if needed) or copied and then
// forwarded with a synthesized timestamp that advances exactly one video
// frame duration per captured picture.
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}

// Consumes (and clears) the skip flag set by dataCallbackTimestamp(). The
// timestampUs parameter is currently unused.
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

// Video-camera path: decides whether an incoming frame should be dropped
// and, when it is kept, rewrites *timestampUs so that output timestamps
// advance exactly one video frame duration per kept frame. Always returns
// false in still-camera mode (timestamps are synthesized in dataCallback).
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            return false;
        }

        {
            Mutex::Autolock autoLock(mQuickStopLock);

            // mForceRead may be set to true by startQuickReadReturns(). In that
            // case don't skip this frame.
            if (mForceRead) {
                LOGV("dataCallbackTimestamp timelapse: forced read");
                mForceRead = false;
                *timestampUs = mLastFrameTimestampUs;
                return false;
            }
        }

        if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
            return false;
        }
    }
    return false;
}

// Common timestamped-frame entry point for both capture modes. In video
// mode it records whether the frame should be skipped; in still mode it
// bails out early during stop to avoid a lock-ordering deadlock (see below).
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
        const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    } else {
        Mutex::Autolock autoLock(mCameraIdleLock);
        // If we are using the still camera and stop() has been called, it may
        // be waiting for the camera to get idle. In that case return
        // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
        // to a deadlock since it tries to access CameraSource::mLock which in
        // this case is held by CameraSource::stop() currently waiting for the
        // camera to get idle. And camera will not get idle until this call
        // returns.
        if (mStopWaitingForIdleCamera) {
            return;
        }
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

}  // namespace android