CameraSourceTimeLapse.cpp revision 7757f5010a771fb8824b6fdf9788f588a1577e3f
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
#include "OMX_Video.h"
#include <limits.h>

namespace android {

// Factory for CameraSourceTimeLapse.  Constructs the source and verifies
// camera initialization via initCheck(); on failure the instance is
// deleted and NULL is returned so callers never see a half-initialized
// source.
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, cameraId,
                videoSize, videoFrameRate, surface,
                timeBetweenTimeLapseFrameCaptureUs);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

// Constructor.  Prefers recording time-lapse frames from the camera's
// video/preview stream; if the requested video size is not supported
// there, it falls back to still-capture mode (takePicture in a loop),
// choosing the closest supported picture size and cropping each frame
// down to the requested video size.
//
// timeBetweenTimeLapseFrameCaptureUs: real-time interval between
// captured frames; mTimeBetweenTimeLapseVideoFramesUs is the playback
// interval (1/videoFrameRate seconds) stamped onto output frames.
CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs)
    : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, true),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (trySettingVideoSize(videoSize.width, videoSize.height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the fastest rate at which the still camera can take pictures.
        mUseStillCameraForTimeLapse = true;
        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
        mNeedCropping = computeCropRectangleOffset();
        // Advertise the requested (cropped) size in the output metadata,
        // not the larger picture size actually captured.
        mMeta->setInt32(kKeyWidth, videoSize.width);
        mMeta->setInt32(kKeyHeight, videoSize.height);
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}

// Puts the source into "quick stop" mode so that a pending read() does
// not have to wait up to mTimeBetweenTimeLapseFrameCaptureUs for the
// next frame: the picture-taking thread is woken immediately (still
// camera) or the next video frame is force-accepted (video camera).
void CameraSourceTimeLapse::startQuickReadReturns() {
    Mutex::Autolock autoLock(mQuickStopLock);
    LOGV("Enabling quick read returns");

    // Enable quick stop mode.
    mQuickStop = true;

    if (mUseStillCameraForTimeLapse) {
        // wake up the thread right away.
        mTakePictureCondition.signal();
    } else {
        // Force dataCallbackTimestamp() coming from the video camera to not skip the
        // next frame as we want read() to get a frame right away.
        mForceRead = true;
    }
}

// Attempts to configure the camera to produce width x height frames
// directly, using the dedicated video-size parameter when the camera
// supports separate video output, otherwise the preview size.  Returns
// true on success; false means the caller must fall back to still-mode.
// The binder calling identity is cleared around the camera IPC calls.
bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
    LOGV("trySettingVideoSize: %dx%d", width, height);
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        // No separate video sizes advertised: preview stream is used for
        // recording, so check the supported preview sizes instead.
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        LOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            LOGE("Failed to set preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

// For still-capture mode: picks the smallest supported picture size
// (by area) that is at least width x height, storing the result in
// mPictureWidth/mPictureHeight.  Returns false if no supported picture
// size is large enough.
bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
    LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPictureSizes(supportedSizes);

    int32_t minPictureSize = INT_MAX;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth >= width) && (pictureHeight >= height)) {
            int32_t pictureSize = pictureWidth*pictureHeight;
            if (pictureSize < minPictureSize) {
                minPictureSize = pictureSize;
                mPictureWidth = pictureWidth;
                mPictureHeight = pictureHeight;
            }
        }
    }
    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
    return (minPictureSize != INT_MAX);
}

// Computes the top-left offset (mCropRectStartX/Y) of a centered
// mVideoWidth x mVideoHeight crop window inside the captured
// mPictureWidth x mPictureHeight frame.  Returns true iff cropping is
// needed (picture size differs from video size).
bool CameraSourceTimeLapse::computeCropRectangleOffset() {
    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
        return false;
    }

    // setPictureSizeToClosestSupported() guarantees picture >= video in
    // both dimensions when the sizes are not equal.
    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));

    int32_t widthDifference = mPictureWidth - mVideoWidth;
    int32_t heightDifference = mPictureHeight - mVideoHeight;

    mCropRectStartX = widthDifference/2;
    mCropRectStartY = heightDifference/2;

    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);

    return true;
}

// Buffer-return hook.  The quick-stop copy buffer is owned by this
// class (not by CameraSource), so it is released here directly; all
// other buffers are handed back to the base class.
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

// Allocates *newBuffer and fills it with a deep copy of sourceBuffer's
// data, stamping frameTime as its kKeyTime.  Free function (namespace
// scope), used only by fillLastReadBufferCopy() below.
void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

// Caches a deep copy of sourceBuffer in mLastReadBufferCopy so that
// read() can keep returning the same last frame in quick-stop mode.
// The extra ref and observer make signalBufferReturned() recognize and
// release it.
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

// Normal operation delegates to CameraSource::read().  Once quick-stop
// is active and a copy of the last frame has been made, every
// subsequent read() re-returns that cached copy (with an extra ref)
// without blocking.
// NOTE(review): mLastReadBufferCopy is tested here outside
// mQuickStopLock; presumably safe because only the reader thread
// transitions it from NULL — confirm against callers.
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked. Make a copy of
        // the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

// pthread trampoline for the still-camera picture-taking loop.
// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadTimeLapseEntry();
    return NULL;
}

// Still-camera capture loop: waits for the camera to become idle,
// takes a picture, then sleeps (interruptibly, via
// mTakePictureCondition) for mTimeBetweenTimeLapseFrameCaptureUs.
// Exits when recording stops (mStarted == false) or quick-stop is
// requested.
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        {
            Mutex::Autolock autoLock(mCameraIdleLock);
            if (!mCameraIdle) {
                mCameraIdleCondition.wait(mCameraIdleLock);
            }
            CHECK(mCameraIdle);
            mCameraIdle = false;
        }

        // Even if mQuickStop == true we need to take one more picture
        // as a read() may be blocked, waiting for a frame to get available.
        // After this takePicture, if mQuickStop == true, we can safely exit
        // this thread as read() will make a copy of this last frame and keep
        // returning it in the quick stop mode.
        Mutex::Autolock autoLock(mQuickStopLock);
        CHECK_EQ(OK, mCamera->takePicture());
        if (mQuickStop) {
            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
            return;
        }
        // waitRelative takes nanoseconds; capture interval is in us.
        mTakePictureCondition.waitRelative(mQuickStopLock,
                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
    }
    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
}

// Starts capture.  Still mode: configures the picture size, silences
// the shutter sound, plays the recording sound once, and spawns the
// joinable picture-taking thread.  Video mode: simply starts camera
// recording.
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;
        mStopWaitingForIdleCamera = false;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
        IPCThreadState::self()->restoreCallingIdentity(token);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}

// Stops capture.  Still mode: joins the picture thread, then waits for
// any in-flight takePicture to finish (signalled via
// mCameraIdleCondition) before detaching the listener.  Both modes
// release the cached quick-stop buffer, if any.
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // Last takePicture may still be underway. Wait for the camera to get
        // idle.
        Mutex::Autolock autoLock(mCameraIdleLock);
        mStopWaitingForIdleCamera = true;
        if (!mCameraIdle) {
            mCameraIdleCondition.wait(mCameraIdleLock);
        }
        CHECK(mCameraIdle);
        mCamera->setListener(NULL);

        // play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

// Returns a recording frame to the camera.  In still mode the frames
// handed downstream are our own copies, not camera-owned buffers, so
// there is nothing to release.
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (!mUseStillCameraForTimeLapse &&
        mCamera != NULL) {
        mCamera->releaseRecordingFrame(frame);
    }
}

// Deep-copies source_data into a freshly allocated MemoryBase backed by
// its own MemoryHeapBase.
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

// Allocates IMemory of final type MemoryBase with the given size.
367sp<IMemory> allocateIMemory(size_t size) { 368 sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size); 369 sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size); 370 return newMemory; 371} 372 373// static 374void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) { 375 CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); 376 source->threadStartPreview(); 377 return NULL; 378} 379 380void CameraSourceTimeLapse::threadStartPreview() { 381 CHECK_EQ(OK, mCamera->startPreview()); 382 Mutex::Autolock autoLock(mCameraIdleLock); 383 mCameraIdle = true; 384 mCameraIdleCondition.signal(); 385} 386 387void CameraSourceTimeLapse::restartPreview() { 388 // Start this in a different thread, so that the dataCallback can return 389 LOGV("restartPreview"); 390 pthread_attr_t attr; 391 pthread_attr_init(&attr); 392 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED); 393 394 pthread_t threadPreview; 395 pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this); 396 pthread_attr_destroy(&attr); 397} 398 399sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) { 400 // find the YUV format 401 int32_t srcFormat; 402 CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat)); 403 YUVImage::YUVFormat yuvFormat; 404 if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) { 405 yuvFormat = YUVImage::YUV420SemiPlanar; 406 } else { 407 CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar); 408 yuvFormat = YUVImage::YUV420Planar; 409 } 410 411 // allocate memory for cropped image and setup a canvas using it. 
412 sp<IMemory> croppedImageMemory = allocateIMemory( 413 YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight)); 414 YUVImage yuvImageCropped(yuvFormat, 415 mVideoWidth, mVideoHeight, 416 (uint8_t *)croppedImageMemory->pointer()); 417 YUVCanvas yuvCanvasCrop(yuvImageCropped); 418 419 YUVImage yuvImageSource(yuvFormat, 420 mPictureWidth, mPictureHeight, 421 (uint8_t *)source_data->pointer()); 422 yuvCanvasCrop.CopyImageRect( 423 Rect(mCropRectStartX, mCropRectStartY, 424 mCropRectStartX + mVideoWidth, 425 mCropRectStartY + mVideoHeight), 426 0, 0, 427 yuvImageSource); 428 429 return croppedImageMemory; 430} 431 432void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) { 433 if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) { 434 // takePicture will complete after this callback, so restart preview. 435 restartPreview(); 436 return; 437 } 438 if (msgType != CAMERA_MSG_RAW_IMAGE) { 439 return; 440 } 441 442 LOGV("dataCallback for timelapse still frame"); 443 CHECK_EQ(true, mUseStillCameraForTimeLapse); 444 445 int64_t timestampUs; 446 if (mNumFramesReceived == 0) { 447 timestampUs = mStartTimeUs; 448 } else { 449 timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; 450 } 451 452 if (mNeedCropping) { 453 sp<IMemory> croppedImageData = cropYUVImage(data); 454 dataCallbackTimestamp(timestampUs, msgType, croppedImageData); 455 } else { 456 sp<IMemory> dataCopy = createIMemoryCopy(data); 457 dataCallbackTimestamp(timestampUs, msgType, dataCopy); 458 } 459} 460 461bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { 462 if (mSkipCurrentFrame) { 463 mSkipCurrentFrame = false; 464 return true; 465 } else { 466 return false; 467 } 468} 469 470bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) { 471 if (!mUseStillCameraForTimeLapse) { 472 if (mLastTimeLapseFrameRealTimestampUs == 0) { 473 // First time lapse frame. 
Initialize mLastTimeLapseFrameRealTimestampUs 474 // to current time (timestampUs) and save frame data. 475 LOGV("dataCallbackTimestamp timelapse: initial frame"); 476 477 mLastTimeLapseFrameRealTimestampUs = *timestampUs; 478 return false; 479 } 480 481 { 482 Mutex::Autolock autoLock(mQuickStopLock); 483 484 // mForceRead may be set to true by startQuickReadReturns(). In that 485 // case don't skip this frame. 486 if (mForceRead) { 487 LOGV("dataCallbackTimestamp timelapse: forced read"); 488 mForceRead = false; 489 *timestampUs = mLastFrameTimestampUs; 490 return false; 491 } 492 } 493 494 // Workaround to bypass the first 2 input frames for skipping. 495 // The first 2 output frames from the encoder are: decoder specific info and 496 // the compressed video frame data for the first input video frame. 497 if (mNumFramesEncoded >= 1 && *timestampUs < 498 (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) { 499 // Skip all frames from last encoded frame until 500 // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed. 501 // Tell the camera to release its recording frame and return. 502 LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame"); 503 return true; 504 } else { 505 // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time: 506 // - Reset mLastTimeLapseFrameRealTimestampUs to current time. 507 // - Artificially modify timestampUs to be one frame time (1/framerate) ahead 508 // of the last encoded frame's time stamp. 
509 LOGV("dataCallbackTimestamp timelapse: got timelapse frame"); 510 511 mLastTimeLapseFrameRealTimestampUs = *timestampUs; 512 *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; 513 return false; 514 } 515 } 516 return false; 517} 518 519void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, 520 const sp<IMemory> &data) { 521 if (!mUseStillCameraForTimeLapse) { 522 mSkipCurrentFrame = skipFrameAndModifyTimeStamp(×tampUs); 523 } else { 524 Mutex::Autolock autoLock(mCameraIdleLock); 525 // If we are using the still camera and stop() has been called, it may 526 // be waiting for the camera to get idle. In that case return 527 // immediately. Calling CameraSource::dataCallbackTimestamp() will lead 528 // to a deadlock since it tries to access CameraSource::mLock which in 529 // this case is held by CameraSource::stop() currently waiting for the 530 // camera to get idle. And camera will not get idle until this call 531 // returns. 532 if (mStopWaitingForIdleCamera) { 533 return; 534 } 535 } 536 CameraSource::dataCallbackTimestamp(timestampUs, msgType, data); 537} 538 539} // namespace android 540