// CameraSourceTimeLapse.cpp revision 7e3c19fbead7af39b3c0ca3d170406a98f97dbd0
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "CameraSourceTimeLapse" 19 20#include <binder/IPCThreadState.h> 21#include <binder/MemoryBase.h> 22#include <binder/MemoryHeapBase.h> 23#include <media/stagefright/foundation/ADebug.h> 24#include <media/stagefright/CameraSource.h> 25#include <media/stagefright/CameraSourceTimeLapse.h> 26#include <media/stagefright/MetaData.h> 27#include <camera/Camera.h> 28#include <camera/CameraParameters.h> 29#include <utils/String8.h> 30#include <utils/Vector.h> 31 32namespace android { 33 34// static 35CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera( 36 const sp<ICamera> &camera, 37 const sp<ICameraRecordingProxy> &proxy, 38 int32_t cameraId, 39 const String16& clientName, 40 uid_t clientUid, 41 Size videoSize, 42 int32_t videoFrameRate, 43 const sp<IGraphicBufferProducer>& surface, 44 int64_t timeBetweenFrameCaptureUs, 45 bool storeMetaDataInVideoBuffers) { 46 47 CameraSourceTimeLapse *source = new 48 CameraSourceTimeLapse(camera, proxy, cameraId, 49 clientName, clientUid, 50 videoSize, videoFrameRate, surface, 51 timeBetweenFrameCaptureUs, 52 storeMetaDataInVideoBuffers); 53 54 if (source != NULL) { 55 if (source->initCheck() != OK) { 56 delete source; 57 return NULL; 58 } 59 } 60 return source; 61} 62 63CameraSourceTimeLapse::CameraSourceTimeLapse( 64 const sp<ICamera>& camera, 65 const 
sp<ICameraRecordingProxy>& proxy, 66 int32_t cameraId, 67 const String16& clientName, 68 uid_t clientUid, 69 Size videoSize, 70 int32_t videoFrameRate, 71 const sp<IGraphicBufferProducer>& surface, 72 int64_t timeBetweenFrameCaptureUs, 73 bool storeMetaDataInVideoBuffers) 74 : CameraSource(camera, proxy, cameraId, clientName, clientUid, 75 videoSize, videoFrameRate, surface, 76 storeMetaDataInVideoBuffers), 77 mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate), 78 mLastTimeLapseFrameRealTimestampUs(0), 79 mSkipCurrentFrame(false) { 80 81 mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs; 82 ALOGD("starting time lapse mode: %lld us", 83 mTimeBetweenFrameCaptureUs); 84 85 mVideoWidth = videoSize.width; 86 mVideoHeight = videoSize.height; 87 88 if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) { 89 releaseCamera(); 90 mInitCheck = NO_INIT; 91 } 92 93 // Initialize quick stop variables. 94 mQuickStop = false; 95 mForceRead = false; 96 mLastReadBufferCopy = NULL; 97 mStopWaitingForIdleCamera = false; 98} 99 100CameraSourceTimeLapse::~CameraSourceTimeLapse() { 101 if (mLastReadBufferCopy) { 102 mLastReadBufferCopy->release(); 103 mLastReadBufferCopy = NULL; 104 } 105} 106 107void CameraSourceTimeLapse::startQuickReadReturns() { 108 ALOGV("startQuickReadReturns"); 109 Mutex::Autolock autoLock(mQuickStopLock); 110 111 // Enable quick stop mode. 112 mQuickStop = true; 113 114 // Force dataCallbackTimestamp() coming from the video camera to 115 // not skip the next frame as we want read() to get a get a frame 116 // right away. 
117 mForceRead = true; 118} 119 120bool CameraSourceTimeLapse::trySettingVideoSize( 121 int32_t width, int32_t height) { 122 123 ALOGV("trySettingVideoSize"); 124 int64_t token = IPCThreadState::self()->clearCallingIdentity(); 125 String8 s = mCamera->getParameters(); 126 127 CameraParameters params(s); 128 Vector<Size> supportedSizes; 129 params.getSupportedVideoSizes(supportedSizes); 130 bool videoOutputSupported = false; 131 if (supportedSizes.size() == 0) { 132 params.getSupportedPreviewSizes(supportedSizes); 133 } else { 134 videoOutputSupported = true; 135 } 136 137 bool videoSizeSupported = false; 138 for (uint32_t i = 0; i < supportedSizes.size(); ++i) { 139 int32_t pictureWidth = supportedSizes[i].width; 140 int32_t pictureHeight = supportedSizes[i].height; 141 142 if ((pictureWidth == width) && (pictureHeight == height)) { 143 videoSizeSupported = true; 144 } 145 } 146 147 bool isSuccessful = false; 148 if (videoSizeSupported) { 149 ALOGV("Video size (%d, %d) is supported", width, height); 150 if (videoOutputSupported) { 151 params.setVideoSize(width, height); 152 } else { 153 params.setPreviewSize(width, height); 154 } 155 if (mCamera->setParameters(params.flatten()) == OK) { 156 isSuccessful = true; 157 } else { 158 ALOGE("Failed to set preview size to %dx%d", width, height); 159 isSuccessful = false; 160 } 161 } 162 163 IPCThreadState::self()->restoreCallingIdentity(token); 164 return isSuccessful; 165} 166 167void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) { 168 ALOGV("signalBufferReturned"); 169 Mutex::Autolock autoLock(mQuickStopLock); 170 if (mQuickStop && (buffer == mLastReadBufferCopy)) { 171 buffer->setObserver(NULL); 172 buffer->release(); 173 } else { 174 return CameraSource::signalBufferReturned(buffer); 175 } 176} 177 178void createMediaBufferCopy( 179 const MediaBuffer& sourceBuffer, 180 int64_t frameTime, 181 MediaBuffer **newBuffer) { 182 183 ALOGV("createMediaBufferCopy"); 184 size_t sourceSize = 
sourceBuffer.size(); 185 void* sourcePointer = sourceBuffer.data(); 186 187 (*newBuffer) = new MediaBuffer(sourceSize); 188 memcpy((*newBuffer)->data(), sourcePointer, sourceSize); 189 190 (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime); 191} 192 193void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) { 194 ALOGV("fillLastReadBufferCopy"); 195 int64_t frameTime; 196 CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime)); 197 createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy); 198 mLastReadBufferCopy->add_ref(); 199 mLastReadBufferCopy->setObserver(this); 200} 201 202status_t CameraSourceTimeLapse::read( 203 MediaBuffer **buffer, const ReadOptions *options) { 204 ALOGV("read"); 205 if (mLastReadBufferCopy == NULL) { 206 mLastReadStatus = CameraSource::read(buffer, options); 207 208 // mQuickStop may have turned to true while read was blocked. 209 // Make a copy of the buffer in that case. 210 Mutex::Autolock autoLock(mQuickStopLock); 211 if (mQuickStop && *buffer) { 212 fillLastReadBufferCopy(**buffer); 213 } 214 return mLastReadStatus; 215 } else { 216 (*buffer) = mLastReadBufferCopy; 217 (*buffer)->add_ref(); 218 return mLastReadStatus; 219 } 220} 221 222sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy( 223 const sp<IMemory> &source_data) { 224 225 ALOGV("createIMemoryCopy"); 226 size_t source_size = source_data->size(); 227 void* source_pointer = source_data->pointer(); 228 229 sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size); 230 sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size); 231 memcpy(newMemory->pointer(), source_pointer, source_size); 232 return newMemory; 233} 234 235bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { 236 ALOGV("skipCurrentFrame"); 237 if (mSkipCurrentFrame) { 238 mSkipCurrentFrame = false; 239 return true; 240 } else { 241 return false; 242 } 243} 244 245bool 
CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) { 246 ALOGV("skipFrameAndModifyTimeStamp"); 247 if (mLastTimeLapseFrameRealTimestampUs == 0) { 248 // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs 249 // to current time (timestampUs) and save frame data. 250 ALOGV("dataCallbackTimestamp timelapse: initial frame"); 251 252 mLastTimeLapseFrameRealTimestampUs = *timestampUs; 253 return false; 254 } 255 256 { 257 Mutex::Autolock autoLock(mQuickStopLock); 258 259 // mForceRead may be set to true by startQuickReadReturns(). In that 260 // case don't skip this frame. 261 if (mForceRead) { 262 ALOGV("dataCallbackTimestamp timelapse: forced read"); 263 mForceRead = false; 264 *timestampUs = 265 mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; 266 267 // Really make sure that this video recording frame will not be dropped. 268 if (*timestampUs < mStartTimeUs) { 269 ALOGI("set timestampUs to start time stamp %lld us", mStartTimeUs); 270 *timestampUs = mStartTimeUs; 271 } 272 return false; 273 } 274 } 275 276 // Workaround to bypass the first 2 input frames for skipping. 277 // The first 2 output frames from the encoder are: decoder specific info and 278 // the compressed video frame data for the first input video frame. 279 if (mNumFramesEncoded >= 1 && *timestampUs < 280 (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenFrameCaptureUs)) { 281 // Skip all frames from last encoded frame until 282 // sufficient time (mTimeBetweenFrameCaptureUs) has passed. 283 // Tell the camera to release its recording frame and return. 284 ALOGV("dataCallbackTimestamp timelapse: skipping intermediate frame"); 285 return true; 286 } else { 287 // Desired frame has arrived after mTimeBetweenFrameCaptureUs time: 288 // - Reset mLastTimeLapseFrameRealTimestampUs to current time. 289 // - Artificially modify timestampUs to be one frame time (1/framerate) ahead 290 // of the last encoded frame's time stamp. 
291 ALOGV("dataCallbackTimestamp timelapse: got timelapse frame"); 292 293 mLastTimeLapseFrameRealTimestampUs = *timestampUs; 294 *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; 295 return false; 296 } 297 return false; 298} 299 300void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, 301 const sp<IMemory> &data) { 302 ALOGV("dataCallbackTimestamp"); 303 mSkipCurrentFrame = skipFrameAndModifyTimeStamp(×tampUs); 304 CameraSource::dataCallbackTimestamp(timestampUs, msgType, data); 305} 306 307} // namespace android 308