CameraSource.cpp revision 030b888ef6a586a1f0fafe0adc4312f775878d2b
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

// Proxies camera callbacks to the owning CameraSource. Holds only a weak
// reference so the listener cannot keep a stopped CameraSource alive.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        // The camera reports timestamps in nanoseconds; convert to microseconds.
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

// Maps a CameraParameters pixel format string to the corresponding OMX
// color format constant. Aborts (via CHECK_EQ) on an unsupported format.
static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    CHECK_EQ(0, "Unknown color format");
}

// static
CameraSource *CameraSource::Create() {
    sp<Camera> camera = Camera::connect(0);

    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSource(camera);
}

// static
CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSource(camera);
}

CameraSource::CameraSource(const sp<Camera> &camera)
    : mCamera(camera),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mFirstFrameTimeUs(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    printf("params: \"%s\"\n", s.string());

    int32_t width, height, stride, sliceHeight;
    CameraParameters params(s);
    params.getPreviewSize(&width, &height);

    // Calculate glitch duration threshold based on frame rate
    int32_t frameRate = params.getPreviewFrameRate();
    int64_t glitchDurationUs = (1000000LL / frameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
    CHECK(colorFormatStr != NULL);
    int32_t colorFormat = getColorFormat(colorFormatStr);

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    stride = width;
    sliceHeight = height;

    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, colorFormat);
    mMeta->setInt32(kKeyWidth, width);
    mMeta->setInt32(kKeyHeight, height);
    mMeta->setInt32(kKeyStride, stride);
    mMeta->setInt32(kKeySliceHeight, sliceHeight);
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}

void CameraSource::startCameraRecording() {
    CHECK_EQ(OK, mCamera->startRecording());
}

status_t CameraSource::start(MetaData *meta) {
    CHECK(!mStarted);

    // Frame statistics are collected when the system property
    // media.stagefright.record-stats is set to "1" or "true".
    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(new CameraSourceListener(this));
    startCameraRecording();
    IPCThreadState::self()->restoreCallingIdentity(token);

    mStarted = true;
    return OK;
}

void CameraSource::stopCameraRecording() {
    mCamera->stopRecording();
}

status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(NULL);
    stopCameraRecording();
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        mFrameCompleteCondition.wait(mLock);
    }
    mCamera = NULL;
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    mCamera->releaseRecordingFrame(frame);
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted) {
            // Also check mStarted here; otherwise, if stop() signals while
            // the queue is empty, this loop would go back to waiting forever.
            while (mStarted && mFramesReceived.empty()) {
                mFrameAvailableCondition.wait(mLock);
            }

            if (!mStarted) {
                return OK;
            }

            frame = *mFramesReceived.begin();
            mFramesReceived.erase(mFramesReceived.begin());

            frameTime = *mFrameTimes.begin();
            mFrameTimes.erase(mFrameTimes.begin());
            int64_t skipTimeUs;
            if (!options || !options->getSkipFrame(&skipTimeUs)) {
                skipTimeUs = frameTime;
            }
            if (skipTimeUs > frameTime) {
                LOGV("skipTimeUs: %lld us > frameTime: %lld us",
                        skipTimeUs, frameTime);
                releaseOneRecordingFrame(frame);
                ++mNumFramesDropped;
                // Safeguard against the abuse of the kSkipFrame_Option.
                if (skipTimeUs - frameTime >= 1E6) {
                    LOGE("Frame skipping requested is way too long: %lld us",
                            skipTimeUs - frameTime);
                    return UNKNOWN_ERROR;
                }
            } else {
                mFramesBeingEncoded.push_back(frame);
                *buffer = new MediaBuffer(frame->pointer(), frame->size());
                (*buffer)->setObserver(this);
                (*buffer)->add_ref();
                (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);

                return OK;
            }
        }
    }
    return OK;
}

void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on, mStartTimeUs holds the initial delay between the
            // requested start time and the first frame's capture time.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

}  // namespace android