// CameraSource.cpp revision 4ca2c7c913f8bd4ada13aca56d36045d42d1e00f
1/* 2 * Copyright (C) 2009 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "CameraSource" 19#include <utils/Log.h> 20 21#include <OMX_Component.h> 22#include <binder/IPCThreadState.h> 23#include <media/stagefright/CameraSource.h> 24#include <media/stagefright/MediaDebug.h> 25#include <media/stagefright/MediaDefs.h> 26#include <media/stagefright/MediaErrors.h> 27#include <media/stagefright/MetaData.h> 28#include <camera/Camera.h> 29#include <camera/CameraParameters.h> 30#include <surfaceflinger/Surface.h> 31#include <utils/String8.h> 32#include <cutils/properties.h> 33 34namespace android { 35 36struct CameraSourceListener : public CameraListener { 37 CameraSourceListener(const sp<CameraSource> &source); 38 39 virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2); 40 virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr); 41 42 virtual void postDataTimestamp( 43 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr); 44 45protected: 46 virtual ~CameraSourceListener(); 47 48private: 49 wp<CameraSource> mSource; 50 51 CameraSourceListener(const CameraSourceListener &); 52 CameraSourceListener &operator=(const CameraSourceListener &); 53}; 54 55CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source) 56 : mSource(source) { 57} 58 59CameraSourceListener::~CameraSourceListener() { 60} 61 62void 
CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) { 63 LOGV("notify(%d, %d, %d)", msgType, ext1, ext2); 64} 65 66void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) { 67 LOGV("postData(%d, ptr:%p, size:%d)", 68 msgType, dataPtr->pointer(), dataPtr->size()); 69 70 sp<CameraSource> source = mSource.promote(); 71 if (source.get() != NULL) { 72 source->dataCallback(msgType, dataPtr); 73 } 74} 75 76void CameraSourceListener::postDataTimestamp( 77 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) { 78 79 sp<CameraSource> source = mSource.promote(); 80 if (source.get() != NULL) { 81 source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr); 82 } 83} 84 85static int32_t getColorFormat(const char* colorFormat) { 86 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) { 87 return OMX_COLOR_FormatYUV420Planar; 88 } 89 90 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) { 91 return OMX_COLOR_FormatYUV422SemiPlanar; 92 } 93 94 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) { 95 return OMX_COLOR_FormatYUV420SemiPlanar; 96 } 97 98 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) { 99 return OMX_COLOR_FormatYCbYCr; 100 } 101 102 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) { 103 return OMX_COLOR_Format16bitRGB565; 104 } 105 106 LOGE("Uknown color format (%s), please add it to " 107 "CameraSource::getColorFormat", colorFormat); 108 109 CHECK_EQ(0, "Unknown color format"); 110} 111 112CameraSource *CameraSource::Create() { 113 Size size; 114 size.width = -1; 115 size.height = -1; 116 117 sp<ICamera> camera; 118 return new CameraSource(camera, NULL, 0, size, -1, NULL, false); 119} 120 121// static 122CameraSource *CameraSource::CreateFromCamera( 123 const sp<ICamera>& camera, 124 const sp<ICameraRecordingProxy>& proxy, 125 int32_t cameraId, 126 Size videoSize, 127 int32_t frameRate, 128 const sp<Surface>& surface, 129 bool 
storeMetaDataInVideoBuffers) { 130 131 CameraSource *source = new CameraSource(camera, proxy, cameraId, 132 videoSize, frameRate, surface, 133 storeMetaDataInVideoBuffers); 134 return source; 135} 136 137CameraSource::CameraSource( 138 const sp<ICamera>& camera, 139 const sp<ICameraRecordingProxy>& proxy, 140 int32_t cameraId, 141 Size videoSize, 142 int32_t frameRate, 143 const sp<Surface>& surface, 144 bool storeMetaDataInVideoBuffers) 145 : mCameraFlags(0), 146 mVideoFrameRate(-1), 147 mCamera(0), 148 mSurface(surface), 149 mNumFramesReceived(0), 150 mLastFrameTimestampUs(0), 151 mStarted(false), 152 mNumFramesEncoded(0), 153 mFirstFrameTimeUs(0), 154 mNumFramesDropped(0), 155 mNumGlitches(0), 156 mGlitchDurationThresholdUs(200000), 157 mCollectStats(false) { 158 mVideoSize.width = -1; 159 mVideoSize.height = -1; 160 161 mInitCheck = init(camera, proxy, cameraId, 162 videoSize, frameRate, 163 storeMetaDataInVideoBuffers); 164} 165 166status_t CameraSource::initCheck() const { 167 return mInitCheck; 168} 169 170status_t CameraSource::isCameraAvailable( 171 const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy, 172 int32_t cameraId) { 173 174 if (camera == 0) { 175 mCamera = Camera::connect(cameraId); 176 if (mCamera == 0) return -EBUSY; 177 // If proxy is not passed in by applications, still use the proxy of 178 // our own Camera to simplify the code. 179 mCameraRecordingProxy = mCamera->getRecordingProxy(); 180 mCameraFlags &= ~FLAGS_HOT_CAMERA; 181 } else { 182 // We get the proxy from Camera, not ICamera. We need to get the proxy 183 // to the remote Camera owned by the application. Here mCamera is a 184 // local Camera object created by us. We cannot use the proxy from 185 // mCamera here. 
186 mCamera = Camera::create(camera); 187 if (mCamera == 0) return -EBUSY; 188 mCameraRecordingProxy = proxy; 189 mCameraFlags |= FLAGS_HOT_CAMERA; 190 } 191 192 mCamera->lock(); 193 mDeathNotifier = new DeathNotifier(); 194 // isBinderAlive needs linkToDeath to work. 195 mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier); 196 197 return OK; 198} 199 200 201/* 202 * Check to see whether the requested video width and height is one 203 * of the supported sizes. 204 * @param width the video frame width in pixels 205 * @param height the video frame height in pixels 206 * @param suppportedSizes the vector of sizes that we check against 207 * @return true if the dimension (width and height) is supported. 208 */ 209static bool isVideoSizeSupported( 210 int32_t width, int32_t height, 211 const Vector<Size>& supportedSizes) { 212 213 LOGV("isVideoSizeSupported"); 214 for (size_t i = 0; i < supportedSizes.size(); ++i) { 215 if (width == supportedSizes[i].width && 216 height == supportedSizes[i].height) { 217 return true; 218 } 219 } 220 return false; 221} 222 223/* 224 * If the preview and video output is separate, we only set the 225 * the video size, and applications should set the preview size 226 * to some proper value, and the recording framework will not 227 * change the preview size; otherwise, if the video and preview 228 * output is the same, we need to set the preview to be the same 229 * as the requested video size. 230 * 231 */ 232/* 233 * Query the camera to retrieve the supported video frame sizes 234 * and also to see whether CameraParameters::setVideoSize() 235 * is supported or not. 236 * @param params CameraParameters to retrieve the information 237 * @@param isSetVideoSizeSupported retunrs whether method 238 * CameraParameters::setVideoSize() is supported or not. 239 * @param sizes returns the vector of Size objects for the 240 * supported video frame sizes advertised by the camera. 
241 */ 242static void getSupportedVideoSizes( 243 const CameraParameters& params, 244 bool *isSetVideoSizeSupported, 245 Vector<Size>& sizes) { 246 247 *isSetVideoSizeSupported = true; 248 params.getSupportedVideoSizes(sizes); 249 if (sizes.size() == 0) { 250 LOGD("Camera does not support setVideoSize()"); 251 params.getSupportedPreviewSizes(sizes); 252 *isSetVideoSizeSupported = false; 253 } 254} 255 256/* 257 * Check whether the camera has the supported color format 258 * @param params CameraParameters to retrieve the information 259 * @return OK if no error. 260 */ 261status_t CameraSource::isCameraColorFormatSupported( 262 const CameraParameters& params) { 263 mColorFormat = getColorFormat(params.get( 264 CameraParameters::KEY_VIDEO_FRAME_FORMAT)); 265 if (mColorFormat == -1) { 266 return BAD_VALUE; 267 } 268 return OK; 269} 270 271/* 272 * Configure the camera to use the requested video size 273 * (width and height) and/or frame rate. If both width and 274 * height are -1, configuration on the video size is skipped. 275 * if frameRate is -1, configuration on the frame rate 276 * is skipped. Skipping the configuration allows one to 277 * use the current camera setting without the need to 278 * actually know the specific values (see Create() method). 279 * 280 * @param params the CameraParameters to be configured 281 * @param width the target video frame width in pixels 282 * @param height the target video frame height in pixels 283 * @param frameRate the target frame rate in frames per second. 284 * @return OK if no error. 
285 */ 286status_t CameraSource::configureCamera( 287 CameraParameters* params, 288 int32_t width, int32_t height, 289 int32_t frameRate) { 290 291 Vector<Size> sizes; 292 bool isSetVideoSizeSupportedByCamera = true; 293 getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes); 294 bool isCameraParamChanged = false; 295 if (width != -1 && height != -1) { 296 if (!isVideoSizeSupported(width, height, sizes)) { 297 LOGE("Video dimension (%dx%d) is unsupported", width, height); 298 releaseCamera(); 299 return BAD_VALUE; 300 } 301 if (isSetVideoSizeSupportedByCamera) { 302 params->setVideoSize(width, height); 303 } else { 304 params->setPreviewSize(width, height); 305 } 306 isCameraParamChanged = true; 307 } else if ((width == -1 && height != -1) || 308 (width != -1 && height == -1)) { 309 // If one and only one of the width and height is -1 310 // we reject such a request. 311 LOGE("Requested video size (%dx%d) is not supported", width, height); 312 releaseCamera(); 313 return BAD_VALUE; 314 } else { // width == -1 && height == -1 315 // Do not configure the camera. 316 // Use the current width and height value setting from the camera. 317 } 318 319 if (frameRate != -1) { 320 CHECK(frameRate > 0 && frameRate <= 120); 321 const char* supportedFrameRates = 322 params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES); 323 CHECK(supportedFrameRates != NULL); 324 LOGV("Supported frame rates: %s", supportedFrameRates); 325 char buf[4]; 326 snprintf(buf, 4, "%d", frameRate); 327 if (strstr(supportedFrameRates, buf) == NULL) { 328 LOGE("Requested frame rate (%d) is not supported: %s", 329 frameRate, supportedFrameRates); 330 releaseCamera(); 331 return BAD_VALUE; 332 } 333 334 // The frame rate is supported, set the camera to the requested value. 335 params->setPreviewFrameRate(frameRate); 336 isCameraParamChanged = true; 337 } else { // frameRate == -1 338 // Do not configure the camera. 
339 // Use the current frame rate value setting from the camera 340 } 341 342 if (isCameraParamChanged) { 343 // Either frame rate or frame size needs to be changed. 344 String8 s = params->flatten(); 345 if (OK != mCamera->setParameters(s)) { 346 LOGE("Could not change settings." 347 " Someone else is using camera %p?", mCamera.get()); 348 return -EBUSY; 349 } 350 } 351 return OK; 352} 353 354/* 355 * Check whether the requested video frame size 356 * has been successfully configured or not. If both width and height 357 * are -1, check on the current width and height value setting 358 * is performed. 359 * 360 * @param params CameraParameters to retrieve the information 361 * @param the target video frame width in pixels to check against 362 * @param the target video frame height in pixels to check against 363 * @return OK if no error 364 */ 365status_t CameraSource::checkVideoSize( 366 const CameraParameters& params, 367 int32_t width, int32_t height) { 368 369 // The actual video size is the same as the preview size 370 // if the camera hal does not support separate video and 371 // preview output. In this case, we retrieve the video 372 // size from preview. 373 int32_t frameWidthActual = -1; 374 int32_t frameHeightActual = -1; 375 Vector<Size> sizes; 376 params.getSupportedVideoSizes(sizes); 377 if (sizes.size() == 0) { 378 // video size is the same as preview size 379 params.getPreviewSize(&frameWidthActual, &frameHeightActual); 380 } else { 381 // video size may not be the same as preview 382 params.getVideoSize(&frameWidthActual, &frameHeightActual); 383 } 384 if (frameWidthActual < 0 || frameHeightActual < 0) { 385 LOGE("Failed to retrieve video frame size (%dx%d)", 386 frameWidthActual, frameHeightActual); 387 return UNKNOWN_ERROR; 388 } 389 390 // Check the actual video frame size against the target/requested 391 // video frame size. 
392 if (width != -1 && height != -1) { 393 if (frameWidthActual != width || frameHeightActual != height) { 394 LOGE("Failed to set video frame size to %dx%d. " 395 "The actual video size is %dx%d ", width, height, 396 frameWidthActual, frameHeightActual); 397 return UNKNOWN_ERROR; 398 } 399 } 400 401 // Good now. 402 mVideoSize.width = frameWidthActual; 403 mVideoSize.height = frameHeightActual; 404 return OK; 405} 406 407/* 408 * Check the requested frame rate has been successfully configured or not. 409 * If the target frameRate is -1, check on the current frame rate value 410 * setting is performed. 411 * 412 * @param params CameraParameters to retrieve the information 413 * @param the target video frame rate to check against 414 * @return OK if no error. 415 */ 416status_t CameraSource::checkFrameRate( 417 const CameraParameters& params, 418 int32_t frameRate) { 419 420 int32_t frameRateActual = params.getPreviewFrameRate(); 421 if (frameRateActual < 0) { 422 LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual); 423 return UNKNOWN_ERROR; 424 } 425 426 // Check the actual video frame rate against the target/requested 427 // video frame rate. 428 if (frameRate != -1 && (frameRateActual - frameRate) != 0) { 429 LOGE("Failed to set preview frame rate to %d fps. The actual " 430 "frame rate is %d", frameRate, frameRateActual); 431 return UNKNOWN_ERROR; 432 } 433 434 // Good now. 435 mVideoFrameRate = frameRateActual; 436 return OK; 437} 438 439/* 440 * Initialize the CameraSource to so that it becomes 441 * ready for providing the video input streams as requested. 442 * @param camera the camera object used for the video source 443 * @param cameraId if camera == 0, use camera with this id 444 * as the video source 445 * @param videoSize the target video frame size. If both 446 * width and height in videoSize is -1, use the current 447 * width and heigth settings by the camera 448 * @param frameRate the target frame rate in frames per second. 
449 * if it is -1, use the current camera frame rate setting. 450 * @param storeMetaDataInVideoBuffers request to store meta 451 * data or real YUV data in video buffers. Request to 452 * store meta data in video buffers may not be honored 453 * if the source does not support this feature. 454 * 455 * @return OK if no error. 456 */ 457status_t CameraSource::init( 458 const sp<ICamera>& camera, 459 const sp<ICameraRecordingProxy>& proxy, 460 int32_t cameraId, 461 Size videoSize, 462 int32_t frameRate, 463 bool storeMetaDataInVideoBuffers) { 464 465 status_t err = OK; 466 int64_t token = IPCThreadState::self()->clearCallingIdentity(); 467 468 if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) { 469 LOGE("Camera connection could not be established."); 470 return err; 471 } 472 CameraParameters params(mCamera->getParameters()); 473 if ((err = isCameraColorFormatSupported(params)) != OK) { 474 return err; 475 } 476 477 // Set the camera to use the requested video frame size 478 // and/or frame rate. 479 if ((err = configureCamera(¶ms, 480 videoSize.width, videoSize.height, 481 frameRate))) { 482 return err; 483 } 484 485 // Check on video frame size and frame rate. 486 CameraParameters newCameraParams(mCamera->getParameters()); 487 if ((err = checkVideoSize(newCameraParams, 488 videoSize.width, videoSize.height)) != OK) { 489 return err; 490 } 491 if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) { 492 return err; 493 } 494 495 // This CHECK is good, since we just passed the lock/unlock 496 // check earlier by calling mCamera->setParameters(). 
497 CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface)); 498 499 // By default, do not store metadata in video buffers 500 mIsMetaDataStoredInVideoBuffers = false; 501 mCamera->storeMetaDataInBuffers(false); 502 if (storeMetaDataInVideoBuffers) { 503 if (OK == mCamera->storeMetaDataInBuffers(true)) { 504 mIsMetaDataStoredInVideoBuffers = true; 505 } 506 } 507 508 IPCThreadState::self()->restoreCallingIdentity(token); 509 510 int64_t glitchDurationUs = (1000000LL / mVideoFrameRate); 511 if (glitchDurationUs > mGlitchDurationThresholdUs) { 512 mGlitchDurationThresholdUs = glitchDurationUs; 513 } 514 515 // XXX: query camera for the stride and slice height 516 // when the capability becomes available. 517 mMeta = new MetaData; 518 mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); 519 mMeta->setInt32(kKeyColorFormat, mColorFormat); 520 mMeta->setInt32(kKeyWidth, mVideoSize.width); 521 mMeta->setInt32(kKeyHeight, mVideoSize.height); 522 mMeta->setInt32(kKeyStride, mVideoSize.width); 523 mMeta->setInt32(kKeySliceHeight, mVideoSize.height); 524 mMeta->setInt32(kKeyFrameRate, mVideoFrameRate); 525 return OK; 526} 527 528CameraSource::~CameraSource() { 529 if (mStarted) { 530 stop(); 531 } 532} 533 534void CameraSource::startCameraRecording() { 535 // Reset the identity to the current thread because media server owns the 536 // camera and recording is started by the applications. The applications 537 // will connect to the camera in ICameraRecordingProxy::startRecording. 
538 int64_t token = IPCThreadState::self()->clearCallingIdentity(); 539 mCamera->unlock(); 540 mCamera.clear(); 541 IPCThreadState::self()->restoreCallingIdentity(token); 542 CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this))); 543} 544 545status_t CameraSource::start(MetaData *meta) { 546 CHECK(!mStarted); 547 if (mInitCheck != OK) { 548 LOGE("CameraSource is not initialized yet"); 549 return mInitCheck; 550 } 551 552 char value[PROPERTY_VALUE_MAX]; 553 if (property_get("media.stagefright.record-stats", value, NULL) 554 && (!strcmp(value, "1") || !strcasecmp(value, "true"))) { 555 mCollectStats = true; 556 } 557 558 mStartTimeUs = 0; 559 int64_t startTimeUs; 560 if (meta && meta->findInt64(kKeyTime, &startTimeUs)) { 561 mStartTimeUs = startTimeUs; 562 } 563 564 startCameraRecording(); 565 566 mStarted = true; 567 return OK; 568} 569 570void CameraSource::stopCameraRecording() { 571 mCameraRecordingProxy->stopRecording(); 572} 573 574void CameraSource::releaseCamera() { 575 LOGV("releaseCamera"); 576 if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) { 577 LOGV("Camera was cold when we started, stopping preview"); 578 mCamera->stopPreview(); 579 } 580 mCamera.clear(); 581 mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier); 582 mCameraRecordingProxy.clear(); 583 mCameraFlags = 0; 584} 585 586status_t CameraSource::stop() { 587 LOGD("stop: E"); 588 Mutex::Autolock autoLock(mLock); 589 mStarted = false; 590 mFrameAvailableCondition.signal(); 591 592 releaseQueuedFrames(); 593 while (!mFramesBeingEncoded.empty()) { 594 if (NO_ERROR != 595 mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) { 596 LOGW("Timed out waiting for outstanding frames being encoded: %d", 597 mFramesBeingEncoded.size()); 598 } 599 } 600 stopCameraRecording(); 601 releaseCamera(); 602 603 if (mCollectStats) { 604 LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us", 605 mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped, 606 mLastFrameTimestampUs 
- mFirstFrameTimeUs); 607 } 608 609 if (mNumGlitches > 0) { 610 LOGW("%d long delays between neighboring video frames", mNumGlitches); 611 } 612 613 CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped); 614 LOGD("stop: X"); 615 return OK; 616} 617 618void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) { 619 if (mCameraRecordingProxy != NULL) { 620 mCameraRecordingProxy->releaseRecordingFrame(frame); 621 } 622} 623 624void CameraSource::releaseQueuedFrames() { 625 List<sp<IMemory> >::iterator it; 626 while (!mFramesReceived.empty()) { 627 it = mFramesReceived.begin(); 628 releaseRecordingFrame(*it); 629 mFramesReceived.erase(it); 630 ++mNumFramesDropped; 631 } 632} 633 634sp<MetaData> CameraSource::getFormat() { 635 return mMeta; 636} 637 638void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) { 639 releaseRecordingFrame(frame); 640} 641 642void CameraSource::signalBufferReturned(MediaBuffer *buffer) { 643 LOGV("signalBufferReturned: %p", buffer->data()); 644 Mutex::Autolock autoLock(mLock); 645 for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin(); 646 it != mFramesBeingEncoded.end(); ++it) { 647 if ((*it)->pointer() == buffer->data()) { 648 releaseOneRecordingFrame((*it)); 649 mFramesBeingEncoded.erase(it); 650 ++mNumFramesEncoded; 651 buffer->setObserver(0); 652 buffer->release(); 653 mFrameCompleteCondition.signal(); 654 return; 655 } 656 } 657 CHECK_EQ(0, "signalBufferReturned: bogus buffer"); 658} 659 660status_t CameraSource::read( 661 MediaBuffer **buffer, const ReadOptions *options) { 662 LOGV("read"); 663 664 *buffer = NULL; 665 666 int64_t seekTimeUs; 667 ReadOptions::SeekMode mode; 668 if (options && options->getSeekTo(&seekTimeUs, &mode)) { 669 return ERROR_UNSUPPORTED; 670 } 671 672 sp<IMemory> frame; 673 int64_t frameTime; 674 675 { 676 Mutex::Autolock autoLock(mLock); 677 while (mStarted && mFramesReceived.empty()) { 678 if (NO_ERROR != 679 mFrameAvailableCondition.waitRelative(mLock, 
1000000000LL)) { 680 if (!mCameraRecordingProxy->asBinder()->isBinderAlive()) { 681 LOGW("camera recording proxy is gone"); 682 return ERROR_END_OF_STREAM; 683 } 684 LOGW("Timed out waiting for incoming camera video frames: %lld us", 685 mLastFrameTimestampUs); 686 } 687 } 688 if (!mStarted) { 689 return OK; 690 } 691 frame = *mFramesReceived.begin(); 692 mFramesReceived.erase(mFramesReceived.begin()); 693 694 frameTime = *mFrameTimes.begin(); 695 mFrameTimes.erase(mFrameTimes.begin()); 696 mFramesBeingEncoded.push_back(frame); 697 *buffer = new MediaBuffer(frame->pointer(), frame->size()); 698 (*buffer)->setObserver(this); 699 (*buffer)->add_ref(); 700 (*buffer)->meta_data()->setInt64(kKeyTime, frameTime); 701 } 702 return OK; 703} 704 705void CameraSource::dataCallbackTimestamp(int64_t timestampUs, 706 int32_t msgType, const sp<IMemory> &data) { 707 LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs); 708 Mutex::Autolock autoLock(mLock); 709 if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) { 710 LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs); 711 releaseOneRecordingFrame(data); 712 return; 713 } 714 715 if (mNumFramesReceived > 0) { 716 CHECK(timestampUs > mLastFrameTimestampUs); 717 if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) { 718 ++mNumGlitches; 719 } 720 } 721 722 // May need to skip frame or modify timestamp. Currently implemented 723 // by the subclass CameraSourceTimeLapse. 724 if (skipCurrentFrame(timestampUs)) { 725 releaseOneRecordingFrame(data); 726 return; 727 } 728 729 mLastFrameTimestampUs = timestampUs; 730 if (mNumFramesReceived == 0) { 731 mFirstFrameTimeUs = timestampUs; 732 // Initial delay 733 if (mStartTimeUs > 0) { 734 if (timestampUs < mStartTimeUs) { 735 // Frame was captured before recording was started 736 // Drop it without updating the statistical data. 
737 releaseOneRecordingFrame(data); 738 return; 739 } 740 mStartTimeUs = timestampUs - mStartTimeUs; 741 } 742 } 743 ++mNumFramesReceived; 744 745 CHECK(data != NULL && data->size() > 0); 746 mFramesReceived.push_back(data); 747 int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs); 748 mFrameTimes.push_back(timeUs); 749 LOGV("initial delay: %lld, current time stamp: %lld", 750 mStartTimeUs, timeUs); 751 mFrameAvailableCondition.signal(); 752} 753 754bool CameraSource::isMetaDataStoredInVideoBuffers() const { 755 LOGV("isMetaDataStoredInVideoBuffers"); 756 return mIsMetaDataStoredInVideoBuffers; 757} 758 759CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) { 760 mSource = source; 761} 762 763void CameraSource::ProxyListener::dataCallbackTimestamp( 764 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) { 765 mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr); 766} 767 768void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) { 769 LOGI("Camera recording proxy died"); 770} 771 772} // namespace android 773