CameraSource.cpp revision ae4c1ac6401185539c03ce0819e174fd1b04b136
1/* 2 * Copyright (C) 2009 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "CameraSource" 19#include <utils/Log.h> 20 21#include <OMX_Component.h> 22#include <binder/IPCThreadState.h> 23#include <media/stagefright/CameraSource.h> 24#include <media/stagefright/MediaDebug.h> 25#include <media/stagefright/MediaDefs.h> 26#include <media/stagefright/MediaErrors.h> 27#include <media/stagefright/MetaData.h> 28#include <camera/Camera.h> 29#include <camera/CameraParameters.h> 30#include <surfaceflinger/Surface.h> 31#include <utils/String8.h> 32#include <cutils/properties.h> 33 34namespace android { 35 36struct CameraSourceListener : public CameraListener { 37 CameraSourceListener(const sp<CameraSource> &source); 38 39 virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2); 40 virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr); 41 42 virtual void postDataTimestamp( 43 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr); 44 45protected: 46 virtual ~CameraSourceListener(); 47 48private: 49 wp<CameraSource> mSource; 50 51 CameraSourceListener(const CameraSourceListener &); 52 CameraSourceListener &operator=(const CameraSourceListener &); 53}; 54 55CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source) 56 : mSource(source) { 57} 58 59CameraSourceListener::~CameraSourceListener() { 60} 61 62void 
CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) { 63 LOGV("notify(%d, %d, %d)", msgType, ext1, ext2); 64} 65 66void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) { 67 LOGV("postData(%d, ptr:%p, size:%d)", 68 msgType, dataPtr->pointer(), dataPtr->size()); 69 70 sp<CameraSource> source = mSource.promote(); 71 if (source.get() != NULL) { 72 source->dataCallback(msgType, dataPtr); 73 } 74} 75 76void CameraSourceListener::postDataTimestamp( 77 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) { 78 79 sp<CameraSource> source = mSource.promote(); 80 if (source.get() != NULL) { 81 source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr); 82 } 83} 84 85static int32_t getColorFormat(const char* colorFormat) { 86 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) { 87 return OMX_COLOR_FormatYUV420Planar; 88 } 89 90 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) { 91 return OMX_COLOR_FormatYUV422SemiPlanar; 92 } 93 94 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) { 95 return OMX_COLOR_FormatYUV420SemiPlanar; 96 } 97 98 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) { 99 return OMX_COLOR_FormatYCbYCr; 100 } 101 102 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) { 103 return OMX_COLOR_Format16bitRGB565; 104 } 105 106 LOGE("Uknown color format (%s), please add it to " 107 "CameraSource::getColorFormat", colorFormat); 108 109 CHECK_EQ(0, "Unknown color format"); 110} 111 112CameraSource *CameraSource::Create() { 113 Size size; 114 size.width = -1; 115 size.height = -1; 116 117 sp<ICamera> camera; 118 return new CameraSource(camera, NULL, 0, size, -1, NULL, false); 119} 120 121// static 122CameraSource *CameraSource::CreateFromCamera( 123 const sp<ICamera>& camera, 124 const sp<ICameraRecordingProxy>& proxy, 125 int32_t cameraId, 126 Size videoSize, 127 int32_t frameRate, 128 const sp<Surface>& surface, 129 bool 
storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
                                            videoSize, frameRate, surface,
                                            storeMetaDataInVideoBuffers);
    return source;
}

// Constructor: sets member defaults and defers real setup to init().
// If init() fails, the camera is released immediately so its lock is
// not left held by a half-constructed source.
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                      videoSize, frameRate,
                      storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}

// Returns the status recorded by init(); callers must check this before
// using the source.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}

// Connects to (or adopts) the camera and its recording proxy.
// Returns -EBUSY if the camera cannot be connected/wrapped.
status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId) {

    if (camera == 0) {
        // "Cold" camera: we open it ourselves by id.
        mCamera = Camera::connect(cameraId);
        if (mCamera == 0) return -EBUSY;
        // If proxy is not passed in by applications, still use the proxy of
        // our own Camera to simplify the code.
        mCameraRecordingProxy = mCamera->getRecordingProxy();
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // "Hot" camera: wrap the ICamera handed to us by the application.
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
    }

    mCamera->lock();
    mDeathNotifier = new DeathNotifier();
    // isBinderAlive needs linkToDeath to work.
    mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);

    return OK;
}


/*
 * Check to see whether the requested video width and height is one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    LOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video output is separate, we only set the
 * video size, and applications should set the preview size
 * to some proper value, and the recording framework will not
 * change the preview size; otherwise, if the video and preview
 * output is the same, we need to set the preview to be the same
 * as the requested video size.
 *
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *        CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *        supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // No separate video sizes advertised: fall back to the preview
        // sizes and remember that setVideoSize() is unavailable.
        LOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera has the supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error (BAD_VALUE if the advertised video frame
 *         format has no OMX color-format mapping).
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration on the video size is skipped.
 * If frameRate is -1, configuration on the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {

    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            LOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            // No separate video output: video shares the preview size.
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        LOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        LOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        // NOTE(review): strstr is a substring match, so e.g. "5" would also
        // match a list containing only "15" — presumably acceptable for the
        // comma-separated lists cameras report, but worth confirming.
        if (strstr(supportedFrameRates, buf) == NULL) {
            LOGE("Requested frame rate (%d) is not supported: %s",
                 frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            LOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, check on the current width and height value setting
 * is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        LOGE("Failed to retrieve video frame size (%dx%d)",
             frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            LOGE("Failed to set video frame size to %dx%d. "
                 "The actual video size is %dx%d ", width, height,
                 frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check the requested frame rate has been successfully configured or not.
 * If the target frameRate is -1, check on the current frame rate value
 * setting is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        LOGE("Failed to set preview frame rate to %d fps. The actual "
             "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *        as the video source
 * @param videoSize the target video frame size. If both
 *        width and height in videoSize is -1, use the current
 *        width and height settings by the camera
 * @param frameRate the target frame rate in frames per second.
 *        if it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *        data or real YUV data in video buffers. Request to
 *        store meta data in video buffers may not be honored
 *        if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    status_t err = OK;
    // Drop the caller's binder identity while talking to the camera
    // service; it is restored before returning.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

// Performs the actual initialization; must run with the media server's
// own binder identity (see init()).
status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
        LOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate.
    // Re-read the parameters: verify the camera accepted our settings.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // This CHECK is good, since we just passed the lock/unlock
    // check earlier by calling mCamera->setParameters().
    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        // Best effort: fall back to real YUV data if the camera refuses.
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

    // A "glitch" is an inter-frame gap longer than this threshold; it is
    // never set below one nominal frame interval.
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth, mVideoSize.width);
    mMeta->setInt32(kKeyHeight, mVideoSize.height);
    mMeta->setInt32(kKeyStride, mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released(). This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}

void CameraSource::startCameraRecording() {
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->unlock();
    mCamera.clear();
    IPCThreadState::self()->restoreCallingIdentity(token);
    CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this)));
}

// Starts recording. meta may carry kKeyTime as the desired start time in
// microseconds; init() must have succeeded first.
status_t CameraSource::start(MetaData *meta) {
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        LOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Opt-in frame statistics via a system property.
    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}

void CameraSource::stopCameraRecording() {
    mCameraRecordingProxy->stopRecording();
}

// Releases the camera and the recording proxy: a "cold" camera (opened by
// us) is stopped and disconnected, a "hot" one (owned by the app) is only
// unlocked so the app can reclaim it.
void CameraSource::releaseCamera() {
    LOGV("releaseCamera");
    if (mCamera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            LOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        } else {
            // Unlock the camera so the application can lock it back.
            mCamera->unlock();
        }
        mCamera.clear();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}

// Stops recording: drains queued frames, waits for the encoder to return
// outstanding buffers, then stops the camera and releases it.
status_t CameraSource::stop() {
    LOGD("stop: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    // Wake any read() blocked waiting for frames.
    mFrameAvailableCondition.signal();

    releaseQueuedFrames();
    // Wait (in 3-second intervals, logging on each timeout) until the
    // encoder has returned every frame it still holds.
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) {
            LOGW("Timed out waiting for outstanding frames being encoded: %d",
                 mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
             mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
             mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        LOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    // Every received frame must be accounted for as encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    LOGD("stop: X");
    return OK;
}

// Returns one frame buffer to the camera via the recording proxy.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    }
}

// Drops every frame queued but not yet handed to the encoder, returning
// the buffers to the camera and counting them as dropped.
void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

// MediaBufferObserver callback: the encoder is done with this buffer.
// Finds the matching camera frame by memory address, returns it to the
// camera, and wakes stop() if it is waiting on outstanding frames.
// Aborts if the buffer does not match any frame being encoded.
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock
autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        // Frames are matched by the underlying memory address.
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

// MediaSource::read(): blocks until a frame is available (or recording
// stops) and returns it wrapped in a MediaBuffer that aliases the
// camera's IMemory (no copy). Seeking is not supported.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock, 1000000000LL)) {
                // If the proxy binder is dead, the camera process is gone;
                // report end of stream rather than waiting forever.
                if (!mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    LOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                LOGW("Timed out waiting for incoming camera video frames: %lld us",
                     mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        // The frame is handed back to the camera in signalBufferReturned().
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

// Handles one timestamped video frame from the camera (timestampUs is in
// microseconds). Runs on the camera callback thread under mLock.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs))
    {
        // Not recording, or the frame predates the requested start time.
        LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing; count a glitch when the
        // gap between neighboring frames exceeds the threshold.
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on mStartTimeUs holds the initial recording delay.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
         mStartTimeUs, timeUs);
    // Wake a read() blocked waiting for frames.
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    LOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

// Recording-proxy callback: timestamps arrive in nanoseconds and are
// converted to microseconds before forwarding.
void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

// Death notification for the application's recording proxy; only logged
// here — read() detects the dead binder via isBinderAlive().
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    LOGI("Camera recording proxy died");
}

}  // namespace android