CameraSource.cpp revision 41152efd144ccf70c380d5c9a32105c02a039f43
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

// Receives camera callbacks and forwards them to a CameraSource.
// Holds only a weak reference to the source so that this listener,
// which is retained by the camera, does not keep the CameraSource
// alive after its owner has released it.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak reference: avoids a strong-reference cycle between the
    // camera-held listener and the CameraSource.
    wp<CameraSource> mSource;

    // Not copyable: declared but never defined.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void
CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) { 63 LOGV("notify(%d, %d, %d)", msgType, ext1, ext2); 64} 65 66void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) { 67 LOGV("postData(%d, ptr:%p, size:%d)", 68 msgType, dataPtr->pointer(), dataPtr->size()); 69 70 sp<CameraSource> source = mSource.promote(); 71 if (source.get() != NULL) { 72 source->dataCallback(msgType, dataPtr); 73 } 74} 75 76void CameraSourceListener::postDataTimestamp( 77 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) { 78 79 sp<CameraSource> source = mSource.promote(); 80 if (source.get() != NULL) { 81 source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr); 82 } 83} 84 85static int32_t getColorFormat(const char* colorFormat) { 86 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) { 87 return OMX_COLOR_FormatYUV420Planar; 88 } 89 90 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) { 91 return OMX_COLOR_FormatYUV422SemiPlanar; 92 } 93 94 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) { 95 return OMX_COLOR_FormatYUV420SemiPlanar; 96 } 97 98 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) { 99 return OMX_COLOR_FormatYCbYCr; 100 } 101 102 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) { 103 return OMX_COLOR_Format16bitRGB565; 104 } 105 106 LOGE("Uknown color format (%s), please add it to " 107 "CameraSource::getColorFormat", colorFormat); 108 109 CHECK_EQ(0, "Unknown color format"); 110} 111 112CameraSource *CameraSource::Create() { 113 Size size; 114 size.width = -1; 115 size.height = -1; 116 117 sp<ICamera> camera; 118 return new CameraSource(camera, 0, size, -1, NULL, false); 119} 120 121// static 122CameraSource *CameraSource::CreateFromCamera( 123 const sp<ICamera>& camera, 124 int32_t cameraId, 125 Size videoSize, 126 int32_t frameRate, 127 const sp<Surface>& surface, 128 bool storeMetaDataInVideoBuffers) { 129 130 CameraSource 
*source = new CameraSource(camera, cameraId,
            videoSize, frameRate, surface,
            storeMetaDataInVideoBuffers);
    return source;
}

CameraSource::CameraSource(
        const sp<ICamera>& camera,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        const sp<Surface>& surface,
        bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mFirstFrameTimeUs(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {

    // -1 means "not yet configured"; init() fills in the real values.
    mVideoSize.width = -1;
    mVideoSize.height = -1;

    // Result is cached; callers query it through initCheck().
    mInitCheck = init(camera, cameraId,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
}

// Returns the status of init() performed in the constructor.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}

/*
 * Obtain a usable camera: either wrap the caller-supplied remote camera
 * (a "hot" camera, already set up by the application) or connect to the
 * camera with the given id ourselves (a "cold" camera, which we must lock).
 * Sets mCamera and the FLAGS_HOT_CAMERA bit in mCameraFlags.
 * @return OK on success, -EBUSY if no camera connection could be made.
 */
status_t CameraSource::isCameraAvailable(
        const sp<ICamera>& camera, int32_t cameraId) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId);
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        mCamera = Camera::create(camera);
        mCameraFlags |= FLAGS_HOT_CAMERA;
    }

    // Is camera available?
    if (mCamera == 0) {
        LOGE("Camera connection could not be established.");
        return -EBUSY;
    }
    // A cold camera is exclusively ours; take the hardware lock.
    if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
        mCamera->lock();
    }
    return OK;
}


/*
 * Check to see whether the requested video width and height is one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
        int32_t width, int32_t height,
        const Vector<Size>& supportedSizes) {

    LOGV("isVideoSizeSupported");
    // Linear scan; the supported-size list from the camera is small.
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video output is separate, we only set the
 * video size, and applications should set the preview size
 * to some proper value, and the recording framework will not
 * change the preview size; otherwise, if the video and preview
 * output is the same, we need to set the preview to be the same
 * as the requested video size.
 *
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
        const CameraParameters& params,
        bool *isSetVideoSizeSupported,
        Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // No separate video sizes: fall back to the preview sizes,
        // which then double as the video sizes.
        LOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera has the supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    // Caches the OMX color format; -1 signals an unrecognized format string.
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration on the video size is skipped.
 * if frameRate is -1, configuration on the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {

    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            LOGE("Video dimension (%dx%d) is unsupported", width, height);
            releaseCamera();
            return BAD_VALUE;
        }
        // When the HAL has no separate video output, the preview size
        // doubles as the video size (see getSupportedVideoSizes()).
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        LOGE("Requested video size (%dx%d) is not supported", width, height);
        releaseCamera();
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        LOGV("Supported frame rates: %s", supportedFrameRates);
        // buf holds at most "120" + NUL given the CHECK above.
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        // NOTE(review): strstr performs a substring match, so e.g. a
        // requested rate of "15" would be accepted if the list contains
        // "150". Confirm the advertised rate list cannot produce such
        // false positives, or match against comma-delimited tokens.
        if (strstr(supportedFrameRates, buf) == NULL) {
            LOGE("Requested frame rate (%d) is not supported: %s",
                 frameRate, supportedFrameRates);
            releaseCamera();
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            LOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, check on the current width and height value setting
 * is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        LOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            LOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check the requested frame rate has been successfully configured or not.
 * If the target frameRate is -1, check on the current frame rate value
 * setting is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        LOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now. Cache the verified rate for use by init() and getFormat().
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
 * @param frameRate the target frame rate in frames per second.
 *      if it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. Request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
447 */ 448status_t CameraSource::init( 449 const sp<ICamera>& camera, 450 int32_t cameraId, 451 Size videoSize, 452 int32_t frameRate, 453 bool storeMetaDataInVideoBuffers) { 454 455 status_t err = OK; 456 int64_t token = IPCThreadState::self()->clearCallingIdentity(); 457 458 if ((err = isCameraAvailable(camera, cameraId)) != OK) { 459 return err; 460 } 461 CameraParameters params(mCamera->getParameters()); 462 if ((err = isCameraColorFormatSupported(params)) != OK) { 463 return err; 464 } 465 466 // Set the camera to use the requested video frame size 467 // and/or frame rate. 468 if ((err = configureCamera(¶ms, 469 videoSize.width, videoSize.height, 470 frameRate))) { 471 return err; 472 } 473 474 // Check on video frame size and frame rate. 475 CameraParameters newCameraParams(mCamera->getParameters()); 476 if ((err = checkVideoSize(newCameraParams, 477 videoSize.width, videoSize.height)) != OK) { 478 return err; 479 } 480 if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) { 481 return err; 482 } 483 484 // This CHECK is good, since we just passed the lock/unlock 485 // check earlier by calling mCamera->setParameters(). 486 CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface)); 487 488 // By default, do not store metadata in video buffers 489 mIsMetaDataStoredInVideoBuffers = false; 490 mCamera->storeMetaDataInBuffers(false); 491 if (storeMetaDataInVideoBuffers) { 492 if (OK == mCamera->storeMetaDataInBuffers(true)) { 493 mIsMetaDataStoredInVideoBuffers = true; 494 } 495 } 496 497 IPCThreadState::self()->restoreCallingIdentity(token); 498 499 int64_t glitchDurationUs = (1000000LL / mVideoFrameRate); 500 if (glitchDurationUs > mGlitchDurationThresholdUs) { 501 mGlitchDurationThresholdUs = glitchDurationUs; 502 } 503 504 // XXX: query camera for the stride and slice height 505 // when the capability becomes available. 
506 mMeta = new MetaData; 507 mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); 508 mMeta->setInt32(kKeyColorFormat, mColorFormat); 509 mMeta->setInt32(kKeyWidth, mVideoSize.width); 510 mMeta->setInt32(kKeyHeight, mVideoSize.height); 511 mMeta->setInt32(kKeyStride, mVideoSize.width); 512 mMeta->setInt32(kKeySliceHeight, mVideoSize.height); 513 mMeta->setInt32(kKeyFrameRate, mVideoFrameRate); 514 return OK; 515} 516 517CameraSource::~CameraSource() { 518 if (mStarted) { 519 stop(); 520 } 521} 522 523void CameraSource::startCameraRecording() { 524 CHECK_EQ(OK, mCamera->startRecording()); 525 CHECK(mCamera->recordingEnabled()); 526} 527 528status_t CameraSource::start(MetaData *meta) { 529 CHECK(!mStarted); 530 if (mInitCheck != OK) { 531 LOGE("CameraSource is not initialized yet"); 532 return mInitCheck; 533 } 534 535 char value[PROPERTY_VALUE_MAX]; 536 if (property_get("media.stagefright.record-stats", value, NULL) 537 && (!strcmp(value, "1") || !strcasecmp(value, "true"))) { 538 mCollectStats = true; 539 } 540 541 mStartTimeUs = 0; 542 int64_t startTimeUs; 543 if (meta && meta->findInt64(kKeyTime, &startTimeUs)) { 544 mStartTimeUs = startTimeUs; 545 } 546 547 // Call setListener first before calling startCameraRecording() 548 // to avoid recording frames being dropped. 
int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(new CameraSourceListener(this));
    startCameraRecording();
    IPCThreadState::self()->restoreCallingIdentity(token);

    mStarted = true;
    return OK;
}

// Detach our listener before stopping so no further callbacks arrive.
void CameraSource::stopCameraRecording() {
    mCamera->setListener(NULL);
    mCamera->stopRecording();
}

// Release our hold on the camera. A cold camera (one we connected
// ourselves) also has its preview stopped; a hot camera is left running
// for the application that owns it.
void CameraSource::releaseCamera() {
    LOGV("releaseCamera");
    if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
        LOGV("Camera was cold when we started, stopping preview");
        mCamera->stopPreview();
    }
    mCamera->unlock();
    mCamera.clear();
    mCamera = 0;
    mCameraFlags = 0;
}

/*
 * Stop the source: wake any reader blocked in read(), drop queued frames,
 * wait (with a 3s timeout per round) for the encoder to return outstanding
 * frames, then stop recording and release the camera.
 */
status_t CameraSource::stop() {
    LOGD("stop: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        // waitRelative releases mLock while blocked, so
        // signalBufferReturned() can make progress.
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) {
            LOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        // NOTE(review): this log message appears truncated ("during" what?)
        // — confirm the intended wording.
        LOGW("%d long delays between neighboring video frames during",
                mNumGlitches);
    }

    // Every received frame must have been either encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    LOGD("stop: X");
    return OK;
}

// Hand a recording frame buffer back to the camera for reuse.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (mCamera != NULL) {
        mCamera->releaseRecordingFrame(frame);
    }
}

// Return all frames still queued for the reader back to the camera,
// counting each as dropped.
void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while
(!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

// Format of the frames this source produces, built in init().
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

// Like releaseRecordingFrame(), but performs the binder call under this
// process' own identity.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}

/*
 * MediaBufferObserver callback: the encoder is done with a buffer.
 * Find the matching camera frame by data pointer, return it to the
 * camera, and wake stop() if it is waiting for outstanding frames.
 * Aborts on a buffer we never handed out.
 */
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        // Match by backing-store pointer: the MediaBuffer wraps the
        // IMemory's data directly (see read()).
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

/*
 * MediaSource::read(): block until a frame is available (or the source
 * is stopped), then wrap the oldest queued frame in a MediaBuffer that
 * points into the camera's IMemory. Seeking is not supported.
 * @return OK; the buffer is NULL if the source was stopped while waiting.
 */
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            // 3s timeout per round so a stalled camera is at least logged.
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock, 3000000000LL)) {
                LOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new
MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        // Extra ref so the buffer survives until signalBufferReturned().
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

/*
 * Camera callback: a new recording frame has arrived.
 * Frames are dropped (and returned to the camera) when the source is
 * stopped, when the subclass asks to skip them, or when they were
 * captured before the requested recording start time; otherwise they are
 * queued for read() with a timestamp rebased to the recording start.
 * @param timestampUs capture time in microseconds
 * @param data the frame's IMemory, owned by the camera until released
 */
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        // Counted as both received and dropped to keep the
        // received == encoded + dropped invariant checked in stop().
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGV("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
releaseOneRecordingFrame(data);
                return;
            }
            // From here on mStartTimeUs holds the initial delay between
            // the requested start time and the first captured frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    // Rebase the timestamp so the stream starts at the initial delay.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    // Wake a reader blocked in read().
    mFrameAvailableCondition.signal();
}

// Number of video buffers the camera advertises, or 0 when the source
// is not initialized. Binder call is made under our own identity.
size_t CameraSource::getNumberOfVideoBuffers() const {
    LOGV("getNumberOfVideoBuffers");
    size_t nBuffers = 0;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mInitCheck == OK && mCamera != 0) {
        nBuffers = mCamera->getNumberOfVideoBuffers();
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return nBuffers;
}

// The camera's video buffer at the given index, or NULL when the source
// is not initialized. Binder call is made under our own identity.
sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
    LOGV("getVideoBuffer: %d", index);
    sp<IMemory> buffer = 0;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mInitCheck == OK && mCamera != 0) {
        buffer = mCamera->getVideoBuffer(index);
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return buffer;
}

// Whether buffers delivered by this source carry metadata rather than
// raw YUV data (decided once in init()).
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    LOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

}  // namespace android