// CameraSource.cpp revision df712ea86e6350f7005a02ab0e1c60c28a343ed0
1/* 2 * Copyright (C) 2009 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "CameraSource" 19#include <utils/Log.h> 20 21#include <OMX_Component.h> 22#include <binder/IPCThreadState.h> 23#include <media/stagefright/foundation/ADebug.h> 24#include <media/stagefright/CameraSource.h> 25#include <media/stagefright/MediaDefs.h> 26#include <media/stagefright/MediaErrors.h> 27#include <media/stagefright/MetaData.h> 28#include <camera/Camera.h> 29#include <camera/CameraParameters.h> 30#include <gui/Surface.h> 31#include <utils/String8.h> 32#include <cutils/properties.h> 33 34namespace android { 35 36static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL; 37 38struct CameraSourceListener : public CameraListener { 39 CameraSourceListener(const sp<CameraSource> &source); 40 41 virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2); 42 virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr, 43 camera_frame_metadata_t *metadata); 44 45 virtual void postDataTimestamp( 46 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr); 47 48protected: 49 virtual ~CameraSourceListener(); 50 51private: 52 wp<CameraSource> mSource; 53 54 CameraSourceListener(const CameraSourceListener &); 55 CameraSourceListener &operator=(const CameraSourceListener &); 56}; 57 58CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source) 59 : mSource(source) { 
60} 61 62CameraSourceListener::~CameraSourceListener() { 63} 64 65void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) { 66 ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2); 67} 68 69void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr, 70 camera_frame_metadata_t *metadata) { 71 ALOGV("postData(%d, ptr:%p, size:%d)", 72 msgType, dataPtr->pointer(), dataPtr->size()); 73 74 sp<CameraSource> source = mSource.promote(); 75 if (source.get() != NULL) { 76 source->dataCallback(msgType, dataPtr); 77 } 78} 79 80void CameraSourceListener::postDataTimestamp( 81 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) { 82 83 sp<CameraSource> source = mSource.promote(); 84 if (source.get() != NULL) { 85 source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr); 86 } 87} 88 89static int32_t getColorFormat(const char* colorFormat) { 90 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) { 91 return OMX_COLOR_FormatYUV420Planar; 92 } 93 94 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) { 95 return OMX_COLOR_FormatYUV422SemiPlanar; 96 } 97 98 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) { 99 return OMX_COLOR_FormatYUV420SemiPlanar; 100 } 101 102 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) { 103 return OMX_COLOR_FormatYCbYCr; 104 } 105 106 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) { 107 return OMX_COLOR_Format16bitRGB565; 108 } 109 110 if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) { 111 return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar; 112 } 113 114 ALOGE("Uknown color format (%s), please add it to " 115 "CameraSource::getColorFormat", colorFormat); 116 117 CHECK(!"Unknown color format"); 118} 119 120CameraSource *CameraSource::Create() { 121 Size size; 122 size.width = -1; 123 size.height = -1; 124 125 sp<ICamera> camera; 126 return new CameraSource(camera, NULL, 0, size, -1, NULL, false); 
127} 128 129// static 130CameraSource *CameraSource::CreateFromCamera( 131 const sp<ICamera>& camera, 132 const sp<ICameraRecordingProxy>& proxy, 133 int32_t cameraId, 134 Size videoSize, 135 int32_t frameRate, 136 const sp<Surface>& surface, 137 bool storeMetaDataInVideoBuffers) { 138 139 CameraSource *source = new CameraSource(camera, proxy, cameraId, 140 videoSize, frameRate, surface, 141 storeMetaDataInVideoBuffers); 142 return source; 143} 144 145CameraSource::CameraSource( 146 const sp<ICamera>& camera, 147 const sp<ICameraRecordingProxy>& proxy, 148 int32_t cameraId, 149 Size videoSize, 150 int32_t frameRate, 151 const sp<Surface>& surface, 152 bool storeMetaDataInVideoBuffers) 153 : mCameraFlags(0), 154 mVideoFrameRate(-1), 155 mCamera(0), 156 mSurface(surface), 157 mNumFramesReceived(0), 158 mLastFrameTimestampUs(0), 159 mStarted(false), 160 mNumFramesEncoded(0), 161 mTimeBetweenFrameCaptureUs(0), 162 mFirstFrameTimeUs(0), 163 mNumFramesDropped(0), 164 mNumGlitches(0), 165 mGlitchDurationThresholdUs(200000), 166 mCollectStats(false) { 167 mVideoSize.width = -1; 168 mVideoSize.height = -1; 169 170 mInitCheck = init(camera, proxy, cameraId, 171 videoSize, frameRate, 172 storeMetaDataInVideoBuffers); 173 if (mInitCheck != OK) releaseCamera(); 174} 175 176status_t CameraSource::initCheck() const { 177 return mInitCheck; 178} 179 180status_t CameraSource::isCameraAvailable( 181 const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy, 182 int32_t cameraId) { 183 184 if (camera == 0) { 185 mCamera = Camera::connect(cameraId); 186 if (mCamera == 0) return -EBUSY; 187 mCameraFlags &= ~FLAGS_HOT_CAMERA; 188 } else { 189 // We get the proxy from Camera, not ICamera. We need to get the proxy 190 // to the remote Camera owned by the application. Here mCamera is a 191 // local Camera object created by us. We cannot use the proxy from 192 // mCamera here. 
193 mCamera = Camera::create(camera); 194 if (mCamera == 0) return -EBUSY; 195 mCameraRecordingProxy = proxy; 196 mCameraFlags |= FLAGS_HOT_CAMERA; 197 mDeathNotifier = new DeathNotifier(); 198 // isBinderAlive needs linkToDeath to work. 199 mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier); 200 } 201 202 mCamera->lock(); 203 204 return OK; 205} 206 207 208/* 209 * Check to see whether the requested video width and height is one 210 * of the supported sizes. 211 * @param width the video frame width in pixels 212 * @param height the video frame height in pixels 213 * @param suppportedSizes the vector of sizes that we check against 214 * @return true if the dimension (width and height) is supported. 215 */ 216static bool isVideoSizeSupported( 217 int32_t width, int32_t height, 218 const Vector<Size>& supportedSizes) { 219 220 ALOGV("isVideoSizeSupported"); 221 for (size_t i = 0; i < supportedSizes.size(); ++i) { 222 if (width == supportedSizes[i].width && 223 height == supportedSizes[i].height) { 224 return true; 225 } 226 } 227 return false; 228} 229 230/* 231 * If the preview and video output is separate, we only set the 232 * the video size, and applications should set the preview size 233 * to some proper value, and the recording framework will not 234 * change the preview size; otherwise, if the video and preview 235 * output is the same, we need to set the preview to be the same 236 * as the requested video size. 237 * 238 */ 239/* 240 * Query the camera to retrieve the supported video frame sizes 241 * and also to see whether CameraParameters::setVideoSize() 242 * is supported or not. 243 * @param params CameraParameters to retrieve the information 244 * @@param isSetVideoSizeSupported retunrs whether method 245 * CameraParameters::setVideoSize() is supported or not. 246 * @param sizes returns the vector of Size objects for the 247 * supported video frame sizes advertised by the camera. 
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *     CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *     supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // No dedicated video sizes advertised: recording uses the
        // preview size, so fall back to the preview size list.
        ALOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera has the supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    // getColorFormat() CHECK-aborts on an unknown format string, so the
    // -1 test below is a defensive backstop.
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}
292 */ 293status_t CameraSource::configureCamera( 294 CameraParameters* params, 295 int32_t width, int32_t height, 296 int32_t frameRate) { 297 ALOGV("configureCamera"); 298 Vector<Size> sizes; 299 bool isSetVideoSizeSupportedByCamera = true; 300 getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes); 301 bool isCameraParamChanged = false; 302 if (width != -1 && height != -1) { 303 if (!isVideoSizeSupported(width, height, sizes)) { 304 ALOGE("Video dimension (%dx%d) is unsupported", width, height); 305 return BAD_VALUE; 306 } 307 if (isSetVideoSizeSupportedByCamera) { 308 params->setVideoSize(width, height); 309 } else { 310 params->setPreviewSize(width, height); 311 } 312 isCameraParamChanged = true; 313 } else if ((width == -1 && height != -1) || 314 (width != -1 && height == -1)) { 315 // If one and only one of the width and height is -1 316 // we reject such a request. 317 ALOGE("Requested video size (%dx%d) is not supported", width, height); 318 return BAD_VALUE; 319 } else { // width == -1 && height == -1 320 // Do not configure the camera. 321 // Use the current width and height value setting from the camera. 322 } 323 324 if (frameRate != -1) { 325 CHECK(frameRate > 0 && frameRate <= 120); 326 const char* supportedFrameRates = 327 params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES); 328 CHECK(supportedFrameRates != NULL); 329 ALOGV("Supported frame rates: %s", supportedFrameRates); 330 char buf[4]; 331 snprintf(buf, 4, "%d", frameRate); 332 if (strstr(supportedFrameRates, buf) == NULL) { 333 ALOGE("Requested frame rate (%d) is not supported: %s", 334 frameRate, supportedFrameRates); 335 return BAD_VALUE; 336 } 337 338 // The frame rate is supported, set the camera to the requested value. 339 params->setPreviewFrameRate(frameRate); 340 isCameraParamChanged = true; 341 } else { // frameRate == -1 342 // Do not configure the camera. 
343 // Use the current frame rate value setting from the camera 344 } 345 346 if (isCameraParamChanged) { 347 // Either frame rate or frame size needs to be changed. 348 String8 s = params->flatten(); 349 if (OK != mCamera->setParameters(s)) { 350 ALOGE("Could not change settings." 351 " Someone else is using camera %p?", mCamera.get()); 352 return -EBUSY; 353 } 354 } 355 return OK; 356} 357 358/* 359 * Check whether the requested video frame size 360 * has been successfully configured or not. If both width and height 361 * are -1, check on the current width and height value setting 362 * is performed. 363 * 364 * @param params CameraParameters to retrieve the information 365 * @param the target video frame width in pixels to check against 366 * @param the target video frame height in pixels to check against 367 * @return OK if no error 368 */ 369status_t CameraSource::checkVideoSize( 370 const CameraParameters& params, 371 int32_t width, int32_t height) { 372 373 ALOGV("checkVideoSize"); 374 // The actual video size is the same as the preview size 375 // if the camera hal does not support separate video and 376 // preview output. In this case, we retrieve the video 377 // size from preview. 378 int32_t frameWidthActual = -1; 379 int32_t frameHeightActual = -1; 380 Vector<Size> sizes; 381 params.getSupportedVideoSizes(sizes); 382 if (sizes.size() == 0) { 383 // video size is the same as preview size 384 params.getPreviewSize(&frameWidthActual, &frameHeightActual); 385 } else { 386 // video size may not be the same as preview 387 params.getVideoSize(&frameWidthActual, &frameHeightActual); 388 } 389 if (frameWidthActual < 0 || frameHeightActual < 0) { 390 ALOGE("Failed to retrieve video frame size (%dx%d)", 391 frameWidthActual, frameHeightActual); 392 return UNKNOWN_ERROR; 393 } 394 395 // Check the actual video frame size against the target/requested 396 // video frame size. 
397 if (width != -1 && height != -1) { 398 if (frameWidthActual != width || frameHeightActual != height) { 399 ALOGE("Failed to set video frame size to %dx%d. " 400 "The actual video size is %dx%d ", width, height, 401 frameWidthActual, frameHeightActual); 402 return UNKNOWN_ERROR; 403 } 404 } 405 406 // Good now. 407 mVideoSize.width = frameWidthActual; 408 mVideoSize.height = frameHeightActual; 409 return OK; 410} 411 412/* 413 * Check the requested frame rate has been successfully configured or not. 414 * If the target frameRate is -1, check on the current frame rate value 415 * setting is performed. 416 * 417 * @param params CameraParameters to retrieve the information 418 * @param the target video frame rate to check against 419 * @return OK if no error. 420 */ 421status_t CameraSource::checkFrameRate( 422 const CameraParameters& params, 423 int32_t frameRate) { 424 425 ALOGV("checkFrameRate"); 426 int32_t frameRateActual = params.getPreviewFrameRate(); 427 if (frameRateActual < 0) { 428 ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual); 429 return UNKNOWN_ERROR; 430 } 431 432 // Check the actual video frame rate against the target/requested 433 // video frame rate. 434 if (frameRate != -1 && (frameRateActual - frameRate) != 0) { 435 ALOGE("Failed to set preview frame rate to %d fps. The actual " 436 "frame rate is %d", frameRate, frameRateActual); 437 return UNKNOWN_ERROR; 438 } 439 440 // Good now. 441 mVideoFrameRate = frameRateActual; 442 return OK; 443} 444 445/* 446 * Initialize the CameraSource to so that it becomes 447 * ready for providing the video input streams as requested. 448 * @param camera the camera object used for the video source 449 * @param cameraId if camera == 0, use camera with this id 450 * as the video source 451 * @param videoSize the target video frame size. 
/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *     as the video source
 * @param videoSize the target video frame size. If both
 *     width and height in videoSize is -1, use the current
 *     width and height settings by the camera
 * @param frameRate the target frame rate in frames per second.
 *     if it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *     data or real YUV data in video buffers. Request to
 *     store meta data in video buffers may not be honored
 *     if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    ALOGV("init");
    status_t err = OK;
    // Perform camera setup with this process's own binder identity,
    // not the caller's; restored once initialization completes.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

// Body of init(); must run with the calling identity already cleared
// (see init()). Connects, configures and verifies the camera, then
// builds the output format (mMeta).
status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate: re-read the parameters
    // to verify the settings actually took effect.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // This CHECK is good, since we just passed the lock/unlock
    // check earlier by calling mCamera->setParameters().
    CHECK_EQ((status_t)OK, mCamera->setPreviewDisplay(mSurface));

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        // Best effort: fall back to real YUV data if the camera refuses.
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

    // Glitch threshold: at least one nominal frame interval, but never
    // below the 200 ms default set in the constructor.
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth, mVideoSize.width);
    mMeta->setInt32(kKeyHeight, mVideoSize.height);
    mMeta->setInt32(kKeyStride, mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released. This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}

// Kick off recording on the camera. For a hot camera (application-owned)
// the camera is unlocked and handed back to the app via the recording
// proxy; for a cold camera we drive it directly.
void CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        CHECK_EQ((status_t)OK,
            mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        CHECK(mCamera->recordingEnabled());
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
}

/*
 * Start providing frames. May only be called once per source.
 * @param meta optional metadata; kKeyTime, if present, gives the
 *     recording start time in microseconds.
 * @return OK on success, or the initialization error if init failed.
 */
status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Enable frame statistics logging via system property.
    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}

// Stop recording, mirroring startCameraRecording(): via the proxy for a
// hot camera, directly otherwise.
void CameraSource::stopCameraRecording() {
    ALOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}

// Release all camera resources: the Camera object (disconnecting a
// cold-started camera), the recording proxy, and the death notifier.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    if (mCamera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            ALOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        }
        mCamera->unlock();
        mCamera.clear();
        mCamera = 0;
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}

/*
 * Stop the source: drain/release all outstanding frames, stop the
 * camera, and log statistics. Called from the destructor when started.
 * @return OK always.
 */
status_t CameraSource::reset() {
    ALOGD("reset: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    // Wake up any read() blocked waiting for a frame.
    mFrameAvailableCondition.signal();

    int64_t token;
    bool isTokenValid = false;
    if (mCamera != 0) {
        token = IPCThreadState::self()->clearCallingIdentity();
        isTokenValid = true;
    }
    releaseQueuedFrames();
    // Wait for the encoder to hand back every frame it is still holding.
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
            ALOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();
    if (isTokenValid) {
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    if (mCollectStats) {
        ALOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        ALOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    // Every received frame must be accounted for as encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    ALOGD("reset: X");
    return OK;
}

// Return a recording frame buffer to the camera, via the proxy for a
// hot camera or directly (with identity cleared) otherwise.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}

// Drop every frame still queued for read(), returning the buffers to
// the camera and counting them as dropped.
void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

// Output format built in initWithCameraAccess() (raw video, size,
// color format, frame rate).
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

// Overridable hook for subclasses (e.g. time lapse) to intercept frame
// release; the base class simply returns the buffer to the camera.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

// MediaBufferObserver callback: the encoder is done with this buffer.
// Find the matching IMemory by data pointer, return it to the camera,
// and wake up reset() if it is waiting for outstanding frames.
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    // A buffer we never handed out is a programming error.
    CHECK(!"signalBufferReturned: bogus buffer");
}

/*
 * MediaSource interface: block until a frame is available (or the
 * source is stopped) and return it wrapped in a MediaBuffer that
 * aliases the camera's IMemory. Seeking is not supported.
 * @param buffer receives the frame; NULL-initialized on entry.
 * @param options only checked for an (unsupported) seek request.
 * @return OK, or ERROR_UNSUPPORTED / ERROR_END_OF_STREAM.
 */
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                // Timed out: if the app that owns the camera died, give up.
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        // The MediaBuffer wraps the IMemory's storage without copying;
        // the IMemory is kept alive in mFramesBeingEncoded until
        // signalBufferReturned().
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

/*
 * Camera callback delivering one recording frame.
 * @param timestampUs capture time in microseconds
 * @param msgType camera message type (unused here)
 * @param data the frame buffer; must be returned to the camera.
 */
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    // Drop frames delivered after stop, or captured before the
    // requested start time while nothing has been received yet.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        // NOTE(review): aborts if the camera ever delivers equal or
        // out-of-order timestamps — presumably the HAL guarantees
        // strictly increasing timestamps; confirm.
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on mStartTimeUs holds the initial delay
            // (capture time of first frame minus requested start time).
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    // Output timestamps are relative to the first frame, offset by the
    // initial delay computed above.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

// Whether video buffers carry metadata (true) or real YUV data (false),
// as decided in initWithCameraAccess().
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

// Listener registered with the application's recording proxy for the
// hot-camera path; forwards frames to the owning CameraSource.
CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    // Convert nanoseconds to microseconds for CameraSource.
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

// Invoked when the application holding the recording proxy dies;
// read() detects this via isBinderAlive() and ends the stream.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    ALOGI("Camera recording proxy died");
}

}  // namespace android