CameraSource.cpp revision 98a668f6ea51e4d894d2ebb61a0e18287fb14008
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
#define UNUSED_UNLESS_VERBOSE(x)
#endif

namespace android {

static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;

struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    UNUSED_UNLESS_VERBOSE(msgType);
    UNUSED_UNLESS_VERBOSE(ext1);
    UNUSED_UNLESS_VERBOSE(ext2);
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
                                    camera_frame_metadata_t * /* metadata */) {
    ALOGV("postData(%d, ptr:%p, size:%zu)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat,
            CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
        return OMX_COLOR_FormatAndroidOpaque;
    }

    ALOGE("Unknown color format (%s), please add it to "
          "CameraSource::getColorFormat", colorFormat);

    CHECK(!"Unknown color format");
    return -1;
}

CameraSource *CameraSource::Create(const String16 &clientName) {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
            Camera::USE_CALLING_PID, size, -1, NULL, false);
}

// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
            clientName, clientUid, clientPid, videoSize, frameRate, surface,
            storeMetaDataInVideoBuffers);
    return source;
}

CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    mCamera->lock();

    return OK;
}


/*
 * Check to see whether the requested video width and height is one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    ALOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video output is separate, we only set the
 * video size, and applications should set the preview size
 * to some proper value, and the recording framework will not
 * change the preview size; otherwise, if the video and preview
 * output is the same, we need to set the preview to be the same
 * as the requested video size.
 *
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        ALOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera has the supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration on the video size is skipped.
 * If frameRate is -1, configuration on the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    ALOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        ALOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        ALOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        if (strstr(supportedFrameRates, buf) == NULL) {
            ALOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            ALOGE("Could not change settings."
                  " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, check on the current width and height value setting
 * is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully configured or not.
 * If the target frameRate is -1, check on the current frame rate value
 * setting is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    ALOGV("checkFrameRate");
    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        ALOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize are -1, use the current
 *      width and height settings of the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. Request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    ALOGV("init");
    status_t err = OK;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    size_t bufferSize = sizeof(VideoNativeMetadata);
    mMemoryHeapBase = new MemoryHeapBase(bufferSize * bufferCount, 0,
            "StageFright-CameraSource-BufferHeap");
    for (uint32_t i = 0; i < bufferCount; i++) {
        mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * bufferSize, bufferSize));
    }

    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            clientName, clientUid, clientPid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may already set a surface to the camera.
    if (mSurface != NULL) {
        // This CHECK is good, since we just passed the lock/unlock
        // check earlier by calling mCamera->setParameters().
        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
    }

    // By default, store real data in video buffers.
    mVideoBufferMode = ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
    if (storeMetaDataInVideoBuffers) {
        if (OK == mCamera->setVideoBufferMode(ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
            mVideoBufferMode = ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
        } else if (OK == mCamera->setVideoBufferMode(
                ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
            mVideoBufferMode = ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
        }
    }

    if (mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
        err = mCamera->setVideoBufferMode(ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
        if (err != OK) {
            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
            return err;
        }
    }

    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth, mVideoSize.width);
    mMeta->setInt32(kKeyHeight, mVideoSize.height);
    mMeta->setInt32(kKeyStride, mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released(). This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}

status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;

    if (mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Initialize buffer queue.
        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
                (android_dataspace_t)mEncoderDataSpace,
                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
        if (err != OK) {
            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
                    strerror(-err), err);
            return err;
        }
    } else {
        if (mNumInputBuffers > 0) {
            err = mCamera->sendCommand(
                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

            // This could happen for CameraHAL1 clients; thus the failure is
            // not a fatal error
            if (err != OK) {
                ALOGW("Failed to set video buffer count to %d due to %d",
                    mNumInputBuffers, err);
            }
        }

        err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
                    mEncoderFormat, mEncoderDataSpace, err);
        }
    }

    err = OK;
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording(
                new ProxyListener(this))) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}
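
// Illustrative sketch (disabled): one way a recorder client might fill in the
// optional MetaData keys that start() below reads (kKeyTime, kKeyNumBuffers,
// kKeyPixelFormat, kKeyColorSpace). The helper name, the buffer count of 4 and
// the use of systemTime() from utils/Timers.h are assumptions for illustration
// only, not part of this file's API.
#if 0
static status_t startCameraSourceSketch(const sp<CameraSource> &source) {
    sp<MetaData> startMeta = new MetaData;
    startMeta->setInt64(kKeyTime, systemTime() / 1000);  // recording start time in us
    startMeta->setInt32(kKeyNumBuffers, 4);              // request 4 video input buffers
    startMeta->setInt32(kKeyPixelFormat, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
    startMeta->setInt32(kKeyColorSpace, HAL_DATASPACE_BT709);
    return source->start(startMeta.get());
}
#endif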

status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    mEncoderDataSpace = HAL_DATASPACE_BT709;

    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }

        // apply encoder color format if specified
        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
            ALOGV("Using encoder format: %#x", mEncoderFormat);
        }
        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
            ALOGV("Using encoder data space: %#x", mEncoderDataSpace);
        }
    }

    status_t err;
    if ((err = startCameraRecording()) == OK) {
        mStarted = true;
    }

    return err;
}

void CameraSource::stopCameraRecording() {
    ALOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}

void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}

status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mFrameAvailableCondition.signal();

        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    if (mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
            return;
        }

        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
                (uint8_t*)heap->getBase() + offset);

        // Find the corresponding buffer item for the native window buffer.
        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
        if (index == NAME_NOT_FOUND) {
            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
            return;
        }

        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
        mReceivedBufferItemMap.removeItemsAt(index);
        mVideoBufferConsumer->releaseBuffer(buffer);
        mMemoryBases.push_back(frame);
    } else if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}

status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}

void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType __unused, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);

    if (shouldSkipFrameLocked(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
        const sp<CameraSource>& cameraSource) {
    mConsumer = consumer;
    mConsumer->setFrameAvailableListener(this);
    mCameraSource = cameraSource;
}

void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
    ALOGV("%s: onFrameAvailable", __FUNCTION__);

    Mutex::Autolock l(mLock);

    if (!mFrameAvailable) {
        mFrameAvailable = true;
        mFrameAvailableSignal.signal();
    }
}

bool CameraSource::BufferQueueListener::threadLoop() {
    if (mConsumer == nullptr || mCameraSource == nullptr) {
        return false;
    }

    {
        Mutex::Autolock l(mLock);
        while (!mFrameAvailable) {
            if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
                return true;
            }
        }
        mFrameAvailable = false;
    }

    BufferItem buffer;
    while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
        mCameraSource->processBufferQueueFrame(buffer);
    }

    return true;
}

void CameraSource::processBufferQueueFrame(const BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    if (mMemoryBases.empty()) {
        ALOGW("%s: No available memory base. Dropping a recording frame.", __FUNCTION__);
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    ++mNumFramesReceived;

    // Find an available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");

    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
    // buffer queue.
    return (mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA ||
            mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}

}  // namespace android
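
// Illustrative sketch (disabled): a minimal consumer loop over a started
// CameraSource, roughly as an encoder wrapper might run it. The helper name and
// the consumeFrame() call are hypothetical stand-ins; real clients hand the
// frame to an encoder and do considerably more error handling.
#if 0
static void drainCameraSourceSketch(const android::sp<android::CameraSource> &source) {
    using namespace android;
    MediaBuffer *buffer = NULL;
    while (source->read(&buffer, NULL) == OK && buffer != NULL) {
        int64_t frameTimeUs = 0;
        buffer->meta_data()->findInt64(kKeyTime, &frameTimeUs);
        // consumeFrame(buffer->data(), buffer->size(), frameTimeUs);  // hypothetical
        // Releasing the buffer returns the frame to CameraSource through
        // signalBufferReturned(), which recycles it back to the camera.
        buffer->release();
        buffer = NULL;
    }
}
#endif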