CameraSource.cpp revision 6e0c00ba53d8e5ad14d9e447c3c040ce6fd81a47
1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <inttypes.h>
18
19//#define LOG_NDEBUG 0
20#define LOG_TAG "CameraSource"
21#include <utils/Log.h>
22
#include <stdlib.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
38
39#if LOG_NDEBUG
40#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
41#else
42#define UNUSED_UNLESS_VERBOSE(x)
43#endif
44
45namespace android {
46
47static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
48
// Listener registered with a locally-owned ("cold") Camera to receive
// frame and event callbacks; forwards them to the owning CameraSource.
// Holds only a weak reference so it cannot keep the source alive.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // Generic event notification from the camera (codes are only logged here).
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Non-timestamped data callback; the frame metadata argument is unused.
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    // Timestamped video-frame callback; timestamp is nsecs_t (converted
    // to microseconds before being forwarded to the source).
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;  // weak back-reference to the owning source

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
68
// Store a weak reference to the source; it is promoted per-callback.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
72
// Nothing to release: mSource is a weak pointer.
CameraSourceListener::~CameraSourceListener() {
}
75
76void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
77    UNUSED_UNLESS_VERBOSE(msgType);
78    UNUSED_UNLESS_VERBOSE(ext1);
79    UNUSED_UNLESS_VERBOSE(ext2);
80    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
81}
82
83void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
84                                    camera_frame_metadata_t * /* metadata */) {
85    ALOGV("postData(%d, ptr:%p, size:%zu)",
86         msgType, dataPtr->pointer(), dataPtr->size());
87
88    sp<CameraSource> source = mSource.promote();
89    if (source.get() != NULL) {
90        source->dataCallback(msgType, dataPtr);
91    }
92}
93
94void CameraSourceListener::postDataTimestamp(
95        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
96
97    sp<CameraSource> source = mSource.promote();
98    if (source.get() != NULL) {
99        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
100    }
101}
102
103static int32_t getColorFormat(const char* colorFormat) {
104    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
105       return OMX_COLOR_FormatYUV420Planar;
106    }
107
108    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
109       return OMX_COLOR_FormatYUV422SemiPlanar;
110    }
111
112    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
113        return OMX_COLOR_FormatYUV420SemiPlanar;
114    }
115
116    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
117        return OMX_COLOR_FormatYCbYCr;
118    }
119
120    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
121       return OMX_COLOR_Format16bitRGB565;
122    }
123
124    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
125       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
126    }
127
128    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
129        return OMX_COLOR_FormatAndroidOpaque;
130    }
131
132    ALOGE("Uknown color format (%s), please add it to "
133         "CameraSource::getColorFormat", colorFormat);
134
135    CHECK(!"Unknown color format");
136    return -1;
137}
138
139CameraSource *CameraSource::Create(const String16 &clientName) {
140    Size size;
141    size.width = -1;
142    size.height = -1;
143
144    sp<hardware::ICamera> camera;
145    return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
146            Camera::USE_CALLING_PID, size, -1, NULL, false);
147}
148
149// static
150CameraSource *CameraSource::CreateFromCamera(
151    const sp<hardware::ICamera>& camera,
152    const sp<ICameraRecordingProxy>& proxy,
153    int32_t cameraId,
154    const String16& clientName,
155    uid_t clientUid,
156    pid_t clientPid,
157    Size videoSize,
158    int32_t frameRate,
159    const sp<IGraphicBufferProducer>& surface,
160    bool storeMetaDataInVideoBuffers) {
161
162    CameraSource *source = new CameraSource(camera, proxy, cameraId,
163            clientName, clientUid, clientPid, videoSize, frameRate, surface,
164            storeMetaDataInVideoBuffers);
165    return source;
166}
167
// Constructor: records defaults, then performs full initialization via
// init(). On failure the camera is released immediately; callers must
// consult initCheck() before using the object.
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),  // 200 ms default; init may raise it to one frame duration
      mCollectStats(false) {
    // Sentinel values: -1 means "use the camera's current setting".
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    // If initialization failed, make sure the camera lock is dropped.
    if (mInitCheck != OK) releaseCamera();
}
203
// Result of the initialization performed in the constructor; must be OK
// before the source is used.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
207
/*
 * Establish a connection to the camera and lock it for our exclusive
 * configuration. With no camera supplied, we connect to camera
 * `cameraId` ourselves (a "cold" camera); otherwise we wrap the
 * application-supplied remote camera (a "hot" camera) and remember its
 * recording proxy.
 *
 * NOTE(review): despite the name, this method mutates mCamera,
 * mCameraFlags, mCameraRecordingProxy and mDeathNotifier.
 *
 * @return OK on success, -EBUSY if the camera cannot be obtained.
 */
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        // Cold camera: connect ourselves and clear the hot-camera flag.
        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Prevent other clients from reconfiguring the camera while we use it.
    mCamera->lock();

    return OK;
}
234
235
236/*
237 * Check to see whether the requested video width and height is one
238 * of the supported sizes.
239 * @param width the video frame width in pixels
240 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
242 * @return true if the dimension (width and height) is supported.
243 */
244static bool isVideoSizeSupported(
245    int32_t width, int32_t height,
246    const Vector<Size>& supportedSizes) {
247
248    ALOGV("isVideoSizeSupported");
249    for (size_t i = 0; i < supportedSizes.size(); ++i) {
250        if (width  == supportedSizes[i].width &&
251            height == supportedSizes[i].height) {
252            return true;
253        }
254    }
255    return false;
256}
257
258/*
259 * If the preview and video output is separate, we only set the
260 * the video size, and applications should set the preview size
261 * to some proper value, and the recording framework will not
262 * change the preview size; otherwise, if the video and preview
263 * output is the same, we need to set the preview to be the same
264 * as the requested video size.
265 *
266 */
267/*
268 * Query the camera to retrieve the supported video frame sizes
269 * and also to see whether CameraParameters::setVideoSize()
270 * is supported or not.
271 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
273 *      CameraParameters::setVideoSize() is supported or not.
274 * @param sizes returns the vector of Size objects for the
275 *      supported video frame sizes advertised by the camera.
276 */
277static void getSupportedVideoSizes(
278    const CameraParameters& params,
279    bool *isSetVideoSizeSupported,
280    Vector<Size>& sizes) {
281
282    *isSetVideoSizeSupported = true;
283    params.getSupportedVideoSizes(sizes);
284    if (sizes.size() == 0) {
285        ALOGD("Camera does not support setVideoSize()");
286        params.getSupportedPreviewSizes(sizes);
287        *isSetVideoSizeSupported = false;
288    }
289}
290
291/*
292 * Check whether the camera has the supported color format
293 * @param params CameraParameters to retrieve the information
294 * @return OK if no error.
295 */
296status_t CameraSource::isCameraColorFormatSupported(
297        const CameraParameters& params) {
298    mColorFormat = getColorFormat(params.get(
299            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
300    if (mColorFormat == -1) {
301        return BAD_VALUE;
302    }
303    return OK;
304}
305
306/*
307 * Configure the camera to use the requested video size
308 * (width and height) and/or frame rate. If both width and
309 * height are -1, configuration on the video size is skipped.
310 * if frameRate is -1, configuration on the frame rate
311 * is skipped. Skipping the configuration allows one to
312 * use the current camera setting without the need to
313 * actually know the specific values (see Create() method).
314 *
315 * @param params the CameraParameters to be configured
316 * @param width the target video frame width in pixels
317 * @param height the target video frame height in pixels
318 * @param frameRate the target frame rate in frames per second.
319 * @return OK if no error.
320 */
321status_t CameraSource::configureCamera(
322        CameraParameters* params,
323        int32_t width, int32_t height,
324        int32_t frameRate) {
325    ALOGV("configureCamera");
326    Vector<Size> sizes;
327    bool isSetVideoSizeSupportedByCamera = true;
328    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
329    bool isCameraParamChanged = false;
330    if (width != -1 && height != -1) {
331        if (!isVideoSizeSupported(width, height, sizes)) {
332            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
333            return BAD_VALUE;
334        }
335        if (isSetVideoSizeSupportedByCamera) {
336            params->setVideoSize(width, height);
337        } else {
338            params->setPreviewSize(width, height);
339        }
340        isCameraParamChanged = true;
341    } else if ((width == -1 && height != -1) ||
342               (width != -1 && height == -1)) {
343        // If one and only one of the width and height is -1
344        // we reject such a request.
345        ALOGE("Requested video size (%dx%d) is not supported", width, height);
346        return BAD_VALUE;
347    } else {  // width == -1 && height == -1
348        // Do not configure the camera.
349        // Use the current width and height value setting from the camera.
350    }
351
352    if (frameRate != -1) {
353        CHECK(frameRate > 0 && frameRate <= 120);
354        const char* supportedFrameRates =
355                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
356        CHECK(supportedFrameRates != NULL);
357        ALOGV("Supported frame rates: %s", supportedFrameRates);
358        char buf[4];
359        snprintf(buf, 4, "%d", frameRate);
360        if (strstr(supportedFrameRates, buf) == NULL) {
361            ALOGE("Requested frame rate (%d) is not supported: %s",
362                frameRate, supportedFrameRates);
363            return BAD_VALUE;
364        }
365
366        // The frame rate is supported, set the camera to the requested value.
367        params->setPreviewFrameRate(frameRate);
368        isCameraParamChanged = true;
369    } else {  // frameRate == -1
370        // Do not configure the camera.
371        // Use the current frame rate value setting from the camera
372    }
373
374    if (isCameraParamChanged) {
375        // Either frame rate or frame size needs to be changed.
376        String8 s = params->flatten();
377        if (OK != mCamera->setParameters(s)) {
378            ALOGE("Could not change settings."
379                 " Someone else is using camera %p?", mCamera.get());
380            return -EBUSY;
381        }
382    }
383    return OK;
384}
385
386/*
387 * Check whether the requested video frame size
388 * has been successfully configured or not. If both width and height
389 * are -1, check on the current width and height value setting
390 * is performed.
391 *
392 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
395 * @return OK if no error
396 */
397status_t CameraSource::checkVideoSize(
398        const CameraParameters& params,
399        int32_t width, int32_t height) {
400
401    ALOGV("checkVideoSize");
402    // The actual video size is the same as the preview size
403    // if the camera hal does not support separate video and
404    // preview output. In this case, we retrieve the video
405    // size from preview.
406    int32_t frameWidthActual = -1;
407    int32_t frameHeightActual = -1;
408    Vector<Size> sizes;
409    params.getSupportedVideoSizes(sizes);
410    if (sizes.size() == 0) {
411        // video size is the same as preview size
412        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
413    } else {
414        // video size may not be the same as preview
415        params.getVideoSize(&frameWidthActual, &frameHeightActual);
416    }
417    if (frameWidthActual < 0 || frameHeightActual < 0) {
418        ALOGE("Failed to retrieve video frame size (%dx%d)",
419                frameWidthActual, frameHeightActual);
420        return UNKNOWN_ERROR;
421    }
422
423    // Check the actual video frame size against the target/requested
424    // video frame size.
425    if (width != -1 && height != -1) {
426        if (frameWidthActual != width || frameHeightActual != height) {
427            ALOGE("Failed to set video frame size to %dx%d. "
428                    "The actual video size is %dx%d ", width, height,
429                    frameWidthActual, frameHeightActual);
430            return UNKNOWN_ERROR;
431        }
432    }
433
434    // Good now.
435    mVideoSize.width = frameWidthActual;
436    mVideoSize.height = frameHeightActual;
437    return OK;
438}
439
440/*
441 * Check the requested frame rate has been successfully configured or not.
442 * If the target frameRate is -1, check on the current frame rate value
443 * setting is performed.
444 *
445 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
447 * @return OK if no error.
448 */
449status_t CameraSource::checkFrameRate(
450        const CameraParameters& params,
451        int32_t frameRate) {
452
453    ALOGV("checkFrameRate");
454    int32_t frameRateActual = params.getPreviewFrameRate();
455    if (frameRateActual < 0) {
456        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
457        return UNKNOWN_ERROR;
458    }
459
460    // Check the actual video frame rate against the target/requested
461    // video frame rate.
462    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
463        ALOGE("Failed to set preview frame rate to %d fps. The actual "
464                "frame rate is %d", frameRate, frameRateActual);
465        return UNKNOWN_ERROR;
466    }
467
468    // Good now.
469    mVideoFrameRate = frameRateActual;
470    return OK;
471}
472
473/*
 * Initialize the CameraSource so that it becomes
475 * ready for providing the video input streams as requested.
476 * @param camera the camera object used for the video source
477 * @param cameraId if camera == 0, use camera with this id
478 *      as the video source
479 * @param videoSize the target video frame size. If both
480 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
482 * @param frameRate the target frame rate in frames per second.
483 *      if it is -1, use the current camera frame rate setting.
484 * @param storeMetaDataInVideoBuffers request to store meta
485 *      data or real YUV data in video buffers. Request to
486 *      store meta data in video buffers may not be honored
487 *      if the source does not support this feature.
488 *
489 * @return OK if no error.
490 */
491status_t CameraSource::init(
492        const sp<hardware::ICamera>& camera,
493        const sp<ICameraRecordingProxy>& proxy,
494        int32_t cameraId,
495        const String16& clientName,
496        uid_t clientUid,
497        pid_t clientPid,
498        Size videoSize,
499        int32_t frameRate,
500        bool storeMetaDataInVideoBuffers) {
501
502    ALOGV("init");
503    status_t err = OK;
504    int64_t token = IPCThreadState::self()->clearCallingIdentity();
505    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
506                               videoSize, frameRate,
507                               storeMetaDataInVideoBuffers);
508    IPCThreadState::self()->restoreCallingIdentity(token);
509    return err;
510}
511
/*
 * Set up the buffer-queue path used in VIDEO_BUFFER_MODE_BUFFER_QUEUE:
 * create a producer/consumer pair, configure the consumer's default
 * buffer size/format/dataspace, hand the producer end to the camera as
 * its video target, allocate an IMemory heap used to pass frames as
 * VideoNativeMetadata, and start the listener thread that consumes
 * frames.
 *
 * @param width default buffer width in pixels
 * @param height default buffer height in pixels
 * @param format HAL pixel format for the buffers
 * @param dataSpace dataspace set on the consumer
 * @param bufferCount requested number of video buffers; the consumer
 *        reserves kConsumerBufferCount extra on top
 * @return OK on success; ALREADY_EXISTS if called twice; otherwise the
 *         error from the failing setup step.
 */
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // CPU-readable buffers by default; implementation-defined (opaque)
    // buffers are destined for the hardware encoder instead.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // The camera fills the producer end of the queue with video frames.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    size_t bufferSize = sizeof(VideoNativeMetadata);
    mMemoryHeapBase = new MemoryHeapBase(bufferSize * bufferCount, 0,
            "StageFright-CameraSource-BufferHeap");
    for (uint32_t i = 0; i < bufferCount; i++) {
        mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * bufferSize, bufferSize));
    }

    // Listener thread that pulls frames from the consumer side.
    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
582
/*
 * Body of init(), run with the binder calling identity cleared.
 * Connects and locks the camera, validates the color format, applies
 * and verifies the requested video size and frame rate, sets the
 * preview target and the video buffer mode, and fills in mMeta with
 * the resulting raw video stream format.
 * @return OK if no error.
 */
status_t CameraSource::initWithCameraAccess(
        const sp<hardware::ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            clientName, clientUid, clientPid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    // (OK is 0, so a truthy assignment here means failure.)
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may already set a surface to the camera.
    if (mSurface != NULL) {
        // This CHECK is good, since we just passed the lock/unlock
        // check earlier by calling mCamera->setParameters().
        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
    }

    // By default, store real data in video buffers.
    mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
    if (storeMetaDataInVideoBuffers) {
        // Prefer the buffer-queue mode, then metadata callbacks, and
        // fall back to plain YUV callbacks if neither is accepted.
        if (OK == mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
        } else if (OK == mCamera->setVideoBufferMode(
                hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
        }
    }

    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
        err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
        if (err != OK) {
            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
            return err;
        }
    }

    // Raise the glitch (long inter-frame delay) threshold to at least
    // one frame duration at the configured rate.
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}
669
670CameraSource::~CameraSource() {
671    if (mStarted) {
672        reset();
673    } else if (mInitCheck == OK) {
674        // Camera is initialized but because start() is never called,
675        // the lock on Camera is never released(). This makes sure
676        // Camera's lock is released in this case.
677        releaseCamera();
678    }
679}
680
681status_t CameraSource::startCameraRecording() {
682    ALOGV("startCameraRecording");
683    // Reset the identity to the current thread because media server owns the
684    // camera and recording is started by the applications. The applications
685    // will connect to the camera in ICameraRecordingProxy::startRecording.
686    int64_t token = IPCThreadState::self()->clearCallingIdentity();
687    status_t err;
688
689    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
690        // Initialize buffer queue.
691        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
692                (android_dataspace_t)mEncoderDataSpace,
693                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
694        if (err != OK) {
695            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
696                    strerror(-err), err);
697            return err;
698        }
699    } else {
700        if (mNumInputBuffers > 0) {
701            err = mCamera->sendCommand(
702                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
703
704            // This could happen for CameraHAL1 clients; thus the failure is
705            // not a fatal error
706            if (err != OK) {
707                ALOGW("Failed to set video buffer count to %d due to %d",
708                    mNumInputBuffers, err);
709            }
710        }
711
712        err = mCamera->sendCommand(
713            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);
714
715        // This could happen for CameraHAL1 clients; thus the failure is
716        // not a fatal error
717        if (err != OK) {
718            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
719                    mEncoderFormat, mEncoderDataSpace, err);
720        }
721    }
722
723    err = OK;
724    if (mCameraFlags & FLAGS_HOT_CAMERA) {
725        mCamera->unlock();
726        mCamera.clear();
727        if ((err = mCameraRecordingProxy->startRecording(
728                new ProxyListener(this))) != OK) {
729            ALOGE("Failed to start recording, received error: %s (%d)",
730                    strerror(-err), err);
731        }
732    } else {
733        mCamera->setListener(new CameraSourceListener(this));
734        mCamera->startRecording();
735        if (!mCamera->recordingEnabled()) {
736            err = -EINVAL;
737            ALOGE("Failed to start recording");
738        }
739    }
740    IPCThreadState::self()->restoreCallingIdentity(token);
741    return err;
742}
743
/*
 * Start video capture. Must not be called twice (CHECK), and fails
 * fast if initialization failed. Optional parameters are read from
 * |meta|: kKeyTime (start time, us), kKeyNumBuffers (encoder input
 * buffer count), kKeyPixelFormat and kKeyColorSpace (encoder format /
 * dataspace).
 *
 * @param meta optional start parameters; may be NULL.
 * @return OK on success, or the error from startCameraRecording().
 */
status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Enable frame statistics collection when the system property is set.
    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    // Defaults, possibly overridden by |meta| below.
    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    mEncoderDataSpace = HAL_DATASPACE_BT709;

    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }

        // apply encoder color format if specified
        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
            ALOGV("Using encoder format: %#x", mEncoderFormat);
        }
        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
            ALOGV("Using encoder data space: %#x", mEncoderDataSpace);
        }
    }

    status_t err;
    if ((err = startCameraRecording()) == OK) {
        mStarted = true;
    }

    return err;
}
791
792void CameraSource::stopCameraRecording() {
793    ALOGV("stopCameraRecording");
794    if (mCameraFlags & FLAGS_HOT_CAMERA) {
795        if (mCameraRecordingProxy != 0) {
796            mCameraRecordingProxy->stopRecording();
797        }
798    } else {
799        if (mCamera != 0) {
800            mCamera->setListener(NULL);
801            mCamera->stopRecording();
802        }
803    }
804}
805
/*
 * Release our reference to the camera. A "cold" camera (one we
 * connected ourselves) gets its preview stopped and the connection
 * torn down; a "hot" (application-owned) camera is only unlocked.
 * Also unlinks the death notifier and drops the recording proxy.
 *
 * NOTE(review): the binder calls are deliberately made outside mLock;
 * the lock only guards the member reads/writes — presumably to avoid
 * holding mLock across remote calls. Confirm before restructuring.
 */
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        // Talk to the camera with the caller's binder identity dropped.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
838
/*
 * Stop recording and tear down all recording state: wake any blocked
 * reader, drain queued frames, wait for the encoder to return its
 * outstanding frames, stop the camera, shut down the buffer-queue
 * listener thread, and finally release the camera.
 * @return OK (always).
 */
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        // Wake up any reader waiting for the next frame.
        mFrameAvailableCondition.signal();

        // Binder calls below need the caller's identity cleared, but
        // only when we still hold a camera.
        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        // Wait until the encoder has returned every frame it holds;
        // each timeout only logs a warning and the wait continues.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must be accounted for at this point.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    // Shut down the buffer-queue listener thread, if one was started.
    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
893
// Returns a recording frame to its producer. In buffer-queue mode the frame
// wraps a VideoNativeMetadata payload; the corresponding BufferItem is looked
// up and released back to the consumer, and the IMemory slot is recycled.
// Otherwise the frame is handed back to the recording proxy or the camera.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
        // Only accept frames carved out of our own heap; anything else is a
        // stale or foreign buffer and is ignored.
        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
            return;
        }

        // The frame's payload lives at (heap base + offset) and was filled in
        // by processBufferQueueFrame().
        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
                (uint8_t*)heap->getBase() + offset);

        // Find the corresponding buffer item for the native window buffer.
        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
        if (index == NAME_NOT_FOUND) {
            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
            return;
        }

        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
        mReceivedBufferItemMap.removeItemsAt(index);
        mVideoBufferConsumer->releaseBuffer(buffer);
        // Recycle the IMemory slot and wake any producer waiting for one in
        // processBufferQueueFrame().
        mMemoryBases.push_back(frame);
        mMemoryBaseAvailableCond.signal();
    } else if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        // Direct camera call: clear/restore the binder identity around IPC.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}
931
932void CameraSource::releaseQueuedFrames() {
933    List<sp<IMemory> >::iterator it;
934    while (!mFramesReceived.empty()) {
935        it = mFramesReceived.begin();
936        releaseRecordingFrame(*it);
937        mFramesReceived.erase(it);
938        ++mNumFramesDropped;
939    }
940}
941
// Returns the output format metadata describing this source's video frames.
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
945
// Thin forwarder to releaseRecordingFrame(); kept as a separate hook point
// for subclasses and internal callers.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
949
950void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
951    ALOGV("signalBufferReturned: %p", buffer->data());
952    Mutex::Autolock autoLock(mLock);
953    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
954         it != mFramesBeingEncoded.end(); ++it) {
955        if ((*it)->pointer() ==  buffer->data()) {
956            releaseOneRecordingFrame((*it));
957            mFramesBeingEncoded.erase(it);
958            ++mNumFramesEncoded;
959            buffer->setObserver(0);
960            buffer->release();
961            mFrameCompleteCondition.signal();
962            return;
963        }
964    }
965    CHECK(!"signalBufferReturned: bogus buffer");
966}
967
// MediaSource::read(): blocks until a camera frame is available (or the
// source is stopped), then wraps the oldest queued frame in a MediaBuffer
// without copying. Seeking is not supported. On stop, returns OK with
// *buffer left NULL.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Wait for a frame, periodically timing out to detect a dead
        // recording proxy (camera process crash -> end of stream).
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        // Pop the oldest frame and its timestamp (the two lists are kept in
        // lockstep by the enqueue paths).
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        // Track the frame as in-flight until signalBufferReturned().
        mFramesBeingEncoded.push_back(frame);
        // Zero-copy: the MediaBuffer aliases the IMemory's storage.
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
1014
// Decides whether an incoming frame at timestampUs should be dropped, and
// updates per-stream bookkeeping (mLastFrameTimestampUs, mFirstFrameTimeUs,
// glitch count, start-time offset) for frames that are kept.
// Must be called with mLock held. Returns true if the frame is to be dropped.
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Drop frames that arrive after stop, or before recording officially
    // started (before the first frame has been accepted).
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing; drop out-of-order frames.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // Count unusually long inter-frame gaps for diagnostics.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // From here on, mStartTimeUs holds the capture-start delay
            // (first frame time minus requested start time), which the
            // enqueue paths add onto normalized frame times.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
1054
// Camera callback delivering one recorded frame (callback/metadata modes).
// Either returns the frame immediately (when it should be skipped) or queues
// it, with a timestamp normalized to the recording timeline, and wakes the
// reader blocked in read().
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType __unused, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);

    if (shouldSkipFrameLocked(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    // Normalize onto the recording timeline: start offset plus time elapsed
    // since the first accepted frame.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1075
1076CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
1077        const sp<CameraSource>& cameraSource) {
1078    mConsumer = consumer;
1079    mConsumer->setFrameAvailableListener(this);
1080    mCameraSource = cameraSource;
1081}
1082
1083void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
1084    ALOGV("%s: onFrameAvailable", __FUNCTION__);
1085
1086    Mutex::Autolock l(mLock);
1087
1088    if (!mFrameAvailable) {
1089        mFrameAvailable = true;
1090        mFrameAvailableSignal.signal();
1091    }
1092}
1093
1094bool CameraSource::BufferQueueListener::threadLoop() {
1095    if (mConsumer == nullptr || mCameraSource == nullptr) {
1096        return false;
1097    }
1098
1099    {
1100        Mutex::Autolock l(mLock);
1101        while (!mFrameAvailable) {
1102            if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1103                return true;
1104            }
1105        }
1106        mFrameAvailable = false;
1107    }
1108
1109    BufferItem buffer;
1110    while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1111        mCameraSource->processBufferQueueFrame(buffer);
1112    }
1113
1114    return true;
1115}
1116
// Handles one buffer acquired from the buffer queue: skips/releases it if the
// frame should be dropped, otherwise packages it as VideoNativeMetadata in a
// recycled IMemory slot, records the pBuffer -> BufferItem mapping needed to
// release it later, and queues it for read().
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; the rest of this class works in us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    // Wait for a recycled IMemory slot (returned by releaseRecordingFrame());
    // on timeout the frame is dropped rather than blocking indefinitely.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    // No fence: the buffer is assumed ready for the consumer.
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // Normalize onto the recording timeline (see dataCallbackTimestamp).
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1162
1163bool CameraSource::isMetaDataStoredInVideoBuffers() const {
1164    ALOGV("isMetaDataStoredInVideoBuffers");
1165
1166    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
1167    // buffer queue.
1168    return (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA ||
1169            mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
1170}
1171
1172CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
1173    mSource = source;
1174}
1175
1176void CameraSource::ProxyListener::dataCallbackTimestamp(
1177        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
1178    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
1179}
1180
// Death recipient for the camera recording proxy binder. Only logs here;
// read() independently detects the dead proxy via isBinderAlive() and
// returns ERROR_END_OF_STREAM.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1184
1185}  // namespace android
1186