CameraSource.cpp revision 8cca0750a84c2d97224c0cfef7cf255308ee80b3
1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <inttypes.h>
18
19//#define LOG_NDEBUG 0
20#define LOG_TAG "CameraSource"
21#include <utils/Log.h>
22
23#include <OMX_Component.h>
24#include <binder/IPCThreadState.h>
25#include <binder/MemoryBase.h>
26#include <binder/MemoryHeapBase.h>
27#include <media/hardware/HardwareAPI.h>
28#include <media/stagefright/foundation/ADebug.h>
29#include <media/stagefright/CameraSource.h>
30#include <media/stagefright/MediaDefs.h>
31#include <media/stagefright/MediaErrors.h>
32#include <media/stagefright/MetaData.h>
33#include <camera/Camera.h>
34#include <camera/CameraParameters.h>
35#include <gui/Surface.h>
36#include <utils/String8.h>
37#include <cutils/properties.h>
38
39#if LOG_NDEBUG
40#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
41#else
42#define UNUSED_UNLESS_VERBOSE(x)
43#endif
44
45namespace android {
46
// Extra time (3 s, in ns) added to the per-frame interval when reset()
// waits for frames that are still being encoded.
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
48
// Listener that receives camera callbacks and forwards them to a
// CameraSource. Holds only a weak reference so the listener itself cannot
// keep the source alive.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // Status/error notification from the camera (logged only).
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Frame data without a timestamp; forwarded to CameraSource::dataCallback.
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    // Frame data with an nsecs_t capture timestamp; forwarded to
    // CameraSource::dataCallbackTimestamp (converted to microseconds).
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak reference to the target source; promoted on each callback.
    wp<CameraSource> mSource;

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
68
// Stores a weak reference to the source (wp<>), so no ownership is taken.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
72
// Nothing to clean up; mSource is a weak reference.
CameraSourceListener::~CameraSourceListener() {
}
75
// Camera status notification; only logged at verbose level. The
// UNUSED_UNLESS_VERBOSE casts silence unused-parameter warnings when
// ALOGV compiles to nothing (LOG_NDEBUG set).
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    UNUSED_UNLESS_VERBOSE(msgType);
    UNUSED_UNLESS_VERBOSE(ext1);
    UNUSED_UNLESS_VERBOSE(ext2);
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
82
83void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
84                                    camera_frame_metadata_t * /* metadata */) {
85    ALOGV("postData(%d, ptr:%p, size:%zu)",
86         msgType, dataPtr->pointer(), dataPtr->size());
87
88    sp<CameraSource> source = mSource.promote();
89    if (source.get() != NULL) {
90        source->dataCallback(msgType, dataPtr);
91    }
92}
93
94void CameraSourceListener::postDataTimestamp(
95        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
96
97    sp<CameraSource> source = mSource.promote();
98    if (source.get() != NULL) {
99        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
100    }
101}
102
103static int32_t getColorFormat(const char* colorFormat) {
104    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
105       return OMX_COLOR_FormatYUV420Planar;
106    }
107
108    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
109       return OMX_COLOR_FormatYUV422SemiPlanar;
110    }
111
112    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
113        return OMX_COLOR_FormatYUV420SemiPlanar;
114    }
115
116    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
117        return OMX_COLOR_FormatYCbYCr;
118    }
119
120    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
121       return OMX_COLOR_Format16bitRGB565;
122    }
123
124    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
125       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
126    }
127
128    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
129        return OMX_COLOR_FormatAndroidOpaque;
130    }
131
132    ALOGE("Uknown color format (%s), please add it to "
133         "CameraSource::getColorFormat", colorFormat);
134
135    CHECK(!"Unknown color format");
136    return -1;
137}
138
139CameraSource *CameraSource::Create(const String16 &clientName) {
140    Size size;
141    size.width = -1;
142    size.height = -1;
143
144    sp<ICamera> camera;
145    return new CameraSource(camera, NULL, 0, clientName, -1,
146            size, -1, NULL, false);
147}
148
149// static
150CameraSource *CameraSource::CreateFromCamera(
151    const sp<ICamera>& camera,
152    const sp<ICameraRecordingProxy>& proxy,
153    int32_t cameraId,
154    const String16& clientName,
155    uid_t clientUid,
156    Size videoSize,
157    int32_t frameRate,
158    const sp<IGraphicBufferProducer>& surface,
159    bool storeMetaDataInVideoBuffers) {
160
161    CameraSource *source = new CameraSource(camera, proxy, cameraId,
162            clientName, clientUid, videoSize, frameRate, surface,
163            storeMetaDataInVideoBuffers);
164    return source;
165}
166
// Constructs a CameraSource and immediately performs full initialization.
// On failure the camera is released right away; callers must consult
// initCheck() before using the source.
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 means "not yet determined"; filled in by checkVideoSize().
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}
201
// Returns the result of the initialization done in the constructor;
// OK only when the camera was set up successfully.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
205
// Connects to (or adopts) the camera and locks it for exclusive use.
// When 'camera' is NULL we connect to 'cameraId' ourselves ("cold" camera);
// otherwise we adopt the application's camera and keep its recording proxy
// ("hot" camera, FLAGS_HOT_CAMERA set). Returns -EBUSY if no camera handle
// could be obtained.
status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Acquire the camera hardware lock.
    mCamera->lock();

    return OK;
}
232
233
234/*
235 * Check to see whether the requested video width and height is one
236 * of the supported sizes.
237 * @param width the video frame width in pixels
238 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
240 * @return true if the dimension (width and height) is supported.
241 */
242static bool isVideoSizeSupported(
243    int32_t width, int32_t height,
244    const Vector<Size>& supportedSizes) {
245
246    ALOGV("isVideoSizeSupported");
247    for (size_t i = 0; i < supportedSizes.size(); ++i) {
248        if (width  == supportedSizes[i].width &&
249            height == supportedSizes[i].height) {
250            return true;
251        }
252    }
253    return false;
254}
255
256/*
257 * If the preview and video output is separate, we only set the
258 * the video size, and applications should set the preview size
259 * to some proper value, and the recording framework will not
260 * change the preview size; otherwise, if the video and preview
261 * output is the same, we need to set the preview to be the same
262 * as the requested video size.
263 *
264 */
265/*
266 * Query the camera to retrieve the supported video frame sizes
267 * and also to see whether CameraParameters::setVideoSize()
268 * is supported or not.
269 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
271 *      CameraParameters::setVideoSize() is supported or not.
272 * @param sizes returns the vector of Size objects for the
273 *      supported video frame sizes advertised by the camera.
274 */
275static void getSupportedVideoSizes(
276    const CameraParameters& params,
277    bool *isSetVideoSizeSupported,
278    Vector<Size>& sizes) {
279
280    *isSetVideoSizeSupported = true;
281    params.getSupportedVideoSizes(sizes);
282    if (sizes.size() == 0) {
283        ALOGD("Camera does not support setVideoSize()");
284        params.getSupportedPreviewSizes(sizes);
285        *isSetVideoSizeSupported = false;
286    }
287}
288
289/*
290 * Check whether the camera has the supported color format
291 * @param params CameraParameters to retrieve the information
292 * @return OK if no error.
293 */
294status_t CameraSource::isCameraColorFormatSupported(
295        const CameraParameters& params) {
296    mColorFormat = getColorFormat(params.get(
297            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
298    if (mColorFormat == -1) {
299        return BAD_VALUE;
300    }
301    return OK;
302}
303
304/*
305 * Configure the camera to use the requested video size
306 * (width and height) and/or frame rate. If both width and
307 * height are -1, configuration on the video size is skipped.
308 * if frameRate is -1, configuration on the frame rate
309 * is skipped. Skipping the configuration allows one to
310 * use the current camera setting without the need to
311 * actually know the specific values (see Create() method).
312 *
313 * @param params the CameraParameters to be configured
314 * @param width the target video frame width in pixels
315 * @param height the target video frame height in pixels
316 * @param frameRate the target frame rate in frames per second.
317 * @return OK if no error.
318 */
319status_t CameraSource::configureCamera(
320        CameraParameters* params,
321        int32_t width, int32_t height,
322        int32_t frameRate) {
323    ALOGV("configureCamera");
324    Vector<Size> sizes;
325    bool isSetVideoSizeSupportedByCamera = true;
326    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
327    bool isCameraParamChanged = false;
328    if (width != -1 && height != -1) {
329        if (!isVideoSizeSupported(width, height, sizes)) {
330            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
331            return BAD_VALUE;
332        }
333        if (isSetVideoSizeSupportedByCamera) {
334            params->setVideoSize(width, height);
335        } else {
336            params->setPreviewSize(width, height);
337        }
338        isCameraParamChanged = true;
339    } else if ((width == -1 && height != -1) ||
340               (width != -1 && height == -1)) {
341        // If one and only one of the width and height is -1
342        // we reject such a request.
343        ALOGE("Requested video size (%dx%d) is not supported", width, height);
344        return BAD_VALUE;
345    } else {  // width == -1 && height == -1
346        // Do not configure the camera.
347        // Use the current width and height value setting from the camera.
348    }
349
350    if (frameRate != -1) {
351        CHECK(frameRate > 0 && frameRate <= 120);
352        const char* supportedFrameRates =
353                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
354        CHECK(supportedFrameRates != NULL);
355        ALOGV("Supported frame rates: %s", supportedFrameRates);
356        char buf[4];
357        snprintf(buf, 4, "%d", frameRate);
358        if (strstr(supportedFrameRates, buf) == NULL) {
359            ALOGE("Requested frame rate (%d) is not supported: %s",
360                frameRate, supportedFrameRates);
361            return BAD_VALUE;
362        }
363
364        // The frame rate is supported, set the camera to the requested value.
365        params->setPreviewFrameRate(frameRate);
366        isCameraParamChanged = true;
367    } else {  // frameRate == -1
368        // Do not configure the camera.
369        // Use the current frame rate value setting from the camera
370    }
371
372    if (isCameraParamChanged) {
373        // Either frame rate or frame size needs to be changed.
374        String8 s = params->flatten();
375        if (OK != mCamera->setParameters(s)) {
376            ALOGE("Could not change settings."
377                 " Someone else is using camera %p?", mCamera.get());
378            return -EBUSY;
379        }
380    }
381    return OK;
382}
383
384/*
385 * Check whether the requested video frame size
386 * has been successfully configured or not. If both width and height
387 * are -1, check on the current width and height value setting
388 * is performed.
389 *
390 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
393 * @return OK if no error
394 */
395status_t CameraSource::checkVideoSize(
396        const CameraParameters& params,
397        int32_t width, int32_t height) {
398
399    ALOGV("checkVideoSize");
400    // The actual video size is the same as the preview size
401    // if the camera hal does not support separate video and
402    // preview output. In this case, we retrieve the video
403    // size from preview.
404    int32_t frameWidthActual = -1;
405    int32_t frameHeightActual = -1;
406    Vector<Size> sizes;
407    params.getSupportedVideoSizes(sizes);
408    if (sizes.size() == 0) {
409        // video size is the same as preview size
410        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
411    } else {
412        // video size may not be the same as preview
413        params.getVideoSize(&frameWidthActual, &frameHeightActual);
414    }
415    if (frameWidthActual < 0 || frameHeightActual < 0) {
416        ALOGE("Failed to retrieve video frame size (%dx%d)",
417                frameWidthActual, frameHeightActual);
418        return UNKNOWN_ERROR;
419    }
420
421    // Check the actual video frame size against the target/requested
422    // video frame size.
423    if (width != -1 && height != -1) {
424        if (frameWidthActual != width || frameHeightActual != height) {
425            ALOGE("Failed to set video frame size to %dx%d. "
426                    "The actual video size is %dx%d ", width, height,
427                    frameWidthActual, frameHeightActual);
428            return UNKNOWN_ERROR;
429        }
430    }
431
432    // Good now.
433    mVideoSize.width = frameWidthActual;
434    mVideoSize.height = frameHeightActual;
435    return OK;
436}
437
438/*
439 * Check the requested frame rate has been successfully configured or not.
440 * If the target frameRate is -1, check on the current frame rate value
441 * setting is performed.
442 *
443 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
445 * @return OK if no error.
446 */
447status_t CameraSource::checkFrameRate(
448        const CameraParameters& params,
449        int32_t frameRate) {
450
451    ALOGV("checkFrameRate");
452    int32_t frameRateActual = params.getPreviewFrameRate();
453    if (frameRateActual < 0) {
454        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
455        return UNKNOWN_ERROR;
456    }
457
458    // Check the actual video frame rate against the target/requested
459    // video frame rate.
460    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
461        ALOGE("Failed to set preview frame rate to %d fps. The actual "
462                "frame rate is %d", frameRate, frameRateActual);
463        return UNKNOWN_ERROR;
464    }
465
466    // Good now.
467    mVideoFrameRate = frameRateActual;
468    return OK;
469}
470
471/*
472 * Initialize the CameraSource to so that it becomes
473 * ready for providing the video input streams as requested.
474 * @param camera the camera object used for the video source
475 * @param cameraId if camera == 0, use camera with this id
476 *      as the video source
477 * @param videoSize the target video frame size. If both
478 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
480 * @param frameRate the target frame rate in frames per second.
481 *      if it is -1, use the current camera frame rate setting.
482 * @param storeMetaDataInVideoBuffers request to store meta
483 *      data or real YUV data in video buffers. Request to
484 *      store meta data in video buffers may not be honored
485 *      if the source does not support this feature.
486 *
487 * @return OK if no error.
488 */
489status_t CameraSource::init(
490        const sp<ICamera>& camera,
491        const sp<ICameraRecordingProxy>& proxy,
492        int32_t cameraId,
493        const String16& clientName,
494        uid_t clientUid,
495        Size videoSize,
496        int32_t frameRate,
497        bool storeMetaDataInVideoBuffers) {
498
499    ALOGV("init");
500    status_t err = OK;
501    int64_t token = IPCThreadState::self()->clearCallingIdentity();
502    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
503                               videoSize, frameRate,
504                               storeMetaDataInVideoBuffers);
505    IPCThreadState::self()->restoreCallingIdentity(token);
506    return err;
507}
508
// Creates the BufferQueue used in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode:
// sets up a BufferItemConsumer with the requested size/format/dataspace,
// points the camera's video output at the producer, pre-allocates IMemory
// slots (one VideoNativeMetadata each) for handing buffers to the encoder,
// and starts the listener thread that pulls frames off the queue.
// Returns ALREADY_EXISTS if the queue was created before.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // Implementation-defined formats go straight to the HW encoder;
    // everything else must be CPU-readable.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    // Reserve extra buffers for the consumer side.
    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // Route the camera's video frames into our queue.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    size_t bufferSize = sizeof(VideoNativeMetadata);
    mMemoryHeapBase = new MemoryHeapBase(bufferSize * bufferCount, 0,
            "StageFright-CameraSource-BufferHeap");
    for (uint32_t i = 0; i < bufferCount; i++) {
        mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * bufferSize, bufferSize));
    }

    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
579
// Performs the actual camera setup (caller has already cleared the binder
// calling identity): connect/adopt the camera, validate the color format,
// apply and then verify the requested video size and frame rate, optionally
// set the preview surface, choose the video buffer mode, and fill in mMeta
// for downstream consumers.
status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            clientName, clientUid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate. (Any non-zero status means failure.)
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate: re-read the parameters to
    // verify the camera actually accepted the settings.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may already set a surface to the camera.
    if (mSurface != NULL) {
        // This CHECK is good, since we just passed the lock/unlock
        // check earlier by calling mCamera->setParameters().
        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
    }

    // By default, store real data in video buffers.
    mVideoBufferMode = ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
    if (storeMetaDataInVideoBuffers) {
        // Prefer the buffer-queue mode; fall back to metadata callbacks.
        if (OK == mCamera->setVideoBufferMode(ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
            mVideoBufferMode = ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
        } else if (OK == mCamera->setVideoBufferMode(
                ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
            mVideoBufferMode = ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
        }
    }

    if (mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
        err = mCamera->setVideoBufferMode(ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
        if (err != OK) {
            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
            return err;
        }
    }

    // Widen the glitch threshold to at least one frame interval.
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}
665
// Tears down recording if it is still running; otherwise just releases
// the camera lock that a successful init() acquired.
CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released(). This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}
676
// Starts frame delivery from the camera. In buffer-queue mode the queue is
// created here; otherwise the legacy buffer count/format is configured via
// sendCommand(). For a "hot" camera recording is started through the owning
// app's proxy (and our local camera handle is released first); otherwise we
// install our own listener and record directly.
status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;

    if (mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Initialize buffer queue.
        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
                (android_dataspace_t)mEncoderDataSpace,
                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
        if (err != OK) {
            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
                    strerror(-err), err);
            return err;
        }
    } else {
        if (mNumInputBuffers > 0) {
            err = mCamera->sendCommand(
                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

            // This could happen for CameraHAL1 clients; thus the failure is
            // not a fatal error
            if (err != OK) {
                ALOGW("Failed to set video buffer count to %d due to %d",
                    mNumInputBuffers, err);
            }
        }

        err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
                    mEncoderFormat, mEncoderDataSpace, err);
        }
    }

    err = OK;
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        // Give up our handle first; recording then runs through the proxy.
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording(
                new ProxyListener(this))) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}
739
// Starts the source. Optional 'meta' keys read here:
//   kKeyTime        - start time in us (stored in mStartTimeUs)
//   kKeyNumBuffers  - number of input video buffers (must be > 0)
//   kKeyPixelFormat - encoder pixel format override
//   kKeyColorSpace  - encoder data space override
// Returns mInitCheck if initialization failed, otherwise the result of
// startCameraRecording(). Must not be called twice without an intervening
// stop (CHECK(!mStarted)).
status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Enable frame statistics when the debug property is set.
    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    // Defaults, possibly overridden by 'meta' below.
    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    mEncoderDataSpace = HAL_DATASPACE_BT709;

    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }

        // apply encoder color format if specified
        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
            ALOGV("Using encoder format: %#x", mEncoderFormat);
        }
        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
            ALOGV("Using encoder data space: %#x", mEncoderDataSpace);
        }
    }

    status_t err;
    if ((err = startCameraRecording()) == OK) {
        mStarted = true;
    }

    return err;
}
787
788void CameraSource::stopCameraRecording() {
789    ALOGV("stopCameraRecording");
790    if (mCameraFlags & FLAGS_HOT_CAMERA) {
791        mCameraRecordingProxy->stopRecording();
792    } else {
793        mCamera->setListener(NULL);
794        mCamera->stopRecording();
795    }
796}
797
// Releases every camera-related reference. A "cold" camera (one we
// connected ourselves) gets its preview stopped and is disconnected; in
// all cases the hardware lock is dropped, the recording proxy's death
// link (if any) is removed, and the flags are cleared. The binder calls
// are made outside mLock with our own calling identity.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
830
// Stops recording and tears down all recording state: wakes any waiters,
// drops queued frames, waits (with a per-frame timeout) for frames still
// out at the encoder, stops the camera, logs statistics, shuts down the
// buffer queue listener thread, and finally releases the camera.
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mFrameAvailableCondition.signal();

        // Clear the caller's identity only if we still hold a camera handle.
        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        // Wait until the encoder has returned every outstanding frame.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must have been either encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
885
// Hands a frame buffer back to the camera once the encoder is done with it.
// The release path depends on the active video buffer mode.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    if (mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
        // The frame must come from our own metadata heap; anything else is a
        // stale or foreign buffer and is ignored.
        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
            return;
        }

        // The IMemory carries a VideoNativeMetadata descriptor (set up in
        // processBufferQueueFrame), not pixel data.
        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
                (uint8_t*)heap->getBase() + offset);

        // Find the corresponding buffer item for the native window buffer.
        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
        if (index == NAME_NOT_FOUND) {
            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
            return;
        }

        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
        mReceivedBufferItemMap.removeItemsAt(index);
        mVideoBufferConsumer->releaseBuffer(buffer);
        // The metadata slot is free for a future frame again.
        mMemoryBases.push_back(frame);
    } else if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        // Direct ICamera call: swap in our own IPC identity for the call.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}
922
923void CameraSource::releaseQueuedFrames() {
924    List<sp<IMemory> >::iterator it;
925    while (!mFramesReceived.empty()) {
926        it = mFramesReceived.begin();
927        releaseRecordingFrame(*it);
928        mFramesReceived.erase(it);
929        ++mNumFramesDropped;
930    }
931}
932
933sp<MetaData> CameraSource::getFormat() {
934    return mMeta;
935}
936
937void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
938    releaseRecordingFrame(frame);
939}
940
941void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
942    ALOGV("signalBufferReturned: %p", buffer->data());
943    Mutex::Autolock autoLock(mLock);
944    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
945         it != mFramesBeingEncoded.end(); ++it) {
946        if ((*it)->pointer() ==  buffer->data()) {
947            releaseOneRecordingFrame((*it));
948            mFramesBeingEncoded.erase(it);
949            ++mNumFramesEncoded;
950            buffer->setObserver(0);
951            buffer->release();
952            mFrameCompleteCondition.signal();
953            return;
954        }
955    }
956    CHECK(!"signalBufferReturned: bogus buffer");
957}
958
// Delivers the next captured frame to the caller as a MediaBuffer that
// aliases (does not copy) the frame's IMemory contents. Blocks until a
// frame arrives or the source is stopped. Seeking is unsupported: a camera
// is a live source.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                // On timeout, check whether the remote camera process died;
                // if the proxy binder is dead no frame will ever arrive.
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            // Stopped while waiting: return OK with *buffer left NULL.
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        // Track the frame until the encoder returns it via
        // signalBufferReturned().
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
1005
// Decides, with mLock held, whether the frame captured at timestampUs should
// be dropped. Also maintains timing state as a side effect: glitch count,
// mLastFrameTimestampUs/mFirstFrameTimeUs, and — on the first kept frame —
// converts mStartTimeUs from "recording start time" into the initial delay
// (first frame capture time minus start time).
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing; drop out-of-order frames.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // An unusually large inter-frame gap is counted as a glitch.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            // Reachable despite the check at the top because skipCurrentFrame()
            // (subclass hook) may have modified timestampUs.
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // From here on mStartTimeUs holds the initial delay rather than
            // the absolute start time.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
1045
1046void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
1047        int32_t msgType __unused, const sp<IMemory> &data) {
1048    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
1049    Mutex::Autolock autoLock(mLock);
1050
1051    if (shouldSkipFrameLocked(timestampUs)) {
1052        releaseOneRecordingFrame(data);
1053        return;
1054    }
1055
1056    ++mNumFramesReceived;
1057
1058    CHECK(data != NULL && data->size() > 0);
1059    mFramesReceived.push_back(data);
1060    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
1061    mFrameTimes.push_back(timeUs);
1062    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
1063        mStartTimeUs, timeUs);
1064    mFrameAvailableCondition.signal();
1065}
1066
1067CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
1068        const sp<CameraSource>& cameraSource) {
1069    mConsumer = consumer;
1070    mConsumer->setFrameAvailableListener(this);
1071    mCameraSource = cameraSource;
1072}
1073
1074void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
1075    ALOGV("%s: onFrameAvailable", __FUNCTION__);
1076
1077    Mutex::Autolock l(mLock);
1078
1079    if (!mFrameAvailable) {
1080        mFrameAvailable = true;
1081        mFrameAvailableSignal.signal();
1082    }
1083}
1084
1085bool CameraSource::BufferQueueListener::threadLoop() {
1086    if (mConsumer == nullptr || mCameraSource == nullptr) {
1087        return false;
1088    }
1089
1090    {
1091        Mutex::Autolock l(mLock);
1092        while (!mFrameAvailable) {
1093            if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1094                return true;
1095            }
1096        }
1097        mFrameAvailable = false;
1098    }
1099
1100    BufferItem buffer;
1101    while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1102        mCameraSource->processBufferQueueFrame(buffer);
1103    }
1104
1105    return true;
1106}
1107
// Consumes one BufferItem acquired from the camera's buffer queue
// (VIDEO_BUFFER_MODE_BUFFER_QUEUE path): wraps its native window buffer in
// a VideoNativeMetadata payload and queues that for read(). Frames are
// dropped (released back to the queue) if shouldSkipFrameLocked() rejects
// them or no metadata slot is free.
void CameraSource::processBufferQueueFrame(const BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; the pipeline works in us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    if (mMemoryBases.empty()) {
        ALOGW("%s: No available memory base. Dropping a recording frame.", __FUNCTION__);
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;  // no acquire fence to wait on

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1150
1151bool CameraSource::isMetaDataStoredInVideoBuffers() const {
1152    ALOGV("isMetaDataStoredInVideoBuffers");
1153
1154    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
1155    // buffer queue.
1156    return (mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA ||
1157            mVideoBufferMode == ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
1158}
1159
1160CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
1161    mSource = source;
1162}
1163
1164void CameraSource::ProxyListener::dataCallbackTimestamp(
1165        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
1166    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
1167}
1168
// Called when the camera recording proxy's binder dies. Only logs here:
// read() independently notices the dead binder on its next wait timeout and
// returns ERROR_END_OF_STREAM.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1172
1173}  // namespace android
1174