1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
#include <inttypes.h>
#include <string.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
38
39#if LOG_NDEBUG
40#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
41#else
42#define UNUSED_UNLESS_VERBOSE(x)
43#endif
44
namespace android {

// How long to wait for an outstanding frame before timing out (3 seconds).
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
48
// Listener installed on the locally-owned Camera; forwards camera callbacks
// to the CameraSource. Holds only a weak reference so the listener cannot
// keep the source alive.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // Generic notification callback; only logged here.
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Frame data callback; forwarded to CameraSource::dataCallback().
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    // Timestamped (ns) frame data; forwarded with ns->us conversion.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

    // Timestamped (ns) recording frame delivered as a native handle.
    virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;  // weak reference, promoted on each callback

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
70
// Stores a weak reference to the source; it is promoted in each callback.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
74
// Nothing to release explicitly; mSource is a weak pointer.
CameraSourceListener::~CameraSourceListener() {
}
77
78void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
79    UNUSED_UNLESS_VERBOSE(msgType);
80    UNUSED_UNLESS_VERBOSE(ext1);
81    UNUSED_UNLESS_VERBOSE(ext2);
82    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
83}
84
85void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
86                                    camera_frame_metadata_t * /* metadata */) {
87    ALOGV("postData(%d, ptr:%p, size:%zu)",
88         msgType, dataPtr->pointer(), dataPtr->size());
89
90    sp<CameraSource> source = mSource.promote();
91    if (source.get() != NULL) {
92        source->dataCallback(msgType, dataPtr);
93    }
94}
95
96void CameraSourceListener::postDataTimestamp(
97        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
98
99    sp<CameraSource> source = mSource.promote();
100    if (source.get() != NULL) {
101        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
102    }
103}
104
105void CameraSourceListener::postRecordingFrameHandleTimestamp(nsecs_t timestamp,
106        native_handle_t* handle) {
107    sp<CameraSource> source = mSource.promote();
108    if (source.get() != nullptr) {
109        source->recordingFrameHandleCallbackTimestamp(timestamp/1000, handle);
110    }
111}
112
113static int32_t getColorFormat(const char* colorFormat) {
114    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
115       return OMX_COLOR_FormatYUV420Planar;
116    }
117
118    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
119       return OMX_COLOR_FormatYUV422SemiPlanar;
120    }
121
122    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
123        return OMX_COLOR_FormatYUV420SemiPlanar;
124    }
125
126    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
127        return OMX_COLOR_FormatYCbYCr;
128    }
129
130    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
131       return OMX_COLOR_Format16bitRGB565;
132    }
133
134    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
135       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
136    }
137
138    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
139        return OMX_COLOR_FormatAndroidOpaque;
140    }
141
142    ALOGE("Uknown color format (%s), please add it to "
143         "CameraSource::getColorFormat", colorFormat);
144
145    CHECK(!"Unknown color format");
146    return -1;
147}
148
149CameraSource *CameraSource::Create(const String16 &clientName) {
150    Size size;
151    size.width = -1;
152    size.height = -1;
153
154    sp<hardware::ICamera> camera;
155    return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
156            Camera::USE_CALLING_PID, size, -1, NULL, false);
157}
158
159// static
160CameraSource *CameraSource::CreateFromCamera(
161    const sp<hardware::ICamera>& camera,
162    const sp<ICameraRecordingProxy>& proxy,
163    int32_t cameraId,
164    const String16& clientName,
165    uid_t clientUid,
166    pid_t clientPid,
167    Size videoSize,
168    int32_t frameRate,
169    const sp<IGraphicBufferProducer>& surface,
170    bool storeMetaDataInVideoBuffers) {
171
172    CameraSource *source = new CameraSource(camera, proxy, cameraId,
173            clientName, clientUid, clientPid, videoSize, frameRate, surface,
174            storeMetaDataInVideoBuffers);
175    return source;
176}
177
// Constructor: initializes bookkeeping members, then performs the full
// camera setup via init(). On failure the camera is released immediately
// and the error is stored in mInitCheck for initCheck() to report.
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 x -1 means "use the camera's current size" (see Create()).
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}
213
// Returns the result of construction-time init(): OK on success, or the
// error that caused initialization to fail.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
217
// Establishes access to a camera and takes its lock. With no camera
// supplied, connects a fresh one ("cold" camera); otherwise wraps the
// application-supplied remote camera ("hot") and records the recording
// proxy plus a death notifier. Sets mCamera and mCameraFlags.
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        // Open our own camera connection ("cold" camera).
        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Acquire the camera hardware lock (required before setParameters()).
    mCamera->lock();

    return OK;
}
244
245
246/*
247 * Check to see whether the requested video width and height is one
248 * of the supported sizes.
249 * @param width the video frame width in pixels
250 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
252 * @return true if the dimension (width and height) is supported.
253 */
254static bool isVideoSizeSupported(
255    int32_t width, int32_t height,
256    const Vector<Size>& supportedSizes) {
257
258    ALOGV("isVideoSizeSupported");
259    for (size_t i = 0; i < supportedSizes.size(); ++i) {
260        if (width  == supportedSizes[i].width &&
261            height == supportedSizes[i].height) {
262            return true;
263        }
264    }
265    return false;
266}
267
268/*
269 * If the preview and video output is separate, we only set the
270 * the video size, and applications should set the preview size
271 * to some proper value, and the recording framework will not
272 * change the preview size; otherwise, if the video and preview
273 * output is the same, we need to set the preview to be the same
274 * as the requested video size.
275 *
276 */
277/*
278 * Query the camera to retrieve the supported video frame sizes
279 * and also to see whether CameraParameters::setVideoSize()
280 * is supported or not.
281 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
283 *      CameraParameters::setVideoSize() is supported or not.
284 * @param sizes returns the vector of Size objects for the
285 *      supported video frame sizes advertised by the camera.
286 */
287static void getSupportedVideoSizes(
288    const CameraParameters& params,
289    bool *isSetVideoSizeSupported,
290    Vector<Size>& sizes) {
291
292    *isSetVideoSizeSupported = true;
293    params.getSupportedVideoSizes(sizes);
294    if (sizes.size() == 0) {
295        ALOGD("Camera does not support setVideoSize()");
296        params.getSupportedPreviewSizes(sizes);
297        *isSetVideoSizeSupported = false;
298    }
299}
300
301/*
302 * Check whether the camera has the supported color format
303 * @param params CameraParameters to retrieve the information
304 * @return OK if no error.
305 */
306status_t CameraSource::isCameraColorFormatSupported(
307        const CameraParameters& params) {
308    mColorFormat = getColorFormat(params.get(
309            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
310    if (mColorFormat == -1) {
311        return BAD_VALUE;
312    }
313    return OK;
314}
315
316/*
317 * Configure the camera to use the requested video size
318 * (width and height) and/or frame rate. If both width and
319 * height are -1, configuration on the video size is skipped.
320 * if frameRate is -1, configuration on the frame rate
321 * is skipped. Skipping the configuration allows one to
322 * use the current camera setting without the need to
323 * actually know the specific values (see Create() method).
324 *
325 * @param params the CameraParameters to be configured
326 * @param width the target video frame width in pixels
327 * @param height the target video frame height in pixels
328 * @param frameRate the target frame rate in frames per second.
329 * @return OK if no error.
330 */
331status_t CameraSource::configureCamera(
332        CameraParameters* params,
333        int32_t width, int32_t height,
334        int32_t frameRate) {
335    ALOGV("configureCamera");
336    Vector<Size> sizes;
337    bool isSetVideoSizeSupportedByCamera = true;
338    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
339    bool isCameraParamChanged = false;
340    if (width != -1 && height != -1) {
341        if (!isVideoSizeSupported(width, height, sizes)) {
342            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
343            return BAD_VALUE;
344        }
345        if (isSetVideoSizeSupportedByCamera) {
346            params->setVideoSize(width, height);
347        } else {
348            params->setPreviewSize(width, height);
349        }
350        isCameraParamChanged = true;
351    } else if ((width == -1 && height != -1) ||
352               (width != -1 && height == -1)) {
353        // If one and only one of the width and height is -1
354        // we reject such a request.
355        ALOGE("Requested video size (%dx%d) is not supported", width, height);
356        return BAD_VALUE;
357    } else {  // width == -1 && height == -1
358        // Do not configure the camera.
359        // Use the current width and height value setting from the camera.
360    }
361
362    if (frameRate != -1) {
363        CHECK(frameRate > 0 && frameRate <= 120);
364        const char* supportedFrameRates =
365                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
366        CHECK(supportedFrameRates != NULL);
367        ALOGV("Supported frame rates: %s", supportedFrameRates);
368        char buf[4];
369        snprintf(buf, 4, "%d", frameRate);
370        if (strstr(supportedFrameRates, buf) == NULL) {
371            ALOGE("Requested frame rate (%d) is not supported: %s",
372                frameRate, supportedFrameRates);
373            return BAD_VALUE;
374        }
375
376        // The frame rate is supported, set the camera to the requested value.
377        params->setPreviewFrameRate(frameRate);
378        isCameraParamChanged = true;
379    } else {  // frameRate == -1
380        // Do not configure the camera.
381        // Use the current frame rate value setting from the camera
382    }
383
384    if (isCameraParamChanged) {
385        // Either frame rate or frame size needs to be changed.
386        String8 s = params->flatten();
387        if (OK != mCamera->setParameters(s)) {
388            ALOGE("Could not change settings."
389                 " Someone else is using camera %p?", mCamera.get());
390            return -EBUSY;
391        }
392    }
393    return OK;
394}
395
396/*
397 * Check whether the requested video frame size
398 * has been successfully configured or not. If both width and height
399 * are -1, check on the current width and height value setting
400 * is performed.
401 *
402 * @param params CameraParameters to retrieve the information
403 * @param the target video frame width in pixels to check against
404 * @param the target video frame height in pixels to check against
405 * @return OK if no error
406 */
407status_t CameraSource::checkVideoSize(
408        const CameraParameters& params,
409        int32_t width, int32_t height) {
410
411    ALOGV("checkVideoSize");
412    // The actual video size is the same as the preview size
413    // if the camera hal does not support separate video and
414    // preview output. In this case, we retrieve the video
415    // size from preview.
416    int32_t frameWidthActual = -1;
417    int32_t frameHeightActual = -1;
418    Vector<Size> sizes;
419    params.getSupportedVideoSizes(sizes);
420    if (sizes.size() == 0) {
421        // video size is the same as preview size
422        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
423    } else {
424        // video size may not be the same as preview
425        params.getVideoSize(&frameWidthActual, &frameHeightActual);
426    }
427    if (frameWidthActual < 0 || frameHeightActual < 0) {
428        ALOGE("Failed to retrieve video frame size (%dx%d)",
429                frameWidthActual, frameHeightActual);
430        return UNKNOWN_ERROR;
431    }
432
433    // Check the actual video frame size against the target/requested
434    // video frame size.
435    if (width != -1 && height != -1) {
436        if (frameWidthActual != width || frameHeightActual != height) {
437            ALOGE("Failed to set video frame size to %dx%d. "
438                    "The actual video size is %dx%d ", width, height,
439                    frameWidthActual, frameHeightActual);
440            return UNKNOWN_ERROR;
441        }
442    }
443
444    // Good now.
445    mVideoSize.width = frameWidthActual;
446    mVideoSize.height = frameHeightActual;
447    return OK;
448}
449
450/*
451 * Check the requested frame rate has been successfully configured or not.
452 * If the target frameRate is -1, check on the current frame rate value
453 * setting is performed.
454 *
455 * @param params CameraParameters to retrieve the information
456 * @param the target video frame rate to check against
457 * @return OK if no error.
458 */
459status_t CameraSource::checkFrameRate(
460        const CameraParameters& params,
461        int32_t frameRate) {
462
463    ALOGV("checkFrameRate");
464    int32_t frameRateActual = params.getPreviewFrameRate();
465    if (frameRateActual < 0) {
466        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
467        return UNKNOWN_ERROR;
468    }
469
470    // Check the actual video frame rate against the target/requested
471    // video frame rate.
472    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
473        ALOGE("Failed to set preview frame rate to %d fps. The actual "
474                "frame rate is %d", frameRate, frameRateActual);
475        return UNKNOWN_ERROR;
476    }
477
478    // Good now.
479    mVideoFrameRate = frameRateActual;
480    return OK;
481}
482
483/*
484 * Initialize the CameraSource to so that it becomes
485 * ready for providing the video input streams as requested.
486 * @param camera the camera object used for the video source
487 * @param cameraId if camera == 0, use camera with this id
488 *      as the video source
489 * @param videoSize the target video frame size. If both
490 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
492 * @param frameRate the target frame rate in frames per second.
493 *      if it is -1, use the current camera frame rate setting.
494 * @param storeMetaDataInVideoBuffers request to store meta
495 *      data or real YUV data in video buffers. Request to
496 *      store meta data in video buffers may not be honored
497 *      if the source does not support this feature.
498 *
499 * @return OK if no error.
500 */
501status_t CameraSource::init(
502        const sp<hardware::ICamera>& camera,
503        const sp<ICameraRecordingProxy>& proxy,
504        int32_t cameraId,
505        const String16& clientName,
506        uid_t clientUid,
507        pid_t clientPid,
508        Size videoSize,
509        int32_t frameRate,
510        bool storeMetaDataInVideoBuffers) {
511
512    ALOGV("init");
513    status_t err = OK;
514    int64_t token = IPCThreadState::self()->clearCallingIdentity();
515    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
516                               videoSize, frameRate,
517                               storeMetaDataInVideoBuffers);
518    IPCThreadState::self()->restoreCallingIdentity(token);
519    return err;
520}
521
522void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
523    mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
524            "StageFright-CameraSource-BufferHeap");
525    for (uint32_t i = 0; i < bufferCount; i++) {
526        mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
527    }
528}
529
// Sets up a BufferQueue between the camera (producer) and this source
// (consumer) for VIDEO_BUFFER_MODE_BUFFER_QUEUE, configures its size,
// format and data space, points the camera at it, allocates the
// VideoNativeMetadata heap, and starts the listener thread.
// Returns OK, ALREADY_EXISTS if a queue was set up before, or the first
// error from the consumer/camera configuration calls.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // Implementation-defined buffers are destined for the hardware encoder;
    // everything else is read by the CPU.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    // Reserve extra buffers for the consumer side of the queue.
    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // Hand the producer end to the camera as its video output target.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);

    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
595
596status_t CameraSource::initWithCameraAccess(
597        const sp<hardware::ICamera>& camera,
598        const sp<ICameraRecordingProxy>& proxy,
599        int32_t cameraId,
600        const String16& clientName,
601        uid_t clientUid,
602        pid_t clientPid,
603        Size videoSize,
604        int32_t frameRate,
605        bool storeMetaDataInVideoBuffers) {
606    ALOGV("initWithCameraAccess");
607    status_t err = OK;
608
609    if ((err = isCameraAvailable(camera, proxy, cameraId,
610            clientName, clientUid, clientPid)) != OK) {
611        ALOGE("Camera connection could not be established.");
612        return err;
613    }
614    CameraParameters params(mCamera->getParameters());
615    if ((err = isCameraColorFormatSupported(params)) != OK) {
616        return err;
617    }
618
619    // Set the camera to use the requested video frame size
620    // and/or frame rate.
621    if ((err = configureCamera(&params,
622                    videoSize.width, videoSize.height,
623                    frameRate))) {
624        return err;
625    }
626
627    // Check on video frame size and frame rate.
628    CameraParameters newCameraParams(mCamera->getParameters());
629    if ((err = checkVideoSize(newCameraParams,
630                videoSize.width, videoSize.height)) != OK) {
631        return err;
632    }
633    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
634        return err;
635    }
636
637    // Set the preview display. Skip this if mSurface is null because
638    // applications may already set a surface to the camera.
639    if (mSurface != NULL) {
640        // This CHECK is good, since we just passed the lock/unlock
641        // check earlier by calling mCamera->setParameters().
642        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
643    }
644
645    // By default, store real data in video buffers.
646    mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
647    if (storeMetaDataInVideoBuffers) {
648        if (OK == mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
649            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
650        } else if (OK == mCamera->setVideoBufferMode(
651                hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
652            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
653        }
654    }
655
656    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
657        err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
658        if (err != OK) {
659            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
660                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
661            return err;
662        }
663    }
664
665    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
666    if (glitchDurationUs > mGlitchDurationThresholdUs) {
667        mGlitchDurationThresholdUs = glitchDurationUs;
668    }
669
670    // XXX: query camera for the stride and slice height
671    // when the capability becomes available.
672    mMeta = new MetaData;
673    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
674    mMeta->setInt32(kKeyColorFormat, mColorFormat);
675    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
676    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
677    mMeta->setInt32(kKeyStride,      mVideoSize.width);
678    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
679    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
680    return OK;
681}
682
683CameraSource::~CameraSource() {
684    if (mStarted) {
685        reset();
686    } else if (mInitCheck == OK) {
687        // Camera is initialized but because start() is never called,
688        // the lock on Camera is never released(). This makes sure
689        // Camera's lock is released in this case.
690        releaseCamera();
691    }
692}
693
694status_t CameraSource::startCameraRecording() {
695    ALOGV("startCameraRecording");
696    // Reset the identity to the current thread because media server owns the
697    // camera and recording is started by the applications. The applications
698    // will connect to the camera in ICameraRecordingProxy::startRecording.
699    int64_t token = IPCThreadState::self()->clearCallingIdentity();
700    status_t err;
701
702    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
703        // Initialize buffer queue.
704        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
705                (android_dataspace_t)mEncoderDataSpace,
706                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
707        if (err != OK) {
708            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
709                    strerror(-err), err);
710            return err;
711        }
712    } else {
713        if (mNumInputBuffers > 0) {
714            err = mCamera->sendCommand(
715                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
716
717            // This could happen for CameraHAL1 clients; thus the failure is
718            // not a fatal error
719            if (err != OK) {
720                ALOGW("Failed to set video buffer count to %d due to %d",
721                    mNumInputBuffers, err);
722            }
723        }
724
725        err = mCamera->sendCommand(
726            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);
727
728        // This could happen for CameraHAL1 clients; thus the failure is
729        // not a fatal error
730        if (err != OK) {
731            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
732                    mEncoderFormat, mEncoderDataSpace, err);
733        }
734
735        // Create memory heap to store buffers as VideoNativeMetadata.
736        createVideoBufferMemoryHeap(sizeof(VideoNativeHandleMetadata), kDefaultVideoBufferCount);
737    }
738
739    err = OK;
740    if (mCameraFlags & FLAGS_HOT_CAMERA) {
741        mCamera->unlock();
742        mCamera.clear();
743        if ((err = mCameraRecordingProxy->startRecording(
744                new ProxyListener(this))) != OK) {
745            ALOGE("Failed to start recording, received error: %s (%d)",
746                    strerror(-err), err);
747        }
748    } else {
749        mCamera->setListener(new CameraSourceListener(this));
750        mCamera->startRecording();
751        if (!mCamera->recordingEnabled()) {
752            err = -EINVAL;
753            ALOGE("Failed to start recording");
754        }
755    }
756    IPCThreadState::self()->restoreCallingIdentity(token);
757    return err;
758}
759
760status_t CameraSource::start(MetaData *meta) {
761    ALOGV("start");
762    CHECK(!mStarted);
763    if (mInitCheck != OK) {
764        ALOGE("CameraSource is not initialized yet");
765        return mInitCheck;
766    }
767
768    char value[PROPERTY_VALUE_MAX];
769    if (property_get("media.stagefright.record-stats", value, NULL)
770        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
771        mCollectStats = true;
772    }
773
774    mStartTimeUs = 0;
775    mNumInputBuffers = 0;
776    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
777    mEncoderDataSpace = HAL_DATASPACE_V0_BT709;
778
779    if (meta) {
780        int64_t startTimeUs;
781        if (meta->findInt64(kKeyTime, &startTimeUs)) {
782            mStartTimeUs = startTimeUs;
783        }
784
785        int32_t nBuffers;
786        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
787            CHECK_GT(nBuffers, 0);
788            mNumInputBuffers = nBuffers;
789        }
790
791        // apply encoder color format if specified
792        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
793            ALOGI("Using encoder format: %#x", mEncoderFormat);
794        }
795        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
796            ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
797        }
798    }
799
800    status_t err;
801    if ((err = startCameraRecording()) == OK) {
802        mStarted = true;
803    }
804
805    return err;
806}
807
808void CameraSource::stopCameraRecording() {
809    ALOGV("stopCameraRecording");
810    if (mCameraFlags & FLAGS_HOT_CAMERA) {
811        if (mCameraRecordingProxy != 0) {
812            mCameraRecordingProxy->stopRecording();
813        }
814    } else {
815        if (mCamera != 0) {
816            mCamera->setListener(NULL);
817            mCamera->stopRecording();
818        }
819    }
820}
821
// Releases our hold on the camera. A cold camera (one we connected
// ourselves) gets its preview stopped and is disconnected; in all cases
// the hardware lock is released, the recording proxy (if any) is unlinked
// from death notifications, and mCameraFlags is cleared.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        // Talk to the camera service under our own binder identity.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
854
// Stops the source: drains queued frames, waits for frames still held by the
// encoder, stops camera recording, tears down the buffer-queue listener
// thread, and finally releases the camera. Always returns OK.
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        // Wake any read() blocked waiting for an incoming frame.
        mFrameAvailableCondition.signal();

        int64_t token;
        bool isTokenValid = false;
        // Only clear the binder identity when we will actually talk to the
        // camera below; token is only read when isTokenValid is true.
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        // Wait for the encoder to return every outstanding frame. On timeout
        // we only log and keep waiting — frames must come back before we can
        // safely stop recording.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        // per-frame interval (us -> ns) plus a fixed 3s grace
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must have been either encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    // Shut down the buffer-queue listener thread (used in BUFFER_QUEUE mode).
    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
909
// Returns a recording frame to its producer. The path depends on the video
// buffer mode: in BUFFER_QUEUE mode the underlying BufferItem goes back to
// the consumer; otherwise the frame (or its native handle) goes back to the
// camera, either via the application's recording proxy or directly.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
        // Sanity check: the frame must come from our own metadata heap.
        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
            return;
        }

        // The IMemory contains a VideoNativeMetadata written by
        // processBufferQueueFrame(); locate it via heap base + offset.
        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
                (uint8_t*)heap->getBase() + offset);

        // Find the corresponding buffer item for the native window buffer.
        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
        if (index == NAME_NOT_FOUND) {
            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
            return;
        }

        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
        mReceivedBufferItemMap.removeItemsAt(index);
        mVideoBufferConsumer->releaseBuffer(buffer);
        // Recycle the IMemory slot and wake anyone waiting for a free slot.
        mMemoryBases.push_back(frame);
        mMemoryBaseAvailableCond.signal();
    } else {
        native_handle_t* handle = nullptr;

        // Check if frame contains a VideoNativeHandleMetadata.
        if (frame->size() == sizeof(VideoNativeHandleMetadata)) {
            VideoNativeHandleMetadata *metadata =
                (VideoNativeHandleMetadata*)(frame->pointer());
            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
                handle = metadata->pHandle;
            }
        }

        if (handle != nullptr) {
            // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
            releaseRecordingFrameHandle(handle);
            // The IMemory wrapper is ours; recycle it for the next frame.
            mMemoryBases.push_back(frame);
            mMemoryBaseAvailableCond.signal();
        } else if (mCameraRecordingProxy != nullptr) {
            // mCamera is created by application. Return the frame back to camera via camera
            // recording proxy.
            mCameraRecordingProxy->releaseRecordingFrame(frame);
        } else if (mCamera != nullptr) {
            // mCamera is created by CameraSource. Return the frame directly back to camera.
            // Clear the caller's binder identity for the direct camera call.
            int64_t token = IPCThreadState::self()->clearCallingIdentity();
            mCamera->releaseRecordingFrame(frame);
            IPCThreadState::self()->restoreCallingIdentity(token);
        }
    }
}
968
969void CameraSource::releaseQueuedFrames() {
970    List<sp<IMemory> >::iterator it;
971    while (!mFramesReceived.empty()) {
972        it = mFramesReceived.begin();
973        releaseRecordingFrame(*it);
974        mFramesReceived.erase(it);
975        ++mNumFramesDropped;
976    }
977}
978
// Returns the metadata describing this source's output format.
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
982
// Thin wrapper around releaseRecordingFrame() for returning a single frame.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
986
987void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
988    ALOGV("signalBufferReturned: %p", buffer->data());
989    Mutex::Autolock autoLock(mLock);
990    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
991         it != mFramesBeingEncoded.end(); ++it) {
992        if ((*it)->pointer() ==  buffer->data()) {
993            releaseOneRecordingFrame((*it));
994            mFramesBeingEncoded.erase(it);
995            ++mNumFramesEncoded;
996            buffer->setObserver(0);
997            buffer->release();
998            mFrameCompleteCondition.signal();
999            return;
1000        }
1001    }
1002    CHECK(!"signalBufferReturned: bogus buffer");
1003}
1004
// MediaSource::read(): blocks until a camera frame is available, then wraps
// it in a MediaBuffer (zero-copy — the buffer points into the IMemory) with
// its timestamp in kKeyTime. Seeking is not supported. Returns OK with
// *buffer == NULL if the source was stopped while waiting.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    // A live camera source cannot seek.
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    // per-frame interval (us -> ns) plus a fixed 3s grace
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                // On timeout, check whether the app-side camera proxy died;
                // if so there will never be another frame.
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        // Pop the oldest frame and its matching timestamp (queues are kept in
        // lockstep by the data callbacks).
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        // Track the frame until signalBufferReturned() releases it.
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
1051
// Decides whether an incoming frame at timestampUs should be dropped, and
// updates frame-timing state (mLastFrameTimestampUs, mFirstFrameTimeUs,
// mStartTimeUs, mNumGlitches) as a side effect. Must be called with mLock
// held. Returns true if the frame must be dropped.
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Drop if we are stopped, or if this pre-first frame predates start time.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing; drop out-of-order frames.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // Count unusually long inter-frame gaps as glitches.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // Repurpose mStartTimeUs as the initial delay (capture time of
            // the first kept frame minus the requested start time).
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
1091
// Camera data callback for IMemory-carried frames (timestampUs is already in
// microseconds). Either drops the frame (returning it to the camera) or
// queues it with an adjusted timestamp for read() to pick up.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType __unused, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);

    if (shouldSkipFrameLocked(timestampUs)) {
        // Dropped frames must still be returned to the camera.
        releaseOneRecordingFrame(data);
        return;
    }

    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    // Media timestamp = initial delay + elapsed capture time; kept in
    // lockstep with mFramesReceived for read() to pop in pairs.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1112
1113void CameraSource::releaseRecordingFrameHandle(native_handle_t* handle) {
1114    if (mCameraRecordingProxy != nullptr) {
1115        mCameraRecordingProxy->releaseRecordingFrameHandle(handle);
1116    } else if (mCamera != nullptr) {
1117        int64_t token = IPCThreadState::self()->clearCallingIdentity();
1118        mCamera->releaseRecordingFrameHandle(handle);
1119        IPCThreadState::self()->restoreCallingIdentity(token);
1120    }
1121}
1122
// Camera callback for native-handle frames (metadata mode). Wraps the handle
// in a recycled IMemory slot as VideoNativeHandleMetadata and queues it for
// read(). Drops the frame if no IMemory slot frees up within the timeout.
void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
                native_handle_t* handle) {
    ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (handle == nullptr) return;

    if (shouldSkipFrameLocked(timestampUs)) {
        releaseRecordingFrameHandle(handle);
        return;
    }

    // Wait for releaseRecordingFrame() to recycle an IMemory slot.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            releaseRecordingFrameHandle(handle);
            return;
        }
    }

    ++mNumFramesReceived;

    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
    VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->pointer());
    metadata->eType = kMetadataBufferTypeNativeHandleSource;
    metadata->pHandle = handle;

    mFramesReceived.push_back(data);
    // Media timestamp = initial delay + elapsed capture time.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1159
1160CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
1161        const sp<CameraSource>& cameraSource) {
1162    mConsumer = consumer;
1163    mConsumer->setFrameAvailableListener(this);
1164    mCameraSource = cameraSource;
1165}
1166
1167void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
1168    ALOGV("%s: onFrameAvailable", __FUNCTION__);
1169
1170    Mutex::Autolock l(mLock);
1171
1172    if (!mFrameAvailable) {
1173        mFrameAvailable = true;
1174        mFrameAvailableSignal.signal();
1175    }
1176}
1177
1178bool CameraSource::BufferQueueListener::threadLoop() {
1179    if (mConsumer == nullptr || mCameraSource == nullptr) {
1180        return false;
1181    }
1182
1183    {
1184        Mutex::Autolock l(mLock);
1185        while (!mFrameAvailable) {
1186            if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1187                return true;
1188            }
1189        }
1190        mFrameAvailable = false;
1191    }
1192
1193    BufferItem buffer;
1194    while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1195        mCameraSource->processBufferQueueFrame(buffer);
1196    }
1197
1198    return true;
1199}
1200
// Converts an acquired BufferItem into a VideoNativeMetadata payload stored
// in a recycled IMemory slot and queues it for read(). Remembers the
// BufferItem so releaseRecordingFrame() can return it to the buffer queue.
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; the rest of this class uses us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    // Wait for releaseRecordingFrame() to recycle an IMemory slot.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;  // no fence to wait on

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // Media timestamp = initial delay + elapsed capture time.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1246
1247MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
1248    ALOGV("metaDataStoredInVideoBuffers");
1249
1250    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
1251    // buffer queue.
1252    switch (mVideoBufferMode) {
1253        case hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA:
1254            return kMetadataBufferTypeNativeHandleSource;
1255        case hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE:
1256            return kMetadataBufferTypeANWBuffer;
1257        default:
1258            return kMetadataBufferTypeInvalid;
1259    }
1260}
1261
1262CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
1263    mSource = source;
1264}
1265
// Proxy callback for IMemory frames: converts the timestamp from ns to us
// and forwards to the source.
void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}
1270
// Proxy callback for native-handle frames: converts the timestamp from ns to
// us and forwards to the source.
void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
        native_handle_t* handle) {
    mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
}
1275
// Invoked when the application's camera recording proxy binder dies. Only
// logs here; read() separately detects the dead proxy via isBinderAlive()
// and returns ERROR_END_OF_STREAM.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1279
1280}  // namespace android
1281