1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <inttypes.h>
18
19//#define LOG_NDEBUG 0
20#define LOG_TAG "CameraSource"
21#include <utils/Log.h>
22
23#include <OMX_Component.h>
24#include <binder/IPCThreadState.h>
25#include <binder/MemoryBase.h>
26#include <binder/MemoryHeapBase.h>
27#include <media/hardware/HardwareAPI.h>
28#include <media/stagefright/foundation/ADebug.h>
29#include <media/stagefright/CameraSource.h>
30#include <media/stagefright/MediaDefs.h>
31#include <media/stagefright/MediaErrors.h>
32#include <media/stagefright/MetaData.h>
33#include <camera/Camera.h>
34#include <camera/CameraParameters.h>
35#include <gui/Surface.h>
36#include <utils/String8.h>
37#include <cutils/properties.h>
38
39#if LOG_NDEBUG
40#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
41#else
42#define UNUSED_UNLESS_VERBOSE(x)
43#endif
44
45namespace android {
46
// Extra slack (3 s) added to the inter-frame interval when waiting for
// outstanding encoded frames to drain in reset().
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
48
// Listener installed on the Camera to forward its callbacks into a
// CameraSource. Holds only a weak reference so the listener alone can
// never keep the CameraSource alive.
struct CameraSourceListener : public CameraListener {
    explicit CameraSourceListener(const sp<CameraSource> &source);

    // Generic camera notification (logged only; see implementation).
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Non-timestamped data callback (e.g. preview frames).
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    // Timestamped recording-frame callback; timestamp is in ns.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

    // Recording frame delivered as a native handle; timestamp is in ns.
    virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);

    // Batched variant of the above; timestamps[i] pairs with handles[i].
    virtual void postRecordingFrameHandleTimestampBatch(
                const std::vector<nsecs_t>& timestamps,
                const std::vector<native_handle_t*>& handles);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak ref: frames are dropped if the source has been destroyed.
    wp<CameraSource> mSource;

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
74
// Store a weak reference to the source; promoted on each callback.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
78
CameraSourceListener::~CameraSourceListener() {
}
81
// Camera notifications are only logged; nothing is forwarded to the source.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    // Arguments are consumed only by ALOGV; silence unused-parameter
    // warnings when verbose logging is compiled out.
    UNUSED_UNLESS_VERBOSE(msgType);
    UNUSED_UNLESS_VERBOSE(ext1);
    UNUSED_UNLESS_VERBOSE(ext2);
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
88
89void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
90                                    camera_frame_metadata_t * /* metadata */) {
91    ALOGV("postData(%d, ptr:%p, size:%zu)",
92         msgType, dataPtr->pointer(), dataPtr->size());
93
94    sp<CameraSource> source = mSource.promote();
95    if (source.get() != NULL) {
96        source->dataCallback(msgType, dataPtr);
97    }
98}
99
100void CameraSourceListener::postDataTimestamp(
101        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
102
103    sp<CameraSource> source = mSource.promote();
104    if (source.get() != NULL) {
105        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
106    }
107}
108
109void CameraSourceListener::postRecordingFrameHandleTimestamp(nsecs_t timestamp,
110        native_handle_t* handle) {
111    sp<CameraSource> source = mSource.promote();
112    if (source.get() != nullptr) {
113        source->recordingFrameHandleCallbackTimestamp(timestamp/1000, handle);
114    }
115}
116
117void CameraSourceListener::postRecordingFrameHandleTimestampBatch(
118        const std::vector<nsecs_t>& timestamps,
119        const std::vector<native_handle_t*>& handles) {
120    sp<CameraSource> source = mSource.promote();
121    if (source.get() != nullptr) {
122        int n = timestamps.size();
123        std::vector<nsecs_t> modifiedTimestamps(n);
124        for (int i = 0; i < n; i++) {
125            modifiedTimestamps[i] = timestamps[i] / 1000;
126        }
127        source->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
128    }
129}
130
131static int32_t getColorFormat(const char* colorFormat) {
132    if (!colorFormat) {
133        ALOGE("Invalid color format");
134        return -1;
135    }
136
137    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
138       return OMX_COLOR_FormatYUV420Planar;
139    }
140
141    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
142       return OMX_COLOR_FormatYUV422SemiPlanar;
143    }
144
145    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
146        return OMX_COLOR_FormatYUV420SemiPlanar;
147    }
148
149    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
150        return OMX_COLOR_FormatYCbYCr;
151    }
152
153    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
154       return OMX_COLOR_Format16bitRGB565;
155    }
156
157    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
158       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
159    }
160
161    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
162        return OMX_COLOR_FormatAndroidOpaque;
163    }
164
165    ALOGE("Uknown color format (%s), please add it to "
166         "CameraSource::getColorFormat", colorFormat);
167
168    CHECK(!"Unknown color format");
169    return -1;
170}
171
172CameraSource *CameraSource::Create(const String16 &clientName) {
173    Size size;
174    size.width = -1;
175    size.height = -1;
176
177    sp<hardware::ICamera> camera;
178    return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
179            Camera::USE_CALLING_PID, size, -1, NULL, false);
180}
181
182// static
183CameraSource *CameraSource::CreateFromCamera(
184    const sp<hardware::ICamera>& camera,
185    const sp<ICameraRecordingProxy>& proxy,
186    int32_t cameraId,
187    const String16& clientName,
188    uid_t clientUid,
189    pid_t clientPid,
190    Size videoSize,
191    int32_t frameRate,
192    const sp<IGraphicBufferProducer>& surface,
193    bool storeMetaDataInVideoBuffers) {
194
195    CameraSource *source = new CameraSource(camera, proxy, cameraId,
196            clientName, clientUid, clientPid, videoSize, frameRate, surface,
197            storeMetaDataInVideoBuffers);
198    return source;
199}
200
// Constructor: records defaults, then runs the full camera init sequence.
// On failure the camera is released immediately; the resulting status is
// exposed through initCheck().
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 means "not configured yet"; init() fills in the real values.
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    // Release the camera lock right away if initialization failed.
    if (mInitCheck != OK) releaseCamera();
}
236
// Returns the status recorded by init() during construction. Callers
// must verify this is OK before using the source.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
240
// Despite the name, this establishes the camera connection: either
// connects to camera `cameraId` ourselves (cold camera), or wraps the
// application-supplied ICamera (hot camera) and keeps its recording
// proxy. In both cases the camera is locked to us on success.
// @return OK on success, -EBUSY if the camera could not be obtained.
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        // Cold camera: we open (and will later disconnect) it ourselves.
        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Take the camera hardware lock so no other client can reconfigure it.
    mCamera->lock();

    return OK;
}
267
268
269/*
270 * Check to see whether the requested video width and height is one
271 * of the supported sizes.
272 * @param width the video frame width in pixels
273 * @param height the video frame height in pixels
274 * @param suppportedSizes the vector of sizes that we check against
275 * @return true if the dimension (width and height) is supported.
276 */
277static bool isVideoSizeSupported(
278    int32_t width, int32_t height,
279    const Vector<Size>& supportedSizes) {
280
281    ALOGV("isVideoSizeSupported");
282    for (size_t i = 0; i < supportedSizes.size(); ++i) {
283        if (width  == supportedSizes[i].width &&
284            height == supportedSizes[i].height) {
285            return true;
286        }
287    }
288    return false;
289}
290
291/*
292 * If the preview and video output is separate, we only set the
293 * the video size, and applications should set the preview size
294 * to some proper value, and the recording framework will not
295 * change the preview size; otherwise, if the video and preview
296 * output is the same, we need to set the preview to be the same
297 * as the requested video size.
298 *
299 */
300/*
301 * Query the camera to retrieve the supported video frame sizes
302 * and also to see whether CameraParameters::setVideoSize()
303 * is supported or not.
304 * @param params CameraParameters to retrieve the information
305 * @@param isSetVideoSizeSupported retunrs whether method
306 *      CameraParameters::setVideoSize() is supported or not.
307 * @param sizes returns the vector of Size objects for the
308 *      supported video frame sizes advertised by the camera.
309 */
310static void getSupportedVideoSizes(
311    const CameraParameters& params,
312    bool *isSetVideoSizeSupported,
313    Vector<Size>& sizes) {
314
315    *isSetVideoSizeSupported = true;
316    params.getSupportedVideoSizes(sizes);
317    if (sizes.size() == 0) {
318        ALOGD("Camera does not support setVideoSize()");
319        params.getSupportedPreviewSizes(sizes);
320        *isSetVideoSizeSupported = false;
321    }
322}
323
324/*
325 * Check whether the camera has the supported color format
326 * @param params CameraParameters to retrieve the information
327 * @return OK if no error.
328 */
329status_t CameraSource::isCameraColorFormatSupported(
330        const CameraParameters& params) {
331    mColorFormat = getColorFormat(params.get(
332            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
333    if (mColorFormat == -1) {
334        return BAD_VALUE;
335    }
336    return OK;
337}
338
339/*
340 * Configure the camera to use the requested video size
341 * (width and height) and/or frame rate. If both width and
342 * height are -1, configuration on the video size is skipped.
343 * if frameRate is -1, configuration on the frame rate
344 * is skipped. Skipping the configuration allows one to
345 * use the current camera setting without the need to
346 * actually know the specific values (see Create() method).
347 *
348 * @param params the CameraParameters to be configured
349 * @param width the target video frame width in pixels
350 * @param height the target video frame height in pixels
351 * @param frameRate the target frame rate in frames per second.
352 * @return OK if no error.
353 */
354status_t CameraSource::configureCamera(
355        CameraParameters* params,
356        int32_t width, int32_t height,
357        int32_t frameRate) {
358    ALOGV("configureCamera");
359    Vector<Size> sizes;
360    bool isSetVideoSizeSupportedByCamera = true;
361    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
362    bool isCameraParamChanged = false;
363    if (width != -1 && height != -1) {
364        if (!isVideoSizeSupported(width, height, sizes)) {
365            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
366            return BAD_VALUE;
367        }
368        if (isSetVideoSizeSupportedByCamera) {
369            params->setVideoSize(width, height);
370        } else {
371            params->setPreviewSize(width, height);
372        }
373        isCameraParamChanged = true;
374    } else if ((width == -1 && height != -1) ||
375               (width != -1 && height == -1)) {
376        // If one and only one of the width and height is -1
377        // we reject such a request.
378        ALOGE("Requested video size (%dx%d) is not supported", width, height);
379        return BAD_VALUE;
380    } else {  // width == -1 && height == -1
381        // Do not configure the camera.
382        // Use the current width and height value setting from the camera.
383    }
384
385    if (frameRate != -1) {
386        CHECK(frameRate > 0 && frameRate <= 120);
387        const char* supportedFrameRates =
388                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
389        CHECK(supportedFrameRates != NULL);
390        ALOGV("Supported frame rates: %s", supportedFrameRates);
391        char buf[4];
392        snprintf(buf, 4, "%d", frameRate);
393        if (strstr(supportedFrameRates, buf) == NULL) {
394            ALOGE("Requested frame rate (%d) is not supported: %s",
395                frameRate, supportedFrameRates);
396            return BAD_VALUE;
397        }
398
399        // The frame rate is supported, set the camera to the requested value.
400        params->setPreviewFrameRate(frameRate);
401        isCameraParamChanged = true;
402    } else {  // frameRate == -1
403        // Do not configure the camera.
404        // Use the current frame rate value setting from the camera
405    }
406
407    if (isCameraParamChanged) {
408        // Either frame rate or frame size needs to be changed.
409        String8 s = params->flatten();
410        if (OK != mCamera->setParameters(s)) {
411            ALOGE("Could not change settings."
412                 " Someone else is using camera %p?", mCamera.get());
413            return -EBUSY;
414        }
415    }
416    return OK;
417}
418
419/*
420 * Check whether the requested video frame size
421 * has been successfully configured or not. If both width and height
422 * are -1, check on the current width and height value setting
423 * is performed.
424 *
425 * @param params CameraParameters to retrieve the information
426 * @param the target video frame width in pixels to check against
427 * @param the target video frame height in pixels to check against
428 * @return OK if no error
429 */
430status_t CameraSource::checkVideoSize(
431        const CameraParameters& params,
432        int32_t width, int32_t height) {
433
434    ALOGV("checkVideoSize");
435    // The actual video size is the same as the preview size
436    // if the camera hal does not support separate video and
437    // preview output. In this case, we retrieve the video
438    // size from preview.
439    int32_t frameWidthActual = -1;
440    int32_t frameHeightActual = -1;
441    Vector<Size> sizes;
442    params.getSupportedVideoSizes(sizes);
443    if (sizes.size() == 0) {
444        // video size is the same as preview size
445        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
446    } else {
447        // video size may not be the same as preview
448        params.getVideoSize(&frameWidthActual, &frameHeightActual);
449    }
450    if (frameWidthActual < 0 || frameHeightActual < 0) {
451        ALOGE("Failed to retrieve video frame size (%dx%d)",
452                frameWidthActual, frameHeightActual);
453        return UNKNOWN_ERROR;
454    }
455
456    // Check the actual video frame size against the target/requested
457    // video frame size.
458    if (width != -1 && height != -1) {
459        if (frameWidthActual != width || frameHeightActual != height) {
460            ALOGE("Failed to set video frame size to %dx%d. "
461                    "The actual video size is %dx%d ", width, height,
462                    frameWidthActual, frameHeightActual);
463            return UNKNOWN_ERROR;
464        }
465    }
466
467    // Good now.
468    mVideoSize.width = frameWidthActual;
469    mVideoSize.height = frameHeightActual;
470    return OK;
471}
472
473/*
474 * Check the requested frame rate has been successfully configured or not.
475 * If the target frameRate is -1, check on the current frame rate value
476 * setting is performed.
477 *
478 * @param params CameraParameters to retrieve the information
479 * @param the target video frame rate to check against
480 * @return OK if no error.
481 */
482status_t CameraSource::checkFrameRate(
483        const CameraParameters& params,
484        int32_t frameRate) {
485
486    ALOGV("checkFrameRate");
487    int32_t frameRateActual = params.getPreviewFrameRate();
488    if (frameRateActual < 0) {
489        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
490        return UNKNOWN_ERROR;
491    }
492
493    // Check the actual video frame rate against the target/requested
494    // video frame rate.
495    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
496        ALOGE("Failed to set preview frame rate to %d fps. The actual "
497                "frame rate is %d", frameRate, frameRateActual);
498        return UNKNOWN_ERROR;
499    }
500
501    // Good now.
502    mVideoFrameRate = frameRateActual;
503    return OK;
504}
505
506/*
507 * Initialize the CameraSource to so that it becomes
508 * ready for providing the video input streams as requested.
509 * @param camera the camera object used for the video source
510 * @param cameraId if camera == 0, use camera with this id
511 *      as the video source
512 * @param videoSize the target video frame size. If both
513 *      width and height in videoSize is -1, use the current
514 *      width and heigth settings by the camera
515 * @param frameRate the target frame rate in frames per second.
516 *      if it is -1, use the current camera frame rate setting.
517 * @param storeMetaDataInVideoBuffers request to store meta
518 *      data or real YUV data in video buffers. Request to
519 *      store meta data in video buffers may not be honored
520 *      if the source does not support this feature.
521 *
522 * @return OK if no error.
523 */
524status_t CameraSource::init(
525        const sp<hardware::ICamera>& camera,
526        const sp<ICameraRecordingProxy>& proxy,
527        int32_t cameraId,
528        const String16& clientName,
529        uid_t clientUid,
530        pid_t clientPid,
531        Size videoSize,
532        int32_t frameRate,
533        bool storeMetaDataInVideoBuffers) {
534
535    ALOGV("init");
536    status_t err = OK;
537    int64_t token = IPCThreadState::self()->clearCallingIdentity();
538    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
539                               videoSize, frameRate,
540                               storeMetaDataInVideoBuffers);
541    IPCThreadState::self()->restoreCallingIdentity(token);
542    return err;
543}
544
545void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
546    mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
547            "StageFright-CameraSource-BufferHeap");
548    for (uint32_t i = 0; i < bufferCount; i++) {
549        mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
550    }
551}
552
// Creates the BufferQueue used in VIDEO_BUFFER_MODE_BUFFER_QUEUE: sets up
// producer/consumer, configures default buffer size/format/dataspace,
// hands the producer to the camera as its video target, allocates the
// metadata heap, and starts the listener thread that pulls frames.
// @return OK on success; ALREADY_EXISTS if a queue was already created,
//         or the failing call's error code.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // Opaque (implementation-defined) buffers go straight to the encoder;
    // everything else is read by software.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    // Reserve extra buffers for the consumer side on top of the
    // encoder's request.
    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // Point the camera's video output at our queue's producer end.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);

    // Listener thread delivers acquired buffer items back to this source.
    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
618
619status_t CameraSource::initWithCameraAccess(
620        const sp<hardware::ICamera>& camera,
621        const sp<ICameraRecordingProxy>& proxy,
622        int32_t cameraId,
623        const String16& clientName,
624        uid_t clientUid,
625        pid_t clientPid,
626        Size videoSize,
627        int32_t frameRate,
628        bool storeMetaDataInVideoBuffers) {
629    ALOGV("initWithCameraAccess");
630    status_t err = OK;
631
632    if ((err = isCameraAvailable(camera, proxy, cameraId,
633            clientName, clientUid, clientPid)) != OK) {
634        ALOGE("Camera connection could not be established.");
635        return err;
636    }
637    CameraParameters params(mCamera->getParameters());
638    if ((err = isCameraColorFormatSupported(params)) != OK) {
639        return err;
640    }
641
642    // Set the camera to use the requested video frame size
643    // and/or frame rate.
644    if ((err = configureCamera(&params,
645                    videoSize.width, videoSize.height,
646                    frameRate))) {
647        return err;
648    }
649
650    // Check on video frame size and frame rate.
651    CameraParameters newCameraParams(mCamera->getParameters());
652    if ((err = checkVideoSize(newCameraParams,
653                videoSize.width, videoSize.height)) != OK) {
654        return err;
655    }
656    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
657        return err;
658    }
659
660    // Set the preview display. Skip this if mSurface is null because
661    // applications may already set a surface to the camera.
662    if (mSurface != NULL) {
663        // This CHECK is good, since we just passed the lock/unlock
664        // check earlier by calling mCamera->setParameters().
665        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
666    }
667
668    // By default, store real data in video buffers.
669    mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
670    if (storeMetaDataInVideoBuffers) {
671        if (OK == mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
672            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
673        } else if (OK == mCamera->setVideoBufferMode(
674                hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
675            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
676        }
677    }
678
679    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
680        err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
681        if (err != OK) {
682            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
683                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
684            return err;
685        }
686    }
687
688    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
689    if (glitchDurationUs > mGlitchDurationThresholdUs) {
690        mGlitchDurationThresholdUs = glitchDurationUs;
691    }
692
693    // XXX: query camera for the stride and slice height
694    // when the capability becomes available.
695    mMeta = new MetaData;
696    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
697    mMeta->setInt32(kKeyColorFormat, mColorFormat);
698    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
699    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
700    mMeta->setInt32(kKeyStride,      mVideoSize.width);
701    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
702    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
703    return OK;
704}
705
706CameraSource::~CameraSource() {
707    if (mStarted) {
708        reset();
709    } else if (mInitCheck == OK) {
710        // Camera is initialized but because start() is never called,
711        // the lock on Camera is never released(). This makes sure
712        // Camera's lock is released in this case.
713        releaseCamera();
714    }
715}
716
// Kicks off recording on the camera. Depending on the buffer mode this
// either creates the buffer queue, or configures the legacy callback
// path (buffer count, format/dataspace, metadata heap). Hot cameras
// (application-owned) are started through the recording proxy after
// unlocking; cold cameras are started directly with our listener.
// @return OK on success, or the failing step's error code.
status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;

    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Initialize buffer queue.
        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
                (android_dataspace_t)mEncoderDataSpace,
                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
        if (err != OK) {
            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
                    strerror(-err), err);
            return err;
        }
    } else {
        // Legacy callback path: best-effort configuration commands.
        if (mNumInputBuffers > 0) {
            err = mCamera->sendCommand(
                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

            // This could happen for CameraHAL1 clients; thus the failure is
            // not a fatal error
            if (err != OK) {
                ALOGW("Failed to set video buffer count to %d due to %d",
                    mNumInputBuffers, err);
            }
        }

        err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
                    mEncoderFormat, mEncoderDataSpace, err);
        }

        // Create memory heap to store buffers as VideoNativeMetadata.
        createVideoBufferMemoryHeap(sizeof(VideoNativeHandleMetadata), kDefaultVideoBufferCount);
    }

    err = OK;
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        // Application-owned camera: release our lock and local reference
        // so the app can reconnect through the proxy.
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording(
                new ProxyListener(this))) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        // Cold camera: we own it; install our listener and start directly.
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}
782
783status_t CameraSource::start(MetaData *meta) {
784    ALOGV("start");
785    CHECK(!mStarted);
786    if (mInitCheck != OK) {
787        ALOGE("CameraSource is not initialized yet");
788        return mInitCheck;
789    }
790
791    if (property_get_bool("media.stagefright.record-stats", false)) {
792        mCollectStats = true;
793    }
794
795    mStartTimeUs = 0;
796    mNumInputBuffers = 0;
797    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
798    mEncoderDataSpace = HAL_DATASPACE_V0_BT709;
799
800    if (meta) {
801        int64_t startTimeUs;
802        if (meta->findInt64(kKeyTime, &startTimeUs)) {
803            mStartTimeUs = startTimeUs;
804        }
805
806        int32_t nBuffers;
807        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
808            CHECK_GT(nBuffers, 0);
809            mNumInputBuffers = nBuffers;
810        }
811
812        // apply encoder color format if specified
813        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
814            ALOGI("Using encoder format: %#x", mEncoderFormat);
815        }
816        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
817            ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
818        }
819    }
820
821    status_t err;
822    if ((err = startCameraRecording()) == OK) {
823        mStarted = true;
824    }
825
826    return err;
827}
828
829void CameraSource::stopCameraRecording() {
830    ALOGV("stopCameraRecording");
831    if (mCameraFlags & FLAGS_HOT_CAMERA) {
832        if (mCameraRecordingProxy != 0) {
833            mCameraRecordingProxy->stopRecording();
834        }
835    } else {
836        if (mCamera != 0) {
837            mCamera->setListener(NULL);
838            mCamera->stopRecording();
839        }
840    }
841}
842
// Releases all camera references. The binder calls (stopPreview,
// disconnect, unlock) are deliberately made with mLock dropped and the
// calling identity cleared; mLock is only held while swapping members.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        // Talk to the camera service as ourselves, not as the binder caller.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        // Release the hardware lock taken in isCameraAvailable().
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            // Undo the linkToDeath from isCameraAvailable().
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
875
// Stops recording and tears down recording state: drops queued frames,
// waits (with a timeout per iteration) for frames held by the encoder to be
// returned, stops the camera recording stream, shuts down the buffer-queue
// listener thread, and finally releases the camera. Always returns OK.
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        // Wake up read(), which may be blocked waiting for a frame.
        mFrameAvailableCondition.signal();

        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        // Return frames we received but never handed to the encoder.
        releaseQueuedFrames();
        // Wait for frames currently held by the encoder to come back via
        // signalBufferReturned(); on timeout we log and keep waiting.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must by now be either encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    if (mBufferQueueListener != nullptr) {
        // Stop and join the listener thread used in buffer-queue mode.
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
930
// Returns one recording frame to its producer. The path depends on the
// video buffer mode:
//  - BUFFER_QUEUE: map the frame's VideoNativeMetadata back to the
//    BufferItem we acquired, release it to the consumer, and recycle the
//    IMemory slot into mMemoryBases.
//  - Metadata mode (frame wraps a native handle): return the handle to the
//    camera, either one-by-one or grouped into the in-flight batch size
//    recorded by recordingFrameHandleCallbackTimestampBatch().
//  - Otherwise: hand the IMemory straight back via the recording proxy or
//    our own camera.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
        // Sanity check: the frame must come from our own metadata heap.
        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
            return;
        }

        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
                (uint8_t*)heap->getBase() + offset);

        // Find the corresponding buffer item for the native window buffer.
        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
        if (index == NAME_NOT_FOUND) {
            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
            return;
        }

        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
        mReceivedBufferItemMap.removeItemsAt(index);
        mVideoBufferConsumer->releaseBuffer(buffer);
        // Recycle the metadata slot and wake any producer waiting for one.
        mMemoryBases.push_back(frame);
        mMemoryBaseAvailableCond.signal();
    } else {
        native_handle_t* handle = nullptr;

        // Check if frame contains a VideoNativeHandleMetadata.
        if (frame->size() == sizeof(VideoNativeHandleMetadata)) {
            VideoNativeHandleMetadata *metadata =
                (VideoNativeHandleMetadata*)(frame->pointer());
            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
                handle = metadata->pHandle;
            }
        }

        if (handle != nullptr) {
            // Read the current in-flight batch size (0 means frames were
            // delivered individually, not in a batch).
            uint32_t batchSize = 0;
            {
                Mutex::Autolock autoLock(mBatchLock);
                if (mInflightBatchSizes.size() > 0) {
                    batchSize = mInflightBatchSizes[0];
                }
            }
            if (batchSize == 0) { // return buffers one by one
                // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
                releaseRecordingFrameHandle(handle);
                mMemoryBases.push_back(frame);
                mMemoryBaseAvailableCond.signal();
            } else { // Group buffers in batch then return
                Mutex::Autolock autoLock(mBatchLock);
                mInflightReturnedHandles.push_back(handle);
                mInflightReturnedMemorys.push_back(frame);
                // Once the whole batch has been returned by the encoder,
                // release all handles in one IPC and recycle the slots.
                if (mInflightReturnedHandles.size() == batchSize) {
                    releaseRecordingFrameHandleBatch(mInflightReturnedHandles);

                    mInflightBatchSizes.pop_front();
                    mInflightReturnedHandles.clear();
                    for (const auto& mem : mInflightReturnedMemorys) {
                        mMemoryBases.push_back(mem);
                        mMemoryBaseAvailableCond.signal();
                    }
                    mInflightReturnedMemorys.clear();
                }
            }

        } else if (mCameraRecordingProxy != nullptr) {
            // mCamera is created by application. Return the frame back to camera via camera
            // recording proxy.
            mCameraRecordingProxy->releaseRecordingFrame(frame);
        } else if (mCamera != nullptr) {
            // mCamera is created by CameraSource. Return the frame directly back to camera.
            int64_t token = IPCThreadState::self()->clearCallingIdentity();
            mCamera->releaseRecordingFrame(frame);
            IPCThreadState::self()->restoreCallingIdentity(token);
        }
    }
}
1014
1015void CameraSource::releaseQueuedFrames() {
1016    List<sp<IMemory> >::iterator it;
1017    while (!mFramesReceived.empty()) {
1018        it = mFramesReceived.begin();
1019        releaseRecordingFrame(*it);
1020        mFramesReceived.erase(it);
1021        ++mNumFramesDropped;
1022    }
1023}
1024
// Returns the metadata describing this source's output format.
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
1028
// Returns a single recording frame to the camera; thin wrapper around
// releaseRecordingFrame().
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
1032
// MediaBufferObserver callback: the encoder is done with |buffer|. Finds
// the matching IMemory in mFramesBeingEncoded, returns it to the camera,
// and signals mFrameCompleteCondition (reset() may be waiting on it).
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        // Match by the underlying data pointer the MediaBuffer wraps.
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            // Detach and drop our reference to the MediaBuffer wrapper.
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    // A buffer we never handed out indicates a programming error.
    CHECK(!"signalBufferReturned: bogus buffer");
}
1050
// MediaSource::read(): blocks until a frame is available (or the source is
// stopped), then wraps the oldest received frame in a MediaBuffer without
// copying the pixel data. Seeking is not supported.
// Returns OK (with *buffer == NULL if stopped while waiting), or
// ERROR_END_OF_STREAM if the recording proxy's binder died.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Wait (bounded per iteration) for a frame to arrive.
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                // If the app-side recording proxy died there will be no
                // more frames: report end of stream.
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        // Stopped while waiting: return OK with *buffer left as NULL.
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        // Track the frame until the encoder returns it via
        // signalBufferReturned().
        mFramesBeingEncoded.push_back(frame);
        // Zero-copy: the MediaBuffer points into the IMemory's storage.
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
1097
// Decides (with mLock held) whether the frame at |timestampUs| should be
// dropped, and maintains the timing bookkeeping shared by all frame
// delivery paths: glitch counting, mLastFrameTimestampUs,
// mFirstFrameTimeUs, and converting mStartTimeUs into the initial-delay
// offset on the first accepted frame. Returns true to drop the frame.
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Drop if not started, or if the frame predates the requested start time.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // Count unusually long gaps between neighboring frames.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // From here on, mStartTimeUs holds the initial delay (capture
            // time of first frame minus requested start time).
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
1137
// Camera callback delivering one recorded frame as pixel data in |data|.
// Drops the frame if shouldSkipFrameLocked() says so; otherwise queues it
// (with a timestamp rebased onto the initial delay) for read().
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType __unused, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);

    if (shouldSkipFrameLocked(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    // Rebase: initial delay plus elapsed time since the first frame.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    // Wake read(), which may be blocked waiting for a frame.
    mFrameAvailableCondition.signal();
}
1158
1159void CameraSource::releaseRecordingFrameHandle(native_handle_t* handle) {
1160    if (mCameraRecordingProxy != nullptr) {
1161        mCameraRecordingProxy->releaseRecordingFrameHandle(handle);
1162    } else if (mCamera != nullptr) {
1163        int64_t token = IPCThreadState::self()->clearCallingIdentity();
1164        mCamera->releaseRecordingFrameHandle(handle);
1165        IPCThreadState::self()->restoreCallingIdentity(token);
1166    } else {
1167        native_handle_close(handle);
1168        native_handle_delete(handle);
1169    }
1170}
1171
1172void CameraSource::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
1173    if (mCameraRecordingProxy != nullptr) {
1174        mCameraRecordingProxy->releaseRecordingFrameHandleBatch(handles);
1175    } else if (mCamera != nullptr) {
1176        int64_t token = IPCThreadState::self()->clearCallingIdentity();
1177        mCamera->releaseRecordingFrameHandleBatch(handles);
1178        IPCThreadState::self()->restoreCallingIdentity(token);
1179    } else {
1180        for (auto& handle : handles) {
1181            native_handle_close(handle);
1182            native_handle_delete(handle);
1183        }
1184    }
1185}
1186
// Camera callback delivering one recorded frame as a native handle
// (metadata mode). Wraps the handle in a recycled IMemory slot as
// VideoNativeHandleMetadata and queues it for read(). Drops the frame if
// it should be skipped or if no slot frees up within the timeout.
void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
                native_handle_t* handle) {
    ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (handle == nullptr) return;

    if (shouldSkipFrameLocked(timestampUs)) {
        releaseRecordingFrameHandle(handle);
        return;
    }

    // Wait (bounded) for a free metadata slot; on timeout, drop the frame
    // and hand its handle back.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            releaseRecordingFrameHandle(handle);
            return;
        }
    }

    ++mNumFramesReceived;

    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
    VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->pointer());
    metadata->eType = kMetadataBufferTypeNativeHandleSource;
    metadata->pHandle = handle;

    mFramesReceived.push_back(data);
    // Rebase: initial delay plus elapsed time since the first frame.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1223
1224void CameraSource::recordingFrameHandleCallbackTimestampBatch(
1225        const std::vector<int64_t>& timestampsUs,
1226        const std::vector<native_handle_t*>& handles) {
1227    size_t n = timestampsUs.size();
1228    if (n != handles.size()) {
1229        ALOGE("%s: timestampsUs(%zu) and handles(%zu) size mismatch!",
1230                __FUNCTION__, timestampsUs.size(), handles.size());
1231    }
1232
1233    Mutex::Autolock autoLock(mLock);
1234    int batchSize = 0;
1235    for (size_t i = 0; i < n; i++) {
1236        int64_t timestampUs = timestampsUs[i];
1237        native_handle_t* handle = handles[i];
1238
1239        ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
1240        if (handle == nullptr) continue;
1241
1242        if (shouldSkipFrameLocked(timestampUs)) {
1243            releaseRecordingFrameHandle(handle);
1244            continue;
1245        }
1246
1247        while (mMemoryBases.empty()) {
1248            if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
1249                    TIMED_OUT) {
1250                ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
1251                releaseRecordingFrameHandle(handle);
1252                continue;
1253            }
1254        }
1255        ++batchSize;
1256        ++mNumFramesReceived;
1257        sp<IMemory> data = *mMemoryBases.begin();
1258        mMemoryBases.erase(mMemoryBases.begin());
1259
1260        // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
1261        VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->pointer());
1262        metadata->eType = kMetadataBufferTypeNativeHandleSource;
1263        metadata->pHandle = handle;
1264
1265        mFramesReceived.push_back(data);
1266        int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
1267        mFrameTimes.push_back(timeUs);
1268        ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
1269
1270    }
1271    if (batchSize > 0) {
1272        Mutex::Autolock autoLock(mBatchLock);
1273        mInflightBatchSizes.push_back(batchSize);
1274    }
1275    for (int i = 0; i < batchSize; i++) {
1276        mFrameAvailableCondition.signal();
1277    }
1278}
1279
1280CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
1281        const sp<CameraSource>& cameraSource) {
1282    mConsumer = consumer;
1283    mConsumer->setFrameAvailableListener(this);
1284    mCameraSource = cameraSource;
1285}
1286
1287void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
1288    ALOGV("%s: onFrameAvailable", __FUNCTION__);
1289
1290    Mutex::Autolock l(mLock);
1291
1292    if (!mFrameAvailable) {
1293        mFrameAvailable = true;
1294        mFrameAvailableSignal.signal();
1295    }
1296}
1297
1298bool CameraSource::BufferQueueListener::threadLoop() {
1299    if (mConsumer == nullptr || mCameraSource == nullptr) {
1300        return false;
1301    }
1302
1303    {
1304        Mutex::Autolock l(mLock);
1305        while (!mFrameAvailable) {
1306            if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1307                return true;
1308            }
1309        }
1310        mFrameAvailable = false;
1311    }
1312
1313    BufferItem buffer;
1314    while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1315        mCameraSource->processBufferQueueFrame(buffer);
1316    }
1317
1318    return true;
1319}
1320
// Processes one buffer acquired from the buffer queue: writes a
// VideoNativeMetadata describing the buffer into a recycled IMemory slot,
// records the payload->BufferItem mapping for release, and queues the slot
// for read(). Drops the frame if it should be skipped or no slot frees up
// within the timeout.
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // Buffer timestamps are in ns; this class tracks us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    // Write the metadata directly into the slot's backing heap storage.
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // Rebase: initial delay plus elapsed time since the first frame.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1366
1367MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
1368    ALOGV("metaDataStoredInVideoBuffers");
1369
1370    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
1371    // buffer queue.
1372    switch (mVideoBufferMode) {
1373        case hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA:
1374            return kMetadataBufferTypeNativeHandleSource;
1375        case hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE:
1376            return kMetadataBufferTypeANWBuffer;
1377        default:
1378            return kMetadataBufferTypeInvalid;
1379    }
1380}
1381
1382CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
1383    mSource = source;
1384}
1385
// Forwards a frame callback from the recording proxy to the CameraSource,
// converting the timestamp from nanoseconds to microseconds.
void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}
1390
// Forwards a native-handle frame callback from the recording proxy to the
// CameraSource, converting the timestamp from nanoseconds to microseconds.
void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
        native_handle_t* handle) {
    mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
}
1395
1396void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestampBatch(
1397        const std::vector<int64_t>& timestampsUs,
1398        const std::vector<native_handle_t*>& handles) {
1399    int n = timestampsUs.size();
1400    std::vector<nsecs_t> modifiedTimestamps(n);
1401    for (int i = 0; i < n; i++) {
1402        modifiedTimestamps[i] = timestampsUs[i] / 1000;
1403    }
1404    mSource->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
1405}
1406
// Called when the app-side camera recording proxy's binder dies. Only logs
// here; read() separately detects the dead proxy via isBinderAlive() and
// returns ERROR_END_OF_STREAM.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1410
1411}  // namespace android
1412