1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <inttypes.h>
18
19//#define LOG_NDEBUG 0
20#define LOG_TAG "CameraSource"
21#include <utils/Log.h>
22
23#include <OMX_Component.h>
24#include <binder/IPCThreadState.h>
25#include <binder/MemoryBase.h>
26#include <binder/MemoryHeapBase.h>
27#include <media/hardware/HardwareAPI.h>
28#include <media/stagefright/foundation/ADebug.h>
29#include <media/stagefright/CameraSource.h>
30#include <media/stagefright/MediaDefs.h>
31#include <media/stagefright/MediaErrors.h>
32#include <media/stagefright/MetaData.h>
33#include <camera/Camera.h>
34#include <camera/CameraParameters.h>
35#include <gui/Surface.h>
36#include <utils/String8.h>
37#include <cutils/properties.h>
38
39#if LOG_NDEBUG
40#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
41#else
42#define UNUSED_UNLESS_VERBOSE(x)
43#endif
44
45namespace android {
46
47static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
48
// Listener registered with a (cold) Camera that forwards camera events and
// frame data to a CameraSource. Holds only a weak reference to the source
// so the listener never extends its lifetime.
struct CameraSourceListener : public CameraListener {
    explicit CameraSourceListener(const sp<CameraSource> &source);

    // Generic camera notification (error, focus, ...); logged only.
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Preview/snapshot data callback; forwarded to the source.
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    // Recording frame in shared memory; timestamp is in nanoseconds and is
    // converted to microseconds before reaching the source.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

    // Recording frame delivered as a native handle (metadata mode).
    virtual void postRecordingFrameHandleTimestamp(nsecs_t timestamp, native_handle_t* handle);

    // Batched variant of postRecordingFrameHandleTimestamp; timestamps[i]
    // corresponds to handles[i].
    virtual void postRecordingFrameHandleTimestampBatch(
                const std::vector<nsecs_t>& timestamps,
                const std::vector<native_handle_t*>& handles);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;  // weak ref; promoted on each callback

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
74
75CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
76    : mSource(source) {
77}
78
79CameraSourceListener::~CameraSourceListener() {
80}
81
82void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
83    UNUSED_UNLESS_VERBOSE(msgType);
84    UNUSED_UNLESS_VERBOSE(ext1);
85    UNUSED_UNLESS_VERBOSE(ext2);
86    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
87}
88
89void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
90                                    camera_frame_metadata_t * /* metadata */) {
91    ALOGV("postData(%d, ptr:%p, size:%zu)",
92         msgType, dataPtr->pointer(), dataPtr->size());
93
94    sp<CameraSource> source = mSource.promote();
95    if (source.get() != NULL) {
96        source->dataCallback(msgType, dataPtr);
97    }
98}
99
100void CameraSourceListener::postDataTimestamp(
101        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
102
103    sp<CameraSource> source = mSource.promote();
104    if (source.get() != NULL) {
105        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
106    }
107}
108
109void CameraSourceListener::postRecordingFrameHandleTimestamp(nsecs_t timestamp,
110        native_handle_t* handle) {
111    sp<CameraSource> source = mSource.promote();
112    if (source.get() != nullptr) {
113        source->recordingFrameHandleCallbackTimestamp(timestamp/1000, handle);
114    }
115}
116
117void CameraSourceListener::postRecordingFrameHandleTimestampBatch(
118        const std::vector<nsecs_t>& timestamps,
119        const std::vector<native_handle_t*>& handles) {
120    sp<CameraSource> source = mSource.promote();
121    if (source.get() != nullptr) {
122        int n = timestamps.size();
123        std::vector<nsecs_t> modifiedTimestamps(n);
124        for (int i = 0; i < n; i++) {
125            modifiedTimestamps[i] = timestamps[i] / 1000;
126        }
127        source->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
128    }
129}
130
131static int32_t getColorFormat(const char* colorFormat) {
132    if (!colorFormat) {
133        ALOGE("Invalid color format");
134        return -1;
135    }
136
137    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
138       return OMX_COLOR_FormatYUV420Planar;
139    }
140
141    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
142       return OMX_COLOR_FormatYUV422SemiPlanar;
143    }
144
145    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
146        return OMX_COLOR_FormatYUV420SemiPlanar;
147    }
148
149    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
150        return OMX_COLOR_FormatYCbYCr;
151    }
152
153    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
154       return OMX_COLOR_Format16bitRGB565;
155    }
156
157    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
158       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
159    }
160
161    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
162        return OMX_COLOR_FormatAndroidOpaque;
163    }
164
165    ALOGE("Uknown color format (%s), please add it to "
166         "CameraSource::getColorFormat", colorFormat);
167
168    CHECK(!"Unknown color format");
169    return -1;
170}
171
172CameraSource *CameraSource::Create(const String16 &clientName) {
173    Size size;
174    size.width = -1;
175    size.height = -1;
176
177    sp<hardware::ICamera> camera;
178    return new CameraSource(camera, NULL, 0, clientName, Camera::USE_CALLING_UID,
179            Camera::USE_CALLING_PID, size, -1, NULL, false);
180}
181
182// static
183CameraSource *CameraSource::CreateFromCamera(
184    const sp<hardware::ICamera>& camera,
185    const sp<ICameraRecordingProxy>& proxy,
186    int32_t cameraId,
187    const String16& clientName,
188    uid_t clientUid,
189    pid_t clientPid,
190    Size videoSize,
191    int32_t frameRate,
192    const sp<IGraphicBufferProducer>& surface,
193    bool storeMetaDataInVideoBuffers) {
194
195    CameraSource *source = new CameraSource(camera, proxy, cameraId,
196            clientName, clientUid, clientPid, videoSize, frameRate, surface,
197            storeMetaDataInVideoBuffers);
198    return source;
199}
200
// Construct a CameraSource and immediately run init(); the result is
// recorded in mInitCheck (query via initCheck()). On failure the camera
// is released before the constructor returns.
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mStopSystemTimeUs(-1),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1/-1 means "use whatever size the camera is currently set to";
    // init() overwrites these with the actual values on success.
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}
237
// Returns the status recorded by init() during construction. Callers must
// see OK here before using this source.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
241
// Despite the name, this establishes the camera connection and records it
// in mCamera. With a NULL ICamera we connect to cameraId ourselves (a
// "cold" camera); otherwise we wrap the app-supplied ICamera (a "hot"
// camera) and keep the app's recording proxy for start/stop of recording.
// On success the hardware lock is taken so only we can change settings.
// @return OK on success, -EBUSY if the connection could not be made.
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Take the hardware lock; released in releaseCamera() or on start.
    mCamera->lock();

    return OK;
}
268
269
270/*
271 * Check to see whether the requested video width and height is one
272 * of the supported sizes.
273 * @param width the video frame width in pixels
274 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
276 * @return true if the dimension (width and height) is supported.
277 */
278static bool isVideoSizeSupported(
279    int32_t width, int32_t height,
280    const Vector<Size>& supportedSizes) {
281
282    ALOGV("isVideoSizeSupported");
283    for (size_t i = 0; i < supportedSizes.size(); ++i) {
284        if (width  == supportedSizes[i].width &&
285            height == supportedSizes[i].height) {
286            return true;
287        }
288    }
289    return false;
290}
291
292/*
293 * If the preview and video output is separate, we only set the
294 * the video size, and applications should set the preview size
295 * to some proper value, and the recording framework will not
296 * change the preview size; otherwise, if the video and preview
297 * output is the same, we need to set the preview to be the same
298 * as the requested video size.
299 *
300 */
301/*
302 * Query the camera to retrieve the supported video frame sizes
303 * and also to see whether CameraParameters::setVideoSize()
304 * is supported or not.
305 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
308 * @param sizes returns the vector of Size objects for the
309 *      supported video frame sizes advertised by the camera.
310 */
311static void getSupportedVideoSizes(
312    const CameraParameters& params,
313    bool *isSetVideoSizeSupported,
314    Vector<Size>& sizes) {
315
316    *isSetVideoSizeSupported = true;
317    params.getSupportedVideoSizes(sizes);
318    if (sizes.size() == 0) {
319        ALOGD("Camera does not support setVideoSize()");
320        params.getSupportedPreviewSizes(sizes);
321        *isSetVideoSizeSupported = false;
322    }
323}
324
325/*
326 * Check whether the camera has the supported color format
327 * @param params CameraParameters to retrieve the information
328 * @return OK if no error.
329 */
330status_t CameraSource::isCameraColorFormatSupported(
331        const CameraParameters& params) {
332    mColorFormat = getColorFormat(params.get(
333            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
334    if (mColorFormat == -1) {
335        return BAD_VALUE;
336    }
337    return OK;
338}
339
340/*
341 * Configure the camera to use the requested video size
342 * (width and height) and/or frame rate. If both width and
343 * height are -1, configuration on the video size is skipped.
344 * if frameRate is -1, configuration on the frame rate
345 * is skipped. Skipping the configuration allows one to
346 * use the current camera setting without the need to
347 * actually know the specific values (see Create() method).
348 *
349 * @param params the CameraParameters to be configured
350 * @param width the target video frame width in pixels
351 * @param height the target video frame height in pixels
352 * @param frameRate the target frame rate in frames per second.
353 * @return OK if no error.
354 */
355status_t CameraSource::configureCamera(
356        CameraParameters* params,
357        int32_t width, int32_t height,
358        int32_t frameRate) {
359    ALOGV("configureCamera");
360    Vector<Size> sizes;
361    bool isSetVideoSizeSupportedByCamera = true;
362    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
363    bool isCameraParamChanged = false;
364    if (width != -1 && height != -1) {
365        if (!isVideoSizeSupported(width, height, sizes)) {
366            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
367            return BAD_VALUE;
368        }
369        if (isSetVideoSizeSupportedByCamera) {
370            params->setVideoSize(width, height);
371        } else {
372            params->setPreviewSize(width, height);
373        }
374        isCameraParamChanged = true;
375    } else if ((width == -1 && height != -1) ||
376               (width != -1 && height == -1)) {
377        // If one and only one of the width and height is -1
378        // we reject such a request.
379        ALOGE("Requested video size (%dx%d) is not supported", width, height);
380        return BAD_VALUE;
381    } else {  // width == -1 && height == -1
382        // Do not configure the camera.
383        // Use the current width and height value setting from the camera.
384    }
385
386    if (frameRate != -1) {
387        CHECK(frameRate > 0 && frameRate <= 120);
388        const char* supportedFrameRates =
389                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
390        CHECK(supportedFrameRates != NULL);
391        ALOGV("Supported frame rates: %s", supportedFrameRates);
392        char buf[4];
393        snprintf(buf, 4, "%d", frameRate);
394        if (strstr(supportedFrameRates, buf) == NULL) {
395            ALOGE("Requested frame rate (%d) is not supported: %s",
396                frameRate, supportedFrameRates);
397            return BAD_VALUE;
398        }
399
400        // The frame rate is supported, set the camera to the requested value.
401        params->setPreviewFrameRate(frameRate);
402        isCameraParamChanged = true;
403    } else {  // frameRate == -1
404        // Do not configure the camera.
405        // Use the current frame rate value setting from the camera
406    }
407
408    if (isCameraParamChanged) {
409        // Either frame rate or frame size needs to be changed.
410        String8 s = params->flatten();
411        if (OK != mCamera->setParameters(s)) {
412            ALOGE("Could not change settings."
413                 " Someone else is using camera %p?", mCamera.get());
414            return -EBUSY;
415        }
416    }
417    return OK;
418}
419
420/*
421 * Check whether the requested video frame size
422 * has been successfully configured or not. If both width and height
423 * are -1, check on the current width and height value setting
424 * is performed.
425 *
426 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
429 * @return OK if no error
430 */
431status_t CameraSource::checkVideoSize(
432        const CameraParameters& params,
433        int32_t width, int32_t height) {
434
435    ALOGV("checkVideoSize");
436    // The actual video size is the same as the preview size
437    // if the camera hal does not support separate video and
438    // preview output. In this case, we retrieve the video
439    // size from preview.
440    int32_t frameWidthActual = -1;
441    int32_t frameHeightActual = -1;
442    Vector<Size> sizes;
443    params.getSupportedVideoSizes(sizes);
444    if (sizes.size() == 0) {
445        // video size is the same as preview size
446        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
447    } else {
448        // video size may not be the same as preview
449        params.getVideoSize(&frameWidthActual, &frameHeightActual);
450    }
451    if (frameWidthActual < 0 || frameHeightActual < 0) {
452        ALOGE("Failed to retrieve video frame size (%dx%d)",
453                frameWidthActual, frameHeightActual);
454        return UNKNOWN_ERROR;
455    }
456
457    // Check the actual video frame size against the target/requested
458    // video frame size.
459    if (width != -1 && height != -1) {
460        if (frameWidthActual != width || frameHeightActual != height) {
461            ALOGE("Failed to set video frame size to %dx%d. "
462                    "The actual video size is %dx%d ", width, height,
463                    frameWidthActual, frameHeightActual);
464            return UNKNOWN_ERROR;
465        }
466    }
467
468    // Good now.
469    mVideoSize.width = frameWidthActual;
470    mVideoSize.height = frameHeightActual;
471    return OK;
472}
473
474/*
475 * Check the requested frame rate has been successfully configured or not.
476 * If the target frameRate is -1, check on the current frame rate value
477 * setting is performed.
478 *
479 * @param params CameraParameters to retrieve the information
480 * @param the target video frame rate to check against
481 * @return OK if no error.
482 */
483status_t CameraSource::checkFrameRate(
484        const CameraParameters& params,
485        int32_t frameRate) {
486
487    ALOGV("checkFrameRate");
488    int32_t frameRateActual = params.getPreviewFrameRate();
489    if (frameRateActual < 0) {
490        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
491        return UNKNOWN_ERROR;
492    }
493
494    // Check the actual video frame rate against the target/requested
495    // video frame rate.
496    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
497        ALOGE("Failed to set preview frame rate to %d fps. The actual "
498                "frame rate is %d", frameRate, frameRateActual);
499        return UNKNOWN_ERROR;
500    }
501
502    // Good now.
503    mVideoFrameRate = frameRateActual;
504    return OK;
505}
506
507/*
508 * Initialize the CameraSource to so that it becomes
509 * ready for providing the video input streams as requested.
510 * @param camera the camera object used for the video source
511 * @param cameraId if camera == 0, use camera with this id
512 *      as the video source
513 * @param videoSize the target video frame size. If both
514 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
516 * @param frameRate the target frame rate in frames per second.
517 *      if it is -1, use the current camera frame rate setting.
518 * @param storeMetaDataInVideoBuffers request to store meta
519 *      data or real YUV data in video buffers. Request to
520 *      store meta data in video buffers may not be honored
521 *      if the source does not support this feature.
522 *
523 * @return OK if no error.
524 */
525status_t CameraSource::init(
526        const sp<hardware::ICamera>& camera,
527        const sp<ICameraRecordingProxy>& proxy,
528        int32_t cameraId,
529        const String16& clientName,
530        uid_t clientUid,
531        pid_t clientPid,
532        Size videoSize,
533        int32_t frameRate,
534        bool storeMetaDataInVideoBuffers) {
535
536    ALOGV("init");
537    status_t err = OK;
538    int64_t token = IPCThreadState::self()->clearCallingIdentity();
539    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
540                               videoSize, frameRate,
541                               storeMetaDataInVideoBuffers);
542    IPCThreadState::self()->restoreCallingIdentity(token);
543    return err;
544}
545
// Allocate one shared-memory heap of (size * bufferCount) bytes and carve
// it into bufferCount MemoryBase slices of `size` bytes each, appended to
// mMemoryBases.
// NOTE(review): size * bufferCount is unchecked multiplication; callers
// here pass small metadata structs, but confirm no overflow path exists.
void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
    mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
            "StageFright-CameraSource-BufferHeap");
    for (uint32_t i = 0; i < bufferCount; i++) {
        mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
    }
}
553
// Set up the BufferQueue video path (VIDEO_BUFFER_MODE_BUFFER_QUEUE):
// create a producer/consumer pair, configure the consumer's default
// size/format/dataspace, hand the producer to the camera as its video
// target, allocate the metadata heap, and start the listener thread that
// will receive frames.
// @return OK, ALREADY_EXISTS if a queue was already created, or the
//         first failing status.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // Opaque implementation-defined buffers go straight to the encoder;
    // any other format is read by the CPU.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    // Reserve extra buffers for the consumer side on top of what the
    // encoder requested.
    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // Point the camera's video output at our queue.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);

    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
619
620status_t CameraSource::initWithCameraAccess(
621        const sp<hardware::ICamera>& camera,
622        const sp<ICameraRecordingProxy>& proxy,
623        int32_t cameraId,
624        const String16& clientName,
625        uid_t clientUid,
626        pid_t clientPid,
627        Size videoSize,
628        int32_t frameRate,
629        bool storeMetaDataInVideoBuffers) {
630    ALOGV("initWithCameraAccess");
631    status_t err = OK;
632
633    if ((err = isCameraAvailable(camera, proxy, cameraId,
634            clientName, clientUid, clientPid)) != OK) {
635        ALOGE("Camera connection could not be established.");
636        return err;
637    }
638    CameraParameters params(mCamera->getParameters());
639    if ((err = isCameraColorFormatSupported(params)) != OK) {
640        return err;
641    }
642
643    // Set the camera to use the requested video frame size
644    // and/or frame rate.
645    if ((err = configureCamera(&params,
646                    videoSize.width, videoSize.height,
647                    frameRate))) {
648        return err;
649    }
650
651    // Check on video frame size and frame rate.
652    CameraParameters newCameraParams(mCamera->getParameters());
653    if ((err = checkVideoSize(newCameraParams,
654                videoSize.width, videoSize.height)) != OK) {
655        return err;
656    }
657    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
658        return err;
659    }
660
661    // Set the preview display. Skip this if mSurface is null because
662    // applications may already set a surface to the camera.
663    if (mSurface != NULL) {
664        // This CHECK is good, since we just passed the lock/unlock
665        // check earlier by calling mCamera->setParameters().
666        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
667    }
668
669    // By default, store real data in video buffers.
670    mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
671    if (storeMetaDataInVideoBuffers) {
672        if (OK == mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE)) {
673            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE;
674        } else if (OK == mCamera->setVideoBufferMode(
675                hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA)) {
676            mVideoBufferMode = hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA;
677        }
678    }
679
680    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
681        err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV);
682        if (err != OK) {
683            ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV failed: "
684                    "%s (err=%d)", __FUNCTION__, strerror(-err), err);
685            return err;
686        }
687    }
688
689    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
690    if (glitchDurationUs > mGlitchDurationThresholdUs) {
691        mGlitchDurationThresholdUs = glitchDurationUs;
692    }
693
694    // XXX: query camera for the stride and slice height
695    // when the capability becomes available.
696    mMeta = new MetaData;
697    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
698    mMeta->setInt32(kKeyColorFormat, mColorFormat);
699    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
700    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
701    mMeta->setInt32(kKeyStride,      mVideoSize.width);
702    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
703    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
704    return OK;
705}
706
707CameraSource::~CameraSource() {
708    if (mStarted) {
709        reset();
710    } else if (mInitCheck == OK) {
711        // Camera is initialized but because start() is never called,
712        // the lock on Camera is never released(). This makes sure
713        // Camera's lock is released in this case.
714        releaseCamera();
715    }
716}
717
// Start frames flowing from the camera, using either the BufferQueue path
// or the legacy callback path, then kick off recording via the app's
// recording proxy (hot camera) or our own Camera (cold camera).
// @return OK on success, or the first fatal error.
status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;

    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Initialize buffer queue.
        err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
                (android_dataspace_t)mEncoderDataSpace,
                mNumInputBuffers > 0 ? mNumInputBuffers : 1);
        if (err != OK) {
            ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
                    strerror(-err), err);
            return err;
        }
    } else {
        // Legacy callback path: tell the HAL how many buffers and which
        // format/dataspace to use; both commands are best-effort.
        if (mNumInputBuffers > 0) {
            err = mCamera->sendCommand(
                CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

            // This could happen for CameraHAL1 clients; thus the failure is
            // not a fatal error
            if (err != OK) {
                ALOGW("Failed to set video buffer count to %d due to %d",
                    mNumInputBuffers, err);
            }
        }

        err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d",
                    mEncoderFormat, mEncoderDataSpace, err);
        }

        // Create memory heap to store buffers as VideoNativeMetadata.
        createVideoBufferMemoryHeap(sizeof(VideoNativeHandleMetadata), kDefaultVideoBufferCount);
    }

    err = OK;
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        // Hot camera: give the lock back to the app and start through its
        // recording proxy.
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording(
                new ProxyListener(this))) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        // Cold camera: we own it; register our listener and record directly.
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}
783
// Start video capture. Optional `meta` keys override the defaults:
// kKeyTime (start time, us), kKeyNumBuffers (input buffer count, > 0),
// kKeyPixelFormat and kKeyColorSpace (encoder format/dataspace).
// Must not be called while already started; requires a successful init.
// @return OK on success, otherwise the init or camera-start error.
status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Debug property: collect and log frame statistics during reset().
    if (property_get_bool("media.stagefright.record-stats", false)) {
        mCollectStats = true;
    }

    // Defaults, possibly overridden by `meta` below.
    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    mEncoderDataSpace = HAL_DATASPACE_V0_BT709;

    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }

        // apply encoder color format if specified
        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
            ALOGI("Using encoder format: %#x", mEncoderFormat);
        }
        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
            ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
        }
    }

    status_t err;
    if ((err = startCameraRecording()) == OK) {
        mStarted = true;
    }

    return err;
}
829
830void CameraSource::stopCameraRecording() {
831    ALOGV("stopCameraRecording");
832    if (mCameraFlags & FLAGS_HOT_CAMERA) {
833        if (mCameraRecordingProxy != 0) {
834            mCameraRecordingProxy->stopRecording();
835        }
836    } else {
837        if (mCamera != 0) {
838            mCamera->setListener(NULL);
839            mCamera->stopRecording();
840        }
841    }
842}
843
// Release our hold on the camera: for a cold camera (one we connected
// ourselves) stop preview and disconnect; in all cases drop the hardware
// lock, then unlink and clear the recording proxy. The camera calls are
// made outside mLock with the binder identity cleared.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
876
// Stops recording and tears everything down: drains queued frames, waits for
// the encoder to return in-flight frames, stops the camera, shuts down the
// buffer-queue listener thread, and finally releases the camera. Always
// returns OK.
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mStopSystemTimeUs = -1;
        // Wake any reader blocked in read() so it can observe !mStarted.
        mFrameAvailableCondition.signal();

        // Binder calls made while stopping must carry this process's own
        // identity, not the caller's.
        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        // Return all frames still queued for the encoder back to the camera.
        releaseQueuedFrames();
        // Wait for the encoder to hand back every in-flight frame; each
        // timeout only logs a warning and we keep waiting.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must be accounted for as encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    // Stop the buffer-queue listener thread, if one was running.
    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
932
// Returns |frame| to its producer once the encoder is done with it. The
// return path depends on the video buffer mode:
//  - buffer-queue mode: release the wrapped BufferItem back to the consumer;
//  - metadata (native handle) mode: send the handle back to the camera,
//    individually or grouped into the original batch;
//  - otherwise: hand the frame back via the recording proxy or the camera.
// In the first two cases the IMemory slot is also recycled into
// mMemoryBases for future frames.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    if (mVideoBufferMode == hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
        // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
        // The frame must live in the metadata heap this source allocated.
        if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
            ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                    heap->getHeapID(), mMemoryHeapBase->getHeapID());
            return;
        }

        // The slot holds a VideoNativeMetadata written by
        // processBufferQueueFrame().
        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
                (uint8_t*)heap->getBase() + offset);

        // Find the corresponding buffer item for the native window buffer.
        ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
        if (index == NAME_NOT_FOUND) {
            ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
            return;
        }

        BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
        mReceivedBufferItemMap.removeItemsAt(index);
        mVideoBufferConsumer->releaseBuffer(buffer);
        // Recycle the IMemory slot and wake any callback waiting for one.
        mMemoryBases.push_back(frame);
        mMemoryBaseAvailableCond.signal();
    } else {
        native_handle_t* handle = nullptr;

        // Check if frame contains a VideoNativeHandleMetadata.
        if (frame->size() == sizeof(VideoNativeHandleMetadata)) {
            VideoNativeHandleMetadata *metadata =
                (VideoNativeHandleMetadata*)(frame->pointer());
            if (metadata->eType == kMetadataBufferTypeNativeHandleSource) {
                handle = metadata->pHandle;
            }
        }

        if (handle != nullptr) {
            ssize_t offset;
            size_t size;
            sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
            // Same heap sanity check as the buffer-queue path above.
            if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
                ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)",
		     __FUNCTION__, heap->getHeapID(), mMemoryHeapBase->getHeapID());
                return;
            }
            // A non-zero batch size means this frame arrived via the batched
            // callback and must be returned as part of that batch.
            uint32_t batchSize = 0;
            {
                Mutex::Autolock autoLock(mBatchLock);
                if (mInflightBatchSizes.size() > 0) {
                    batchSize = mInflightBatchSizes[0];
                }
            }
            if (batchSize == 0) { // return buffers one by one
                // Frame contains a VideoNativeHandleMetadata. Send the handle back to camera.
                releaseRecordingFrameHandle(handle);
                mMemoryBases.push_back(frame);
                mMemoryBaseAvailableCond.signal();
            } else { // Group buffers in batch then return
                // Collect returned handles until the whole batch is back,
                // then release them together and recycle all the slots.
                Mutex::Autolock autoLock(mBatchLock);
                mInflightReturnedHandles.push_back(handle);
                mInflightReturnedMemorys.push_back(frame);
                if (mInflightReturnedHandles.size() == batchSize) {
                    releaseRecordingFrameHandleBatch(mInflightReturnedHandles);

                    mInflightBatchSizes.pop_front();
                    mInflightReturnedHandles.clear();
                    for (const auto& mem : mInflightReturnedMemorys) {
                        mMemoryBases.push_back(mem);
                        mMemoryBaseAvailableCond.signal();
                    }
                    mInflightReturnedMemorys.clear();
                }
            }

        } else if (mCameraRecordingProxy != nullptr) {
            // mCamera is created by application. Return the frame back to camera via camera
            // recording proxy.
            mCameraRecordingProxy->releaseRecordingFrame(frame);
        } else if (mCamera != nullptr) {
            // mCamera is created by CameraSource. Return the frame directly back to camera.
            int64_t token = IPCThreadState::self()->clearCallingIdentity();
            mCamera->releaseRecordingFrame(frame);
            IPCThreadState::self()->restoreCallingIdentity(token);
        }
    }
}
1024
1025void CameraSource::releaseQueuedFrames() {
1026    List<sp<IMemory> >::iterator it;
1027    while (!mFramesReceived.empty()) {
1028        it = mFramesReceived.begin();
1029        releaseRecordingFrame(*it);
1030        mFramesReceived.erase(it);
1031        ++mNumFramesDropped;
1032    }
1033}
1034
1035sp<MetaData> CameraSource::getFormat() {
1036    return mMeta;
1037}
1038
// Convenience wrapper that returns a single frame to the camera.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
1042
// Called when the encoder is done with |buffer|. Finds the IMemory frame
// backing it, returns that frame to the camera, and removes it from the
// in-flight list. Aborts if the buffer was never handed out by read().
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            // Detach the observer before releasing our reference.
            buffer->setObserver(0);
            buffer->release();
            // Wake reset(), which may be waiting for in-flight frames.
            mFrameCompleteCondition.signal();
            return;
        }
    }
    // A buffer that isn't in mFramesBeingEncoded is a fatal logic error.
    CHECK(!"signalBufferReturned: bogus buffer");
}
1060
// Blocks until a camera frame is available, then returns it wrapped in a
// MediaBuffer (zero-copy; the buffer points into the frame's IMemory).
// Returns OK with *buffer == NULL if the source has been stopped, and
// ERROR_END_OF_STREAM if the recording proxy's binder dies while waiting.
// Seeking is not supported.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    // Seeking makes no sense for a live camera feed.
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Wait for a frame or for stop. On each wait timeout, bail out if
        // the app-supplied proxy has died; otherwise log and keep waiting.
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        // Pop the oldest frame and its timestamp; track the frame as
        // in-flight until signalBufferReturned() is called for it.
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
1107
1108status_t CameraSource::setStopTimeUs(int64_t stopTimeUs) {
1109    Mutex::Autolock autoLock(mLock);
1110    ALOGV("Set stoptime: %lld us", (long long)stopTimeUs);
1111
1112    if (stopTimeUs < -1) {
1113        ALOGE("Invalid stop time %lld us", (long long)stopTimeUs);
1114        return BAD_VALUE;
1115    } else if (stopTimeUs == -1) {
1116        ALOGI("reset stopTime to be -1");
1117    }
1118
1119    mStopSystemTimeUs = stopTimeUs;
1120    return OK;
1121}
1122
// Decides whether the frame captured at |timestampUs| should be dropped,
// updating timestamp/glitch bookkeeping as a side effect. Must be called
// with mLock held. Returns true if the frame should be skipped.
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Drop if we've stopped, or if this frame predates the requested start
    // time before any frame has been accepted.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // Drop frames captured at or after the configured stop time.
    if (mStopSystemTimeUs != -1 && timestampUs >= mStopSystemTimeUs) {
        ALOGV("Drop Camera frame at %lld  stop time: %lld us",
                (long long)timestampUs, (long long)mStopSystemTimeUs);
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // Count unusually long gaps between consecutive frames.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // mStartTimeUs now holds the initial delay (capture time minus
            // requested start time), used to re-base frame timestamps.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
1168
// Camera callback delivering a frame in IMemory form, timestamped in
// microseconds. Either drops the frame (returning it to the camera) or
// queues it for the reader.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType __unused, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);

    // Skipped frames (stopped, pre-start, time-lapse skip, backward
    // timestamp) go straight back to the camera.
    if (shouldSkipFrameLocked(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    // Re-base the timestamp relative to the first frame, offset by the
    // initial delay computed in shouldSkipFrameLocked().
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    // Wake a reader blocked in read().
    mFrameAvailableCondition.signal();
}
1189
1190void CameraSource::releaseRecordingFrameHandle(native_handle_t* handle) {
1191    if (mCameraRecordingProxy != nullptr) {
1192        mCameraRecordingProxy->releaseRecordingFrameHandle(handle);
1193    } else if (mCamera != nullptr) {
1194        int64_t token = IPCThreadState::self()->clearCallingIdentity();
1195        mCamera->releaseRecordingFrameHandle(handle);
1196        IPCThreadState::self()->restoreCallingIdentity(token);
1197    } else {
1198        native_handle_close(handle);
1199        native_handle_delete(handle);
1200    }
1201}
1202
1203void CameraSource::releaseRecordingFrameHandleBatch(const std::vector<native_handle_t*>& handles) {
1204    if (mCameraRecordingProxy != nullptr) {
1205        mCameraRecordingProxy->releaseRecordingFrameHandleBatch(handles);
1206    } else if (mCamera != nullptr) {
1207        int64_t token = IPCThreadState::self()->clearCallingIdentity();
1208        mCamera->releaseRecordingFrameHandleBatch(handles);
1209        IPCThreadState::self()->restoreCallingIdentity(token);
1210    } else {
1211        for (auto& handle : handles) {
1212            native_handle_close(handle);
1213            native_handle_delete(handle);
1214        }
1215    }
1216}
1217
// Camera callback delivering a frame as a native handle (metadata mode).
// Wraps the handle in a recycled IMemory slot and queues it for the reader,
// or releases it back to the camera if it must be dropped.
void CameraSource::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
                native_handle_t* handle) {
    ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (handle == nullptr) return;

    if (shouldSkipFrameLocked(timestampUs)) {
        releaseRecordingFrameHandle(handle);
        return;
    }

    // Wait for a free metadata slot; on timeout, drop this frame.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            releaseRecordingFrameHandle(handle);
            return;
        }
    }

    ++mNumFramesReceived;

    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
    VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->pointer());
    metadata->eType = kMetadataBufferTypeNativeHandleSource;
    metadata->pHandle = handle;

    mFramesReceived.push_back(data);
    // Re-base the timestamp relative to the first frame plus initial delay.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1254
1255void CameraSource::recordingFrameHandleCallbackTimestampBatch(
1256        const std::vector<int64_t>& timestampsUs,
1257        const std::vector<native_handle_t*>& handles) {
1258    size_t n = timestampsUs.size();
1259    if (n != handles.size()) {
1260        ALOGE("%s: timestampsUs(%zu) and handles(%zu) size mismatch!",
1261                __FUNCTION__, timestampsUs.size(), handles.size());
1262    }
1263
1264    Mutex::Autolock autoLock(mLock);
1265    int batchSize = 0;
1266    for (size_t i = 0; i < n; i++) {
1267        int64_t timestampUs = timestampsUs[i];
1268        native_handle_t* handle = handles[i];
1269
1270        ALOGV("%s: timestamp %lld us", __FUNCTION__, (long long)timestampUs);
1271        if (handle == nullptr) continue;
1272
1273        if (shouldSkipFrameLocked(timestampUs)) {
1274            releaseRecordingFrameHandle(handle);
1275            continue;
1276        }
1277
1278        while (mMemoryBases.empty()) {
1279            if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
1280                    TIMED_OUT) {
1281                ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
1282                releaseRecordingFrameHandle(handle);
1283                continue;
1284            }
1285        }
1286        ++batchSize;
1287        ++mNumFramesReceived;
1288        sp<IMemory> data = *mMemoryBases.begin();
1289        mMemoryBases.erase(mMemoryBases.begin());
1290
1291        // Wrap native handle in sp<IMemory> so it can be pushed to mFramesReceived.
1292        VideoNativeHandleMetadata *metadata = (VideoNativeHandleMetadata*)(data->pointer());
1293        metadata->eType = kMetadataBufferTypeNativeHandleSource;
1294        metadata->pHandle = handle;
1295
1296        mFramesReceived.push_back(data);
1297        int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
1298        mFrameTimes.push_back(timeUs);
1299        ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64, mStartTimeUs, timeUs);
1300
1301    }
1302    if (batchSize > 0) {
1303        Mutex::Autolock autoLock(mBatchLock);
1304        mInflightBatchSizes.push_back(batchSize);
1305    }
1306    for (int i = 0; i < batchSize; i++) {
1307        mFrameAvailableCondition.signal();
1308    }
1309}
1310
// Thread object that forwards frames from |consumer| to |cameraSource|.
CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
        const sp<CameraSource>& cameraSource) {
    mConsumer = consumer;
    // NOTE(review): registers |this| as the listener while still inside the
    // constructor — assumes no frame callback fires before construction
    // completes; confirm against BufferItemConsumer's threading contract.
    mConsumer->setFrameAvailableListener(this);
    mCameraSource = cameraSource;
}
1317
1318void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
1319    ALOGV("%s: onFrameAvailable", __FUNCTION__);
1320
1321    Mutex::Autolock l(mLock);
1322
1323    if (!mFrameAvailable) {
1324        mFrameAvailable = true;
1325        mFrameAvailableSignal.signal();
1326    }
1327}
1328
// One iteration of the listener thread: wait for the frame-available flag,
// then drain every acquirable buffer into processBufferQueueFrame().
// Returning true keeps the thread running; false stops it.
bool CameraSource::BufferQueueListener::threadLoop() {
    if (mConsumer == nullptr || mCameraSource == nullptr) {
        return false;
    }

    {
        Mutex::Autolock l(mLock);
        // Time out periodically so a pending requestExit() can take effect.
        while (!mFrameAvailable) {
            if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
                return true;
            }
        }
        mFrameAvailable = false;
    }

    // Drain all buffers currently available in the queue.
    BufferItem buffer;
    while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
        mCameraSource->processBufferQueueFrame(buffer);
    }

    return true;
}
1351
// Handles one buffer acquired from the buffer queue: either releases it
// immediately (dropped frame) or wraps it as VideoNativeMetadata in a
// recycled IMemory slot and queues it for the reader.
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; this class works in us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    // Wait for a free metadata slot; on timeout, drop this frame.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());

    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // Re-base the timestamp relative to the first frame plus initial delay.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
1397
1398MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
1399    ALOGV("metaDataStoredInVideoBuffers");
1400
1401    // Output buffers will contain metadata if camera sends us buffer in metadata mode or via
1402    // buffer queue.
1403    switch (mVideoBufferMode) {
1404        case hardware::ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA:
1405            return kMetadataBufferTypeNativeHandleSource;
1406        case hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE:
1407            return kMetadataBufferTypeANWBuffer;
1408        default:
1409            return kMetadataBufferTypeInvalid;
1410    }
1411}
1412
1413CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
1414    mSource = source;
1415}
1416
// Forwards an IMemory frame to the source, converting the timestamp from
// nanoseconds to microseconds.
void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}
1421
// Forwards a native-handle frame to the source, converting the timestamp
// from nanoseconds to microseconds.
void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestamp(nsecs_t timestamp,
        native_handle_t* handle) {
    mSource->recordingFrameHandleCallbackTimestamp(timestamp / 1000, handle);
}
1426
1427void CameraSource::ProxyListener::recordingFrameHandleCallbackTimestampBatch(
1428        const std::vector<int64_t>& timestampsUs,
1429        const std::vector<native_handle_t*>& handles) {
1430    int n = timestampsUs.size();
1431    std::vector<nsecs_t> modifiedTimestamps(n);
1432    for (int i = 0; i < n; i++) {
1433        modifiedTimestamps[i] = timestampsUs[i] / 1000;
1434    }
1435    mSource->recordingFrameHandleCallbackTimestampBatch(modifiedTimestamps, handles);
1436}
1437
// Only logs the proxy's death here; read() independently detects the dead
// binder via isBinderAlive() and returns ERROR_END_OF_STREAM.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1441
1442}  // namespace android
1443