// CameraSource.cpp revision 393410a441b6d06daf286ed496470e9d6b2b6ca8
1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "CameraSource"
19#include <utils/Log.h>
20
21#include <OMX_Component.h>
22#include <binder/IPCThreadState.h>
23#include <media/stagefright/CameraSource.h>
24#include <media/stagefright/MediaDebug.h>
25#include <media/stagefright/MediaDefs.h>
26#include <media/stagefright/MediaErrors.h>
27#include <media/stagefright/MetaData.h>
28#include <camera/Camera.h>
29#include <camera/CameraParameters.h>
30#include <surfaceflinger/Surface.h>
31#include <utils/String8.h>
32#include <cutils/properties.h>
33
34namespace android {
35
// Listener registered with the Camera to receive notifications and
// frame data; forwards them to a CameraSource. Holds only a weak
// reference so the listener does not keep the source alive.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // Notification callback (error/state messages from the camera).
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Non-timestamped data callback.
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    // Timestamped data callback used for recording frames.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;  // weak: avoids a reference cycle with CameraSource

    // Not copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
54
55CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
56    : mSource(source) {
57}
58
59CameraSourceListener::~CameraSourceListener() {
60}
61
62void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
63    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
64}
65
66void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
67    LOGV("postData(%d, ptr:%p, size:%d)",
68         msgType, dataPtr->pointer(), dataPtr->size());
69
70    sp<CameraSource> source = mSource.promote();
71    if (source.get() != NULL) {
72        source->dataCallback(msgType, dataPtr);
73    }
74}
75
76void CameraSourceListener::postDataTimestamp(
77        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
78
79    sp<CameraSource> source = mSource.promote();
80    if (source.get() != NULL) {
81        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
82    }
83}
84
85static int32_t getColorFormat(const char* colorFormat) {
86    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
87       return OMX_COLOR_FormatYUV420Planar;
88    }
89
90    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
91       return OMX_COLOR_FormatYUV422SemiPlanar;
92    }
93
94    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
95        return OMX_COLOR_FormatYUV420SemiPlanar;
96    }
97
98    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
99        return OMX_COLOR_FormatYCbYCr;
100    }
101
102    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
103       return OMX_COLOR_Format16bitRGB565;
104    }
105
106    LOGE("Uknown color format (%s), please add it to "
107         "CameraSource::getColorFormat", colorFormat);
108
109    CHECK_EQ(0, "Unknown color format");
110}
111
112CameraSource *CameraSource::Create() {
113    Size size;
114    size.width = -1;
115    size.height = -1;
116
117    sp<ICamera> camera;
118    return new CameraSource(camera, 0, size, -1, NULL, false);
119}
120
121// static
122CameraSource *CameraSource::CreateFromCamera(
123    const sp<ICamera>& camera,
124    int32_t cameraId,
125    Size videoSize,
126    int32_t frameRate,
127    const sp<Surface>& surface,
128    bool storeMetaDataInVideoBuffers) {
129
130    CameraSource *source = new CameraSource(camera, cameraId,
131                    videoSize, frameRate, surface,
132                    storeMetaDataInVideoBuffers);
133
134    if (source != NULL) {
135        if (source->initCheck() != OK) {
136            delete source;
137            return NULL;
138        }
139    }
140    return source;
141}
142
// Constructor: records the target preview surface, zeroes the frame
// bookkeeping counters, then runs init() to connect to and configure
// the camera. The result of init() is saved in mInitCheck; callers
// must consult initCheck() before using the source.
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mFirstFrameTimeUs(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),  // 200 ms default glitch threshold
      mCollectStats(false) {

    // -1 means "not yet determined"; init() fills in the real values.
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, cameraId,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
}
171
// Returns the status recorded by init(); OK means the source is
// ready for use.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
175
176status_t CameraSource::isCameraAvailable(
177    const sp<ICamera>& camera, int32_t cameraId) {
178
179    if (camera == 0) {
180        mCamera = Camera::connect(cameraId);
181        mCameraFlags &= ~FLAGS_HOT_CAMERA;
182    } else {
183        mCamera = Camera::create(camera);
184        mCameraFlags |= FLAGS_HOT_CAMERA;
185    }
186
187    // Is camera available?
188    if (mCamera == 0) {
189        LOGE("Camera connection could not be established.");
190        return -EBUSY;
191    }
192    if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
193        mCamera->lock();
194    }
195    return OK;
196}
197
198
199/*
200 * Check to see whether the requested video width and height is one
201 * of the supported sizes.
202 * @param width the video frame width in pixels
203 * @param height the video frame height in pixels
204 * @param suppportedSizes the vector of sizes that we check against
205 * @return true if the dimension (width and height) is supported.
206 */
207static bool isVideoSizeSupported(
208    int32_t width, int32_t height,
209    const Vector<Size>& supportedSizes) {
210
211    LOGV("isVideoSizeSupported");
212    for (size_t i = 0; i < supportedSizes.size(); ++i) {
213        if (width  == supportedSizes[i].width &&
214            height == supportedSizes[i].height) {
215            return true;
216        }
217    }
218    return false;
219}
220
221/*
222 * If the preview and video output is separate, we only set the
223 * the video size, and applications should set the preview size
224 * to some proper value, and the recording framework will not
225 * change the preview size; otherwise, if the video and preview
226 * output is the same, we need to set the preview to be the same
227 * as the requested video size.
228 *
229 */
230/*
231 * Query the camera to retrieve the supported video frame sizes
232 * and also to see whether CameraParameters::setVideoSize()
233 * is supported or not.
234 * @param params CameraParameters to retrieve the information
235 * @@param isSetVideoSizeSupported retunrs whether method
236 *      CameraParameters::setVideoSize() is supported or not.
237 * @param sizes returns the vector of Size objects for the
238 *      supported video frame sizes advertised by the camera.
239 */
240static void getSupportedVideoSizes(
241    const CameraParameters& params,
242    bool *isSetVideoSizeSupported,
243    Vector<Size>& sizes) {
244
245    *isSetVideoSizeSupported = true;
246    params.getSupportedVideoSizes(sizes);
247    if (sizes.size() == 0) {
248        LOGD("Camera does not support setVideoSize()");
249        params.getSupportedPreviewSizes(sizes);
250        *isSetVideoSizeSupported = false;
251    }
252}
253
254/*
255 * Check whether the camera has the supported color format
256 * @param params CameraParameters to retrieve the information
257 * @return OK if no error.
258 */
259status_t CameraSource::isCameraColorFormatSupported(
260        const CameraParameters& params) {
261    mColorFormat = getColorFormat(params.get(
262            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
263    if (mColorFormat == -1) {
264        return BAD_VALUE;
265    }
266    return OK;
267}
268
269/*
270 * Configure the camera to use the requested video size
271 * (width and height) and/or frame rate. If both width and
272 * height are -1, configuration on the video size is skipped.
273 * if frameRate is -1, configuration on the frame rate
274 * is skipped. Skipping the configuration allows one to
275 * use the current camera setting without the need to
276 * actually know the specific values (see Create() method).
277 *
278 * @param params the CameraParameters to be configured
279 * @param width the target video frame width in pixels
280 * @param height the target video frame height in pixels
281 * @param frameRate the target frame rate in frames per second.
282 * @return OK if no error.
283 */
284status_t CameraSource::configureCamera(
285        CameraParameters* params,
286        int32_t width, int32_t height,
287        int32_t frameRate) {
288
289    Vector<Size> sizes;
290    bool isSetVideoSizeSupportedByCamera = true;
291    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
292    bool isCameraParamChanged = false;
293    if (width != -1 && height != -1) {
294        if (!isVideoSizeSupported(width, height, sizes)) {
295            LOGE("Video dimension (%dx%d) is unsupported", width, height);
296            return BAD_VALUE;
297        }
298        if (isSetVideoSizeSupportedByCamera) {
299            params->setVideoSize(width, height);
300        } else {
301            params->setPreviewSize(width, height);
302        }
303        isCameraParamChanged = true;
304    } else if ((width == -1 && height != -1) ||
305               (width != -1 && height == -1)) {
306        // If one and only one of the width and height is -1
307        // we reject such a request.
308        LOGE("Requested video size (%dx%d) is not supported", width, height);
309        return BAD_VALUE;
310    } else {  // width == -1 && height == -1
311        // Do not configure the camera.
312        // Use the current width and height value setting from the camera.
313    }
314
315    if (frameRate != -1) {
316        CHECK(frameRate > 0 && frameRate <= 120);
317        const char* supportedFrameRates =
318                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
319        CHECK(supportedFrameRates != NULL);
320        LOGV("Supported frame rates: %s", supportedFrameRates);
321        char buf[4];
322        snprintf(buf, 4, "%d", frameRate);
323        if (strstr(supportedFrameRates, buf) == NULL) {
324            LOGE("Requested frame rate (%d) is not supported: %s",
325                frameRate, supportedFrameRates);
326            return BAD_VALUE;
327        }
328
329        // The frame rate is supported, set the camera to the requested value.
330        params->setPreviewFrameRate(frameRate);
331        isCameraParamChanged = true;
332    } else {  // frameRate == -1
333        // Do not configure the camera.
334        // Use the current frame rate value setting from the camera
335    }
336
337    if (isCameraParamChanged) {
338        // Either frame rate or frame size needs to be changed.
339        String8 s = params->flatten();
340        if (OK != mCamera->setParameters(s)) {
341            LOGE("Could not change settings."
342                 " Someone else is using camera %p?", mCamera.get());
343            return -EBUSY;
344        }
345    }
346    return OK;
347}
348
/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, check on the current width and height value setting
 * is performed. On success the size in effect is stored in mVideoSize.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        LOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            LOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now. Remember the size actually in effect.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}
401
402/*
403 * Check the requested frame rate has been successfully configured or not.
404 * If the target frameRate is -1, check on the current frame rate value
405 * setting is performed.
406 *
407 * @param params CameraParameters to retrieve the information
408 * @param the target video frame rate to check against
409 * @return OK if no error.
410 */
411status_t CameraSource::checkFrameRate(
412        const CameraParameters& params,
413        int32_t frameRate) {
414
415    int32_t frameRateActual = params.getPreviewFrameRate();
416    if (frameRateActual < 0) {
417        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
418        return UNKNOWN_ERROR;
419    }
420
421    // Check the actual video frame rate against the target/requested
422    // video frame rate.
423    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
424        LOGE("Failed to set preview frame rate to %d fps. The actual "
425                "frame rate is %d", frameRate, frameRateActual);
426        return UNKNOWN_ERROR;
427    }
428
429    // Good now.
430    mVideoFrameRate = frameRateActual;
431    return OK;
432}
433
434/*
435 * Initialize the CameraSource to so that it becomes
436 * ready for providing the video input streams as requested.
437 * @param camera the camera object used for the video source
438 * @param cameraId if camera == 0, use camera with this id
439 *      as the video source
440 * @param videoSize the target video frame size. If both
441 *      width and height in videoSize is -1, use the current
442 *      width and heigth settings by the camera
443 * @param frameRate the target frame rate in frames per second.
444 *      if it is -1, use the current camera frame rate setting.
445 * @param storeMetaDataInVideoBuffers request to store meta
446 *      data or real YUV data in video buffers. Request to
447 *      store meta data in video buffers may not be honored
448 *      if the source does not support this feature.
449 *
450 * @return OK if no error.
451 */
452status_t CameraSource::init(
453        const sp<ICamera>& camera,
454        int32_t cameraId,
455        Size videoSize,
456        int32_t frameRate,
457        bool storeMetaDataInVideoBuffers) {
458
459    status_t err = OK;
460    int64_t token = IPCThreadState::self()->clearCallingIdentity();
461
462    if ((err  = isCameraAvailable(camera, cameraId)) != OK) {
463        return err;
464    }
465    CameraParameters params(mCamera->getParameters());
466    if ((err = isCameraColorFormatSupported(params)) != OK) {
467        return err;
468    }
469
470    // Set the camera to use the requested video frame size
471    // and/or frame rate.
472    if ((err = configureCamera(&params,
473                    videoSize.width, videoSize.height,
474                    frameRate))) {
475        return err;
476    }
477
478    // Check on video frame size and frame rate.
479    CameraParameters newCameraParams(mCamera->getParameters());
480    if ((err = checkVideoSize(newCameraParams,
481                videoSize.width, videoSize.height)) != OK) {
482        return err;
483    }
484    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
485        return err;
486    }
487
488    // This CHECK is good, since we just passed the lock/unlock
489    // check earlier by calling mCamera->setParameters().
490    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
491
492    mIsMetaDataStoredInVideoBuffers = false;
493    if (storeMetaDataInVideoBuffers &&
494        OK == mCamera->storeMetaDataInBuffers(true)) {
495        mIsMetaDataStoredInVideoBuffers = true;
496    }
497
498    /*
499     * mCamera->startRecording() signals camera hal to make
500     * available the video buffers (for instance, allocation
501     * of the video buffers may be triggered when camera hal's
502     * startRecording() method is called). Making available these
503     * video buffers earlier (before calling start()) is critical,
504     * if one wants to configure omx video encoders to use these
505     * buffers for passing video frame data during video recording
506     * without the need to memcpy the video frame data stored
507     * in these buffers. Eliminating memcpy for video frame data
508     * is crucial in performance for HD quality video recording
509     * applications.
510     *
511     * Based on OMX IL spec, configuring the omx video encoders
512     * must occur in loaded state. When start() is called, omx
513     * video encoders are already in idle state, which is too
514     * late. Thus, we must call mCamera->startRecording() earlier.
515     */
516    startCameraRecording();
517
518    IPCThreadState::self()->restoreCallingIdentity(token);
519
520    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
521    if (glitchDurationUs > mGlitchDurationThresholdUs) {
522        mGlitchDurationThresholdUs = glitchDurationUs;
523    }
524
525    // XXX: query camera for the stride and slice height
526    // when the capability becomes available.
527    mMeta = new MetaData;
528    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
529    mMeta->setInt32(kKeyColorFormat, mColorFormat);
530    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
531    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
532    mMeta->setInt32(kKeyStride,      mVideoSize.width);
533    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
534    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
535    return OK;
536}
537
538CameraSource::~CameraSource() {
539    if (mStarted) {
540        stop();
541    }
542}
543
544void CameraSource::startCameraRecording() {
545    CHECK_EQ(OK, mCamera->startRecording());
546    CHECK(mCamera->recordingEnabled());
547}
548
549status_t CameraSource::start(MetaData *meta) {
550    CHECK(!mStarted);
551    if (mInitCheck != OK) {
552        LOGE("CameraSource is not initialized yet");
553        return mInitCheck;
554    }
555
556    char value[PROPERTY_VALUE_MAX];
557    if (property_get("media.stagefright.record-stats", value, NULL)
558        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
559        mCollectStats = true;
560    }
561
562    mStartTimeUs = 0;
563    int64_t startTimeUs;
564    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
565        mStartTimeUs = startTimeUs;
566    }
567
568    int64_t token = IPCThreadState::self()->clearCallingIdentity();
569    mCamera->setListener(new CameraSourceListener(this));
570    IPCThreadState::self()->restoreCallingIdentity(token);
571
572    mStarted = true;
573    return OK;
574}
575
// Unregisters the frame listener and asks the camera hal to stop
// recording.
void CameraSource::stopCameraRecording() {
    mCamera->setListener(NULL);
    mCamera->stopRecording();
}
580
// Stops recording: wakes any reader blocked in read(), drains queued
// frames, waits for buffers still held by the consumer, then releases
// the camera. Logs frame statistics when enabled.
status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    // Wake a reader blocked waiting for frames so it can observe
    // mStarted == false.
    mFrameAvailableCondition.signal();

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    stopCameraRecording();
    releaseQueuedFrames();
    // Frames handed out via read() come back through
    // signalBufferReturned(), which signals mFrameCompleteCondition.
    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        mFrameCompleteCondition.wait(mLock);
    }

    LOGV("Disconnect camera");
    if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
        // A cold camera's preview was ours to manage; shut it down.
        LOGV("Camera was cold when we started, stopping preview");
        mCamera->stopPreview();
    }
    mCamera->unlock();
    mCamera.clear();
    mCamera = 0;
    mCameraFlags = 0;
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    // Every received frame must have been either encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}
616
// Returns a single recording frame buffer to the camera hal.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    mCamera->releaseRecordingFrame(frame);
}
620
621void CameraSource::releaseQueuedFrames() {
622    List<sp<IMemory> >::iterator it;
623    while (!mFramesReceived.empty()) {
624        it = mFramesReceived.begin();
625        releaseRecordingFrame(*it);
626        mFramesReceived.erase(it);
627        ++mNumFramesDropped;
628    }
629}
630
// Returns the output format metadata (mime type, size, color format,
// frame rate) assembled by init().
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
634
// Returns one frame to the camera under this process's identity,
// since the release is a binder call into the camera service.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}
640
// Called by the buffer consumer when it is done with a MediaBuffer
// handed out by read(). Locates the matching camera frame by its data
// pointer, returns it to the camera, and releases the MediaBuffer.
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            // Wake up stop(), which may be waiting for all outstanding
            // frames to come back.
            mFrameCompleteCondition.signal();
            return;
        }
    }
    // The buffer does not correspond to any outstanding frame: abort.
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}
658
659status_t CameraSource::read(
660        MediaBuffer **buffer, const ReadOptions *options) {
661    LOGV("read");
662
663    *buffer = NULL;
664
665    int64_t seekTimeUs;
666    ReadOptions::SeekMode mode;
667    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
668        return ERROR_UNSUPPORTED;
669    }
670
671    sp<IMemory> frame;
672    int64_t frameTime;
673
674    {
675        Mutex::Autolock autoLock(mLock);
676        while (mStarted) {
677            while(mFramesReceived.empty()) {
678                mFrameAvailableCondition.wait(mLock);
679            }
680
681            if (!mStarted) {
682                return OK;
683            }
684
685            frame = *mFramesReceived.begin();
686            mFramesReceived.erase(mFramesReceived.begin());
687
688            frameTime = *mFrameTimes.begin();
689            mFrameTimes.erase(mFrameTimes.begin());
690            int64_t skipTimeUs;
691            if (!options || !options->getSkipFrame(&skipTimeUs)) {
692                skipTimeUs = frameTime;
693            }
694            if (skipTimeUs > frameTime) {
695                LOGV("skipTimeUs: %lld us > frameTime: %lld us",
696                    skipTimeUs, frameTime);
697                releaseOneRecordingFrame(frame);
698                ++mNumFramesDropped;
699                // Safeguard against the abuse of the kSkipFrame_Option.
700                if (skipTimeUs - frameTime >= 1E6) {
701                    LOGE("Frame skipping requested is way too long: %lld us",
702                        skipTimeUs - frameTime);
703                    return UNKNOWN_ERROR;
704                }
705            } else {
706                mFramesBeingEncoded.push_back(frame);
707                *buffer = new MediaBuffer(frame->pointer(), frame->size());
708                (*buffer)->setObserver(this);
709                (*buffer)->add_ref();
710                (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
711
712                return OK;
713            }
714        }
715    }
716    return OK;
717}
718
// Receives a recording frame from the camera (via the listener) with
// its timestamp in microseconds. Queues the frame for read(), or drops
// it when the source is stopped, the subclass asks to skip it, or the
// frame predates the requested start time.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        // Not recording: hand the buffer straight back to the camera.
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    // Count (and occasionally log) abnormally long inter-frame gaps.
    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if(skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on, mStartTimeUs holds the delay between the
            // requested start time and the first captured frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    // Frame times are rebased so the first frame maps to the initial
    // delay (mStartTimeUs after the rebasing above).
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    // Wake up a reader blocked in read().
    mFrameAvailableCondition.signal();
}
768
769size_t CameraSource::getNumberOfVideoBuffers() const {
770    LOGV("getNumberOfVideoBuffers");
771    size_t nBuffers = 0;
772    int64_t token = IPCThreadState::self()->clearCallingIdentity();
773    if (mInitCheck == OK && mCamera != 0) {
774        nBuffers = mCamera->getNumberOfVideoBuffers();
775    }
776    IPCThreadState::self()->restoreCallingIdentity(token);
777    return nBuffers;
778}
779
780sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
781    LOGV("getVideoBuffer: %d", index);
782    sp<IMemory> buffer = 0;
783    int64_t token = IPCThreadState::self()->clearCallingIdentity();
784    if (mInitCheck == OK && mCamera != 0) {
785        buffer = mCamera->getVideoBuffer(index);
786    }
787    IPCThreadState::self()->restoreCallingIdentity(token);
788    return buffer;
789}
790
// Reports whether video buffers carry metadata rather than YUV data,
// as decided during init() from the request and camera hal support.
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    LOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}
795
796}  // namespace android
797