CameraSource.cpp revision 2b37ced30f89437c804c3945b901019b86d210ae
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

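// CameraSourceListener proxies camera callbacks back into the owning
// CameraSource. It holds only a weak reference to the source so that the
// listener registered with the camera does not keep the source alive.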
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

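// Translate a CameraParameters pixel format string into the corresponding
// OMX color format constant expected by the video encoder.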
static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    LOGE("Unknown color format (%s), please add it to "
         "CameraSource::getColorFormat", colorFormat);

    // Report the failure to the caller instead of aborting here;
    // isCameraColorFormatSupported() checks for -1 and returns BAD_VALUE.
    return -1;
}

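// Factory methods.
//
// Create() connects to the default camera (id 0) and keeps the camera's
// current frame size and frame rate settings. CreateFromCamera() lets the
// caller supply an already-opened ICamera, a camera id, an explicit video
// size, a frame rate and a preview surface; it returns NULL if the source
// fails to initialize (see initCheck()).
//
// Illustrative use by a recorder (hypothetical caller, not part of this file):
//
//     CameraSource *source = CameraSource::CreateFromCamera(
//             NULL /* camera */, 0 /* cameraId */, size,
//             30 /* frameRate */, previewSurface);
//     if (source == NULL) { /* initialization failed */ }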
CameraSource *CameraSource::Create() {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, 0, size, -1, NULL);
}

// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface) {

    CameraSource *source = new CameraSource(camera, cameraId,
                    videoSize, frameRate, surface);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

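// The constructor only records the requested settings and delegates the
// actual camera setup to init(); callers must consult initCheck() before
// using the source.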
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mFirstFrameTimeUs(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {

    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, cameraId, videoSize, frameRate);
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

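// Obtain a usable Camera object: either connect to the camera with the given
// id, or wrap the ICamera handed in by the application (a "hot" camera that
// the app already holds). Returns -EBUSY if no connection could be
// established.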
status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, int32_t cameraId) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId);
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        mCamera = Camera::create(camera);
        mCameraFlags |= FLAGS_HOT_CAMERA;
    }

    // Is camera available?
    if (mCamera == 0) {
        LOGE("Camera connection could not be established.");
        return -EBUSY;
    }
    if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
        mCamera->lock();
    }
    return OK;
}


/*
 * Check whether the requested video width and height match one of
 * the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    LOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width  == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video output are separate, we only set the
 * video size, and applications should set the preview size to a
 * proper value; the recording framework will not change the preview
 * size. Otherwise, if the video and preview output are the same,
 * we need to set the preview size to be the same as the requested
 * video size.
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        LOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}
/*
 * Check whether the camera's advertised video frame color format
 * is supported.
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration of the video size is skipped.
 * If frameRate is -1, configuration of the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {

    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            LOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of width and height is -1,
        // we reject such a request.
        LOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera.
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            LOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size has been
 * successfully configured or not. If both width and height
 * are -1, the check is performed against the current width
 * and height settings.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        LOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            LOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully
 * configured or not. If the target frameRate is -1, the check is
 * performed against the current frame rate setting.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && frameRateActual != frameRate) {
        LOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize are -1, use the current
 *      width and height settings of the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate) {

    status_t err = OK;
    // The binder calling identity must be restored on every exit path below.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();

    if ((err = isCameraAvailable(camera, cameraId)) != OK) {
        IPCThreadState::self()->restoreCallingIdentity(token);
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        IPCThreadState::self()->restoreCallingIdentity(token);
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate)) != OK) {
        IPCThreadState::self()->restoreCallingIdentity(token);
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        IPCThreadState::self()->restoreCallingIdentity(token);
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        IPCThreadState::self()->restoreCallingIdentity(token);
        return err;
    }

    // This CHECK is good, since we just passed the lock/unlock
    // check earlier by calling mCamera->setParameters().
    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));

    /*
     * mCamera->startRecording() signals camera hal to make
     * available the video buffers (for instance, allocation
     * of the video buffers may be triggered when camera hal's
     * startRecording() method is called). Making available these
     * video buffers earlier (before calling start()) is critical,
     * if one wants to configure omx video encoders to use these
     * buffers for passing video frame data during video recording
     * without the need to memcpy the video frame data stored
     * in these buffers. Eliminating memcpy for video frame data
     * is crucial in performance for HD quality video recording
     * applications.
     *
     * Based on OMX IL spec, configuring the omx video encoders
     * must occur in loaded state. When start() is called, omx
     * video encoders are already in idle state, which is too
     * late. Thus, we must call mCamera->startRecording() earlier.
     */
    startCameraRecording();

    IPCThreadState::self()->restoreCallingIdentity(token);

    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}

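// Ask the camera to start streaming recording frames; see the comment in
// init() for why this happens before start() is called.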
void CameraSource::startCameraRecording() {
    CHECK_EQ(OK, mCamera->startRecording());
    CHECK(mCamera->recordingEnabled());
}

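// Start delivering frames to the reader: optionally enable statistics
// collection (media.stagefright.record-stats system property), remember the
// requested start time from the metadata, and register the listener that
// feeds dataCallbackTimestamp().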
status_t CameraSource::start(MetaData *meta) {
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        LOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(new CameraSourceListener(this));
    IPCThreadState::self()->restoreCallingIdentity(token);

    mStarted = true;
    return OK;
}

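// Detach the listener and stop the camera's recording stream.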
void CameraSource::stopCameraRecording() {
    mCamera->setListener(NULL);
    mCamera->stopRecording();
}

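// Stop recording: wake any reader blocked in read(), drop frames that were
// queued but never read, wait until the encoder has returned every frame it
// is still holding, then release (or unlock) the camera and optionally log
// the frame statistics.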
status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    stopCameraRecording();
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        mFrameCompleteCondition.wait(mLock);
    }

    LOGV("Disconnect camera");
    if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
        LOGV("Camera was cold when we started, stopping preview");
        mCamera->stopPreview();
    }
    mCamera->unlock();
    mCamera.clear();
    mCamera = 0;
    mCameraFlags = 0;
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    mCamera->releaseRecordingFrame(frame);
}

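// Return every frame still sitting in the received queue back to the camera;
// each one counts as a dropped frame.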
void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}

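// Called by the downstream consumer (the MediaBuffer observer) when it is
// done with a buffer handed out in read(). Find the matching camera frame,
// give it back to the camera, and wake stop() if it is waiting for
// outstanding frames.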
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

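// Blocking read: wait for the next queued frame, then either hand it to the
// caller as a MediaBuffer that wraps the camera memory (no copy), or drop it
// if the ReadOptions ask for frames to be skipped up to a given time.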
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted) {
            // Also re-check mStarted so that a stop() signal does not leave
            // us waiting forever for a frame that will never arrive.
            while (mStarted && mFramesReceived.empty()) {
                mFrameAvailableCondition.wait(mLock);
            }

            if (!mStarted) {
                return OK;
            }

            frame = *mFramesReceived.begin();
            mFramesReceived.erase(mFramesReceived.begin());

            frameTime = *mFrameTimes.begin();
            mFrameTimes.erase(mFrameTimes.begin());
            int64_t skipTimeUs;
            if (!options || !options->getSkipFrame(&skipTimeUs)) {
                skipTimeUs = frameTime;
            }
            if (skipTimeUs > frameTime) {
                LOGV("skipTimeUs: %lld us > frameTime: %lld us",
                    skipTimeUs, frameTime);
                releaseOneRecordingFrame(frame);
                ++mNumFramesDropped;
                // Safeguard against the abuse of the kSkipFrame_Option.
                if (skipTimeUs - frameTime >= 1E6) {
                    LOGE("Frame skipping requested is way too long: %lld us",
                        skipTimeUs - frameTime);
                    return UNKNOWN_ERROR;
                }
            } else {
                mFramesBeingEncoded.push_back(frame);
                *buffer = new MediaBuffer(frame->pointer(), frame->size());
                (*buffer)->setObserver(this);
                (*buffer)->add_ref();
                (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);

                return OK;
            }
        }
    }
    return OK;
}

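// Called on the camera's callback thread for every recording frame. Drops
// frames that arrive after stop() or before the requested start time, tracks
// glitches (inter-frame gaps above mGlitchDurationThresholdUs), lets the
// CameraSourceTimeLapse subclass skip frames, and otherwise queues the frame
// with a start-time-relative timestamp for read() to pick up.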
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started.
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

}  // namespace android