CameraSource.cpp revision 95068be1426dc0a4dc856cf9e35550c31b901711
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
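        // The camera reports timestamps in nanoseconds (nsecs_t); CameraSource
        // works in microseconds, hence the division by 1000 below.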
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
       return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
       return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
       return OMX_COLOR_Format16bitRGB565;
    }

    LOGE("Unknown color format (%s), please add it to "
         "CameraSource::getColorFormat", colorFormat);

    CHECK_EQ(0, "Unknown color format");
}
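// Note: getColorFormat() never actually returns -1; an unrecognized format
// string hits the CHECK_EQ above and aborts, so the -1 check in
// isCameraColorFormatSupported() below is defensive rather than reachable.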

CameraSource *CameraSource::Create() {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
}

// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
                    videoSize, frameRate, surface,
                    storeMetaDataInVideoBuffers);
    return source;
}
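// Illustrative usage sketch (not part of this file): a recorder that already
// holds an application-supplied camera and recording proxy might create the
// source roughly as follows. cameraHandle, recordingProxy and previewSurface
// are hypothetical names, and the size/frame rate values are examples.
//
//     Size videoSize;
//     videoSize.width = 640;
//     videoSize.height = 480;
//     sp<CameraSource> source = CameraSource::CreateFromCamera(
//             cameraHandle /* sp<ICamera> */, recordingProxy, 0 /* cameraId */,
//             videoSize, 30 /* fps */, previewSurface,
//             false /* storeMetaDataInVideoBuffers */);
//     if (source->initCheck() != OK) {
//         // Initialization failed; do not call start().
//     }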

CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mInitCheck = init(camera, proxy, cameraId,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
    IPCThreadState::self()->restoreCallingIdentity(token);
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId);
        if (mCamera == 0) return -EBUSY;
        // If proxy is not passed in by applications, still use the proxy of
        // our own Camera to simplify the code.
        mCameraRecordingProxy = mCamera->getRecordingProxy();
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
    }

    mCamera->lock();
    mDeathNotifier = new DeathNotifier();
    // isBinderAlive needs linkToDeath to work.
    mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);

    return OK;
}
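// Note on FLAGS_HOT_CAMERA: the flag is set when the application handed us an
// existing ("hot") camera that it still owns, and cleared when this class
// connected to the camera itself. releaseCamera() uses the flag to decide
// whether to merely unlock the camera (hot) or to stop the preview and
// disconnect from it (cold).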


/*
 * Check whether the requested video width and height match one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    LOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width  == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video outputs are separate, we only set the
 * video size, and applications should set the preview size to a
 * proper value; the recording framework will not change the preview
 * size. Otherwise, if the video and preview outputs are the same,
 * we need to set the preview size to the requested video size.
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        LOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera outputs a supported color format.
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration of the video size is skipped.
 * If frameRate is -1, configuration of the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {

    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            LOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        LOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        LOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
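        // Note: this is a plain substring match against the comma-separated
        // list of supported preview frame rates, so e.g. "15" also matches a
        // list that only contains "150".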
        if (strstr(supportedFrameRates, buf) == NULL) {
            LOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            LOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size has been
 * successfully configured. If both width and height are -1,
 * the check is performed on the current width and height settings.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        LOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            LOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully configured.
 * If the target frameRate is -1, the check is performed on the current
 * frame rate setting.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && frameRateActual != frameRate) {
        LOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready to provide the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param proxy the recording proxy used to start/stop recording
 *      and to release recording frames
 * @param cameraId if camera == 0, use camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize are -1, use the current
 *      width and height settings of the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. Request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
        LOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate)) != OK) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // This CHECK is good, since we just passed the lock/unlock
    // check earlier by calling mCamera->setParameters().
    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }
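    // The metadata request above is best-effort: if the camera cannot store
    // metadata in buffers, we silently fall back to real YUV data. Callers
    // (e.g. a recorder) can query isMetaDataStoredInVideoBuffers() to find
    // out whether the request was honored.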

    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }
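    // For example, at 30 fps one frame period is ~33,333 us, which is below
    // the default threshold of 200,000 us, so the default is kept; at 2 fps
    // the period is 500,000 us and becomes the new glitch threshold.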

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}

void CameraSource::startCameraRecording() {
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->unlock();
    mCamera.clear();
    IPCThreadState::self()->restoreCallingIdentity(token);
    CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this)));
}

status_t CameraSource::start(MetaData *meta) {
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        LOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}

void CameraSource::stopCameraRecording() {
    mCameraRecordingProxy->stopRecording();
}

void CameraSource::releaseCamera() {
    LOGV("releaseCamera");
    if (mCamera != 0) {
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            LOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        } else {
            // Unlock the camera so the application can lock it back.
            mCamera->unlock();
        }
        mCamera.clear();
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}

status_t CameraSource::stop() {
    LOGD("stop: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) {
            LOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        LOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    LOGD("stop: X");
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    }
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

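// The MediaBuffers handed out by read() wrap the camera's IMemory frames
// directly (no copy), so a frame can only be returned to the camera once the
// consumer releases the MediaBuffer. signalBufferReturned() finds the matching
// IMemory by comparing data pointers and then releases the recording frame.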
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock, 1000000000LL)) {
                if (!mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    LOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                LOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
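// Illustrative consumer loop (hypothetical, not part of this file): an encoder
// or writer typically drives the source roughly as follows. Error handling is
// omitted and the loop condition is an assumption.
//
//     sp<CameraSource> source = ...;
//     CHECK_EQ(OK, source->start());
//     MediaBuffer *mbuf;
//     while (source->read(&mbuf) == OK && mbuf != NULL) {
//         int64_t timeUs;
//         CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
//         // ... consume mbuf->data() / mbuf->range_length() ...
//         mbuf->release();  // triggers signalBufferReturned()
//     }
//     source->stop();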

void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
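    // After the first frame, mStartTimeUs holds the initial delay (first frame
    // capture time minus the requested start time), so the emitted timestamp
    // is that delay plus the offset from the first frame. For example, if
    // recording was requested at t=100,000 us and the first frame arrived at
    // t=130,000 us, the first frame gets timeUs = 30,000 us and a frame
    // captured 33,333 us later gets timeUs = 63,333 us.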
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    LOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    LOGI("Camera recording proxy died");
}

}  // namespace android