CameraSource.cpp revision 3bd3020c00ec8264ac1fe3870800f326487f9221
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

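// Listener installed on the local Camera object. It forwards camera
// notifications and frame callbacks to the owning CameraSource through a
// weak reference, so the listener itself never keeps the source alive.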
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

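// Map the pixel format string advertised by the camera HAL (see
// CameraParameters::KEY_VIDEO_FRAME_FORMAT) to the corresponding OMX color
// format constant. Aborts if the format is not recognized.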
static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
    }

110    LOGE("Uknown color format (%s), please add it to "
111         "CameraSource::getColorFormat", colorFormat);
112
113    CHECK_EQ(0, "Unknown color format");
114}
115
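// Create a CameraSource that connects to the default camera and uses
// whatever video size and frame rate the camera is currently configured for.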
CameraSource *CameraSource::Create() {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
}

// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
                    videoSize, frameRate, surface,
                    storeMetaDataInVideoBuffers);
    return source;
}
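
// A minimal usage sketch (illustrative only): the camera, proxy and surface
// objects are assumed to be supplied by the caller, and error handling is
// omitted. In the recording framework the source is normally handed to an
// encoder, which pulls frames by calling read() on its own thread.
//
//   sp<CameraSource> source = CameraSource::CreateFromCamera(
//           camera, proxy, cameraId, videoSize, 30 /* fps */, surface,
//           false /* storeMetaDataInVideoBuffers */);
//   if (source->initCheck() == OK) {
//       source->start();
//       MediaBuffer *buffer;
//       if (source->read(&buffer) == OK && buffer != NULL) {
//           // ... hand buffer->data() to an encoder ...
//           buffer->release();  // returned via signalBufferReturned()
//       }
//       source->stop();
//   }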

CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    mCamera->lock();

    return OK;
}


/*
 * Check whether the requested video frame size (width and height)
 * is one of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    LOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width  == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video outputs are separate, we only set the
 * video size; applications should set the preview size to some
 * proper value, and the recording framework will not change it.
 * Otherwise, if the video and preview outputs are the same, we
 * need to set the preview size to be the same as the requested
 * video size.
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        LOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the video frame format used by the camera
 * is a supported color format.
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration of the video size is skipped.
 * If frameRate is -1, configuration of the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    LOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            LOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        LOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        LOGV("Supported frame rates: %s", supportedFrameRates);
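        // Note: this is a plain substring match against the comma-separated
        // list of supported preview frame rates.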
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        if (strstr(supportedFrameRates, buf) == NULL) {
            LOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            LOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size has been
 * successfully configured. If both width and height are -1,
 * the current width and height settings are checked instead.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    LOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera HAL does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        LOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            LOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully
 * configured. If the target frameRate is -1, the current frame
 * rate setting is checked instead.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    LOGV("checkFrameRate");
    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        LOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use the camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize are -1, use the current
 *      width and height settings of the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. A request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    LOGV("init");
    status_t err = OK;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    LOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
        LOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // This CHECK is good, since we just passed the lock/unlock
    // check earlier by calling mCamera->setParameters().
    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

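    // Any gap between consecutive frames longer than one nominal frame
    // period (but at least the 200 ms default) is counted as a glitch.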
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    } else if (mInitCheck == OK) {
        // The camera was initialized, but since start() was never called,
        // the lock on the camera was never released. Make sure the
        // camera's lock is released in this case.
        releaseCamera();
    }
}

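// Start the camera's recording stream. For an application-supplied ("hot")
// camera, hand control back to the application through the recording proxy;
// for a camera we connected ourselves, install our own listener and start
// recording directly.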
void CameraSource::startCameraRecording() {
    LOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        CHECK(mCamera->recordingEnabled());
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
}

status_t CameraSource::start(MetaData *meta) {
    LOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        LOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}

void CameraSource::stopCameraRecording() {
    LOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}

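// Give up our access to the camera. For a camera we connected ourselves
// ("cold"), stop the preview and disconnect; for an application-supplied
// ("hot") camera, only drop our lock and unlink the death notifier from
// the recording proxy.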
void CameraSource::releaseCamera() {
    LOGV("releaseCamera");
    if (mCamera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            LOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        }
        mCamera->unlock();
        mCamera.clear();
        mCamera = 0;
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}

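// Stop the source: wake up any reader blocked in read(), drop frames that
// were queued but never read, wait for frames still held by the encoder to
// be returned (logging a warning every 3 seconds), then stop the camera
// recording stream and release the camera.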
status_t CameraSource::stop() {
    LOGD("stop: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) {
            LOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        LOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    LOGD("stop: X");
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    LOGV("releaseRecordingFrame");
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

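// MediaBufferObserver callback, invoked when the consumer (typically the
// encoder) releases a MediaBuffer handed out by read(). Find the matching
// IMemory frame, return it to the camera, and wake up stop() if it is
// waiting for outstanding frames.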
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

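// Block until a frame is available (or the source is stopped), then wrap the
// received IMemory in a MediaBuffer without copying the data. The frame is
// kept in mFramesBeingEncoded until the buffer comes back through
// signalBufferReturned().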
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock, 1000000000LL)) {
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    LOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                LOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

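// Called (indirectly) by the camera for every recorded frame. Frames that
// arrive before the requested start time are dropped, inter-frame gaps
// longer than the glitch threshold are counted, and accepted frames are
// queued with timestamps rebased relative to the first frame (plus the
// initial start-up delay).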
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    LOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    LOGI("Camera recording proxy died");
}

}  // namespace android