CameraSource.cpp revision bf5bea96f236adb5eef78c2f414ef82b3602a0f7
1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "CameraSource"
19#include <utils/Log.h>
20
21#include <OMX_Component.h>
22#include <binder/IPCThreadState.h>
23#include <media/stagefright/foundation/ADebug.h>
24#include <media/stagefright/CameraSource.h>
25#include <media/stagefright/MediaDefs.h>
26#include <media/stagefright/MediaErrors.h>
27#include <media/stagefright/MetaData.h>
28#include <camera/Camera.h>
29#include <camera/CameraParameters.h>
30#include <gui/Surface.h>
31#include <utils/String8.h>
32#include <cutils/properties.h>
33
34namespace android {
35
// Extra slack (3 s, in ns) added to the expected inter-frame interval when
// waiting on frame conditions in read() and reset() before logging a timeout.
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
37
// Listener installed on a locally-owned Camera (the "cold camera" path).
// Forwards camera callbacks to the CameraSource through a weak reference,
// so the listener never extends the source's lifetime.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak: must be promote()d before each use; may already be gone.
    wp<CameraSource> mSource;

    // Non-copyable (declared, never defined).
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
57
// Keeps only a weak reference to the source; see mSource.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
61
// Nothing to release: mSource is a weak pointer.
CameraSourceListener::~CameraSourceListener() {
}
64
// Camera notification callback; only logged, not forwarded to the source.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
68
69void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
70                                    camera_frame_metadata_t *metadata) {
71    ALOGV("postData(%d, ptr:%p, size:%d)",
72         msgType, dataPtr->pointer(), dataPtr->size());
73
74    sp<CameraSource> source = mSource.promote();
75    if (source.get() != NULL) {
76        source->dataCallback(msgType, dataPtr);
77    }
78}
79
// Forwards a timestamped recording frame to the source, converting the
// timestamp from nanoseconds to microseconds. Silently dropped if the
// source has already been destroyed.
void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}
88
89static int32_t getColorFormat(const char* colorFormat) {
90    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
91       return OMX_COLOR_FormatYUV420Planar;
92    }
93
94    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
95       return OMX_COLOR_FormatYUV422SemiPlanar;
96    }
97
98    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
99        return OMX_COLOR_FormatYUV420SemiPlanar;
100    }
101
102    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
103        return OMX_COLOR_FormatYCbYCr;
104    }
105
106    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
107       return OMX_COLOR_Format16bitRGB565;
108    }
109
110    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
111       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
112    }
113
114    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
115        return OMX_COLOR_FormatAndroidOpaque;
116    }
117
118    ALOGE("Uknown color format (%s), please add it to "
119         "CameraSource::getColorFormat", colorFormat);
120
121    CHECK(!"Unknown color format");
122}
123
// Creates a CameraSource using default settings: no existing camera or
// proxy, camera id 0, and -1 size/frame-rate values, which tell init()
// to keep whatever the camera is currently configured with.
CameraSource *CameraSource::Create() {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
}
132
133// static
134CameraSource *CameraSource::CreateFromCamera(
135    const sp<ICamera>& camera,
136    const sp<ICameraRecordingProxy>& proxy,
137    int32_t cameraId,
138    Size videoSize,
139    int32_t frameRate,
140    const sp<Surface>& surface,
141    bool storeMetaDataInVideoBuffers) {
142
143    CameraSource *source = new CameraSource(camera, proxy, cameraId,
144                    videoSize, frameRate, surface,
145                    storeMetaDataInVideoBuffers);
146    return source;
147}
148
// Constructs the source and performs camera setup via init(). The outcome
// is recorded in mInitCheck (query with initCheck()); on failure the
// camera is released immediately so the lock is not held.
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 means "use the camera's current size" until init() fills these in.
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}
179
// Returns the status recorded by the constructor's init() call;
// OK means the source is ready for start().
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
183
// Establishes the connection to the camera and locks it for our use.
// Two paths:
//  - camera == 0: connect to camera `cameraId` ourselves ("cold" camera);
//    FLAGS_HOT_CAMERA is cleared.
//  - camera != 0: wrap the application's remote camera ("hot" camera),
//    remember the recording proxy and watch it for binder death.
// Returns -EBUSY if the connection cannot be made.
// NOTE(review): on the hot path `proxy` is dereferenced without a NULL
// check — callers are presumably required to pass a valid proxy with a
// non-null camera; confirm against callers.
status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    // Take the camera's exclusive-use lock; released in releaseCamera().
    mCamera->lock();

    return OK;
}
210
211
/*
 * Check to see whether the requested video width and height is one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
220static bool isVideoSizeSupported(
221    int32_t width, int32_t height,
222    const Vector<Size>& supportedSizes) {
223
224    ALOGV("isVideoSizeSupported");
225    for (size_t i = 0; i < supportedSizes.size(); ++i) {
226        if (width  == supportedSizes[i].width &&
227            height == supportedSizes[i].height) {
228            return true;
229        }
230    }
231    return false;
232}
233
234/*
235 * If the preview and video output is separate, we only set the
236 * the video size, and applications should set the preview size
237 * to some proper value, and the recording framework will not
238 * change the preview size; otherwise, if the video and preview
239 * output is the same, we need to set the preview to be the same
240 * as the requested video size.
241 *
242 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
// Fills `sizes` with the camera's supported video sizes. If the camera
// advertises none (i.e. it has no separate video output), falls back to
// the supported preview sizes and reports via *isSetVideoSizeSupported
// that CameraParameters::setVideoSize() cannot be used.
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        ALOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}
266
267/*
268 * Check whether the camera has the supported color format
269 * @param params CameraParameters to retrieve the information
270 * @return OK if no error.
271 */
// Resolves the camera's video frame format string to an OMX color format
// and stores it in mColorFormat. Returns BAD_VALUE on -1; note that
// getColorFormat() CHECK-aborts on unknown formats, so the -1 branch is
// defensive.
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}
281
282/*
283 * Configure the camera to use the requested video size
284 * (width and height) and/or frame rate. If both width and
285 * height are -1, configuration on the video size is skipped.
286 * if frameRate is -1, configuration on the frame rate
287 * is skipped. Skipping the configuration allows one to
288 * use the current camera setting without the need to
289 * actually know the specific values (see Create() method).
290 *
291 * @param params the CameraParameters to be configured
292 * @param width the target video frame width in pixels
293 * @param height the target video frame height in pixels
294 * @param frameRate the target frame rate in frames per second.
295 * @return OK if no error.
296 */
297status_t CameraSource::configureCamera(
298        CameraParameters* params,
299        int32_t width, int32_t height,
300        int32_t frameRate) {
301    ALOGV("configureCamera");
302    Vector<Size> sizes;
303    bool isSetVideoSizeSupportedByCamera = true;
304    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
305    bool isCameraParamChanged = false;
306    if (width != -1 && height != -1) {
307        if (!isVideoSizeSupported(width, height, sizes)) {
308            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
309            return BAD_VALUE;
310        }
311        if (isSetVideoSizeSupportedByCamera) {
312            params->setVideoSize(width, height);
313        } else {
314            params->setPreviewSize(width, height);
315        }
316        isCameraParamChanged = true;
317    } else if ((width == -1 && height != -1) ||
318               (width != -1 && height == -1)) {
319        // If one and only one of the width and height is -1
320        // we reject such a request.
321        ALOGE("Requested video size (%dx%d) is not supported", width, height);
322        return BAD_VALUE;
323    } else {  // width == -1 && height == -1
324        // Do not configure the camera.
325        // Use the current width and height value setting from the camera.
326    }
327
328    if (frameRate != -1) {
329        CHECK(frameRate > 0 && frameRate <= 120);
330        const char* supportedFrameRates =
331                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
332        CHECK(supportedFrameRates != NULL);
333        ALOGV("Supported frame rates: %s", supportedFrameRates);
334        char buf[4];
335        snprintf(buf, 4, "%d", frameRate);
336        if (strstr(supportedFrameRates, buf) == NULL) {
337            ALOGE("Requested frame rate (%d) is not supported: %s",
338                frameRate, supportedFrameRates);
339            return BAD_VALUE;
340        }
341
342        // The frame rate is supported, set the camera to the requested value.
343        params->setPreviewFrameRate(frameRate);
344        isCameraParamChanged = true;
345    } else {  // frameRate == -1
346        // Do not configure the camera.
347        // Use the current frame rate value setting from the camera
348    }
349
350    if (isCameraParamChanged) {
351        // Either frame rate or frame size needs to be changed.
352        String8 s = params->flatten();
353        if (OK != mCamera->setParameters(s)) {
354            ALOGE("Could not change settings."
355                 " Someone else is using camera %p?", mCamera.get());
356            return -EBUSY;
357        }
358    }
359    return OK;
360}
361
362/*
363 * Check whether the requested video frame size
364 * has been successfully configured or not. If both width and height
365 * are -1, check on the current width and height value setting
366 * is performed.
367 *
368 * @param params CameraParameters to retrieve the information
369 * @param the target video frame width in pixels to check against
370 * @param the target video frame height in pixels to check against
371 * @return OK if no error
372 */
// Verifies that the requested video frame size was actually applied and
// records the final size in mVideoSize. If width/height are both -1, the
// camera's current setting is simply adopted.
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}
415
416/*
417 * Check the requested frame rate has been successfully configured or not.
418 * If the target frameRate is -1, check on the current frame rate value
419 * setting is performed.
420 *
421 * @param params CameraParameters to retrieve the information
422 * @param the target video frame rate to check against
423 * @return OK if no error.
424 */
425status_t CameraSource::checkFrameRate(
426        const CameraParameters& params,
427        int32_t frameRate) {
428
429    ALOGV("checkFrameRate");
430    int32_t frameRateActual = params.getPreviewFrameRate();
431    if (frameRateActual < 0) {
432        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
433        return UNKNOWN_ERROR;
434    }
435
436    // Check the actual video frame rate against the target/requested
437    // video frame rate.
438    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
439        ALOGE("Failed to set preview frame rate to %d fps. The actual "
440                "frame rate is %d", frameRate, frameRateActual);
441        return UNKNOWN_ERROR;
442    }
443
444    // Good now.
445    mVideoFrameRate = frameRateActual;
446    return OK;
447}
448
/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
 * @param frameRate the target frame rate in frames per second.
 *      if it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. Request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
467status_t CameraSource::init(
468        const sp<ICamera>& camera,
469        const sp<ICameraRecordingProxy>& proxy,
470        int32_t cameraId,
471        Size videoSize,
472        int32_t frameRate,
473        bool storeMetaDataInVideoBuffers) {
474
475    ALOGV("init");
476    status_t err = OK;
477    int64_t token = IPCThreadState::self()->clearCallingIdentity();
478    err = initWithCameraAccess(camera, proxy, cameraId,
479                               videoSize, frameRate,
480                               storeMetaDataInVideoBuffers);
481    IPCThreadState::self()->restoreCallingIdentity(token);
482    return err;
483}
484
485status_t CameraSource::initWithCameraAccess(
486        const sp<ICamera>& camera,
487        const sp<ICameraRecordingProxy>& proxy,
488        int32_t cameraId,
489        Size videoSize,
490        int32_t frameRate,
491        bool storeMetaDataInVideoBuffers) {
492    ALOGV("initWithCameraAccess");
493    status_t err = OK;
494
495    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
496        ALOGE("Camera connection could not be established.");
497        return err;
498    }
499    CameraParameters params(mCamera->getParameters());
500    if ((err = isCameraColorFormatSupported(params)) != OK) {
501        return err;
502    }
503
504    // Set the camera to use the requested video frame size
505    // and/or frame rate.
506    if ((err = configureCamera(&params,
507                    videoSize.width, videoSize.height,
508                    frameRate))) {
509        return err;
510    }
511
512    // Check on video frame size and frame rate.
513    CameraParameters newCameraParams(mCamera->getParameters());
514    if ((err = checkVideoSize(newCameraParams,
515                videoSize.width, videoSize.height)) != OK) {
516        return err;
517    }
518    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
519        return err;
520    }
521
522    // Set the preview display. Skip this if mSurface is null because
523    // applications may already set a surface to the camera.
524    if (mSurface != NULL) {
525        // This CHECK is good, since we just passed the lock/unlock
526        // check earlier by calling mCamera->setParameters().
527        CHECK_EQ((status_t)OK, mCamera->setPreviewDisplay(mSurface));
528    }
529
530    // By default, do not store metadata in video buffers
531    mIsMetaDataStoredInVideoBuffers = false;
532    mCamera->storeMetaDataInBuffers(false);
533    if (storeMetaDataInVideoBuffers) {
534        if (OK == mCamera->storeMetaDataInBuffers(true)) {
535            mIsMetaDataStoredInVideoBuffers = true;
536        }
537    }
538
539    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
540    if (glitchDurationUs > mGlitchDurationThresholdUs) {
541        mGlitchDurationThresholdUs = glitchDurationUs;
542    }
543
544    // XXX: query camera for the stride and slice height
545    // when the capability becomes available.
546    mMeta = new MetaData;
547    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
548    mMeta->setInt32(kKeyColorFormat, mColorFormat);
549    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
550    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
551    mMeta->setInt32(kKeyStride,      mVideoSize.width);
552    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
553    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
554    return OK;
555}
556
// Stops recording (via reset) if started; otherwise, if initialization
// succeeded, just releases the camera so its lock is dropped.
CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released(). This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}
567
// Starts the camera's recording stream. On the hot-camera path the local
// Camera wrapper is released (unlock + clear) so the application's camera,
// reached through the recording proxy, can take over; on the cold path we
// listen and record on our own camera directly.
void CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        CHECK_EQ((status_t)OK,
            mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        CHECK(mCamera->recordingEnabled());
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
}
586
// Starts the media source. Must not already be started; fails with the
// stored init status if initialization failed. `meta` may carry kKeyTime
// as the desired recording start time (us); frames timestamped earlier
// are dropped in dataCallbackTimestamp().
status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Opt-in frame statistics logging via system property.
    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}
612
// Stops the camera's recording stream, going through the recording proxy
// on the hot-camera path (mCamera was cleared in startCameraRecording)
// and directly through the camera otherwise.
void CameraSource::stopCameraRecording() {
    ALOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}
622
623void CameraSource::releaseCamera() {
624    ALOGV("releaseCamera");
625    if (mCamera != 0) {
626        int64_t token = IPCThreadState::self()->clearCallingIdentity();
627        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
628            ALOGV("Camera was cold when we started, stopping preview");
629            mCamera->stopPreview();
630            mCamera->disconnect();
631        }
632        mCamera->unlock();
633        mCamera.clear();
634        mCamera = 0;
635        IPCThreadState::self()->restoreCallingIdentity(token);
636    }
637    if (mCameraRecordingProxy != 0) {
638        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
639        mCameraRecordingProxy.clear();
640    }
641    mCameraFlags = 0;
642}
643
// Stops the source: flags mStarted false, wakes any reader blocked in
// read(), drops queued frames, waits (with timeout) for frames still held
// by the encoder, then stops recording and releases the camera. Logs
// collected statistics. Called from the destructor and by stop paths.
status_t CameraSource::reset() {
    ALOGD("reset: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    // Unblock a reader waiting in read() so it can observe !mStarted.
    mFrameAvailableCondition.signal();

    int64_t token;
    bool isTokenValid = false;
    if (mCamera != 0) {
        token = IPCThreadState::self()->clearCallingIdentity();
        isTokenValid = true;
    }
    releaseQueuedFrames();
    // Wait for the encoder to return all outstanding frames via
    // signalBufferReturned(); each wait is bounded by one frame interval
    // plus the 3 s CAMERA_SOURCE_TIMEOUT_NS slack.
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
            ALOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();
    if (isTokenValid) {
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    if (mCollectStats) {
        ALOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        ALOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    // Every received frame must have been either encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    ALOGD("reset: X");
    return OK;
}
685
686void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
687    ALOGV("releaseRecordingFrame");
688    if (mCameraRecordingProxy != NULL) {
689        mCameraRecordingProxy->releaseRecordingFrame(frame);
690    } else if (mCamera != NULL) {
691        int64_t token = IPCThreadState::self()->clearCallingIdentity();
692        mCamera->releaseRecordingFrame(frame);
693        IPCThreadState::self()->restoreCallingIdentity(token);
694    }
695}
696
697void CameraSource::releaseQueuedFrames() {
698    List<sp<IMemory> >::iterator it;
699    while (!mFramesReceived.empty()) {
700        it = mFramesReceived.begin();
701        releaseRecordingFrame(*it);
702        mFramesReceived.erase(it);
703        ++mNumFramesDropped;
704    }
705}
706
// Returns the output format metadata built in initWithCameraAccess().
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
710
// Thin wrapper around releaseRecordingFrame(); kept as a separate hook
// (overridable behavior is presumed — confirm against subclasses such as
// CameraSourceTimeLapse).
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
714
// MediaBufferObserver callback: the encoder has finished with `buffer`.
// Finds the matching IMemory by data pointer, returns it to the camera,
// counts it as encoded, releases the MediaBuffer wrapper, and wakes
// reset() which may be waiting for outstanding frames. Aborts if the
// buffer is not one we handed out.
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            // Safe: we return immediately after erasing, so the
            // invalidated iterator is never advanced.
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}
732
// MediaSource::read(): blocks until a camera frame is available (or the
// source is stopped), then wraps the frame's memory in a MediaBuffer that
// shares the IMemory storage (no copy). The frame stays in
// mFramesBeingEncoded until signalBufferReturned() is called. Seeking is
// not supported. Returns OK with *buffer == NULL if stopped while waiting.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Wait for a frame, bounded by one frame interval plus the 3 s
        // slack; on timeout, detect a dead recording proxy and end the
        // stream, otherwise keep waiting.
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        // MediaBuffer aliases the IMemory data; ownership of the frame is
        // tracked via mFramesBeingEncoded + signalBufferReturned().
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
779
// Receives a recording frame from the camera (timestamp in microseconds).
// Drops frames when stopped, when captured before the requested start
// time, or when the subclass (CameraSourceTimeLapse) asks to skip; dropped
// frames are returned to the camera immediately. Otherwise the frame and
// its adjusted timestamp are queued for read() and the reader is signaled.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing.
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // Repurpose mStartTimeUs as the initial delay between the
            // requested start time and the first captured frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    // Output timestamps are relative to the first frame, offset by the
    // initial delay computed above.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
828
// Reports whether the camera agreed to deliver metadata (rather than raw
// YUV) in video buffers; decided once in initWithCameraAccess().
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}
833
834CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
835    mSource = source;
836}
837
// Hot-camera frame callback: forwards the frame to the source, converting
// the timestamp from nanoseconds to microseconds.
void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}
842
// Recording proxy binder-death callback. Only logs; the actual recovery
// happens in read(), which probes isBinderAlive() after a frame timeout
// (linkToDeath is required for isBinderAlive to work).
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    ALOGI("Camera recording proxy died");
}
846
847}  // namespace android
848