// CameraSource.cpp revision ad4e408b8ea397caadbfee85e1e39515e7e08104
1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "CameraSource"
19#include <utils/Log.h>
20
21#include <OMX_Component.h>
22#include <binder/IPCThreadState.h>
23#include <media/stagefright/foundation/ADebug.h>
24#include <media/stagefright/CameraSource.h>
25#include <media/stagefright/MediaDefs.h>
26#include <media/stagefright/MediaErrors.h>
27#include <media/stagefright/MetaData.h>
28#include <camera/Camera.h>
29#include <camera/CameraParameters.h>
30#include <gui/Surface.h>
31#include <utils/String8.h>
32#include <cutils/properties.h>
33
34namespace android {
35
// How long to wait for a frame (or frame-completion) condition before
// logging a timeout warning: 3 seconds, in nanoseconds.
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
37
// Listener registered on the locally-owned Camera ("cold" camera path).
// Forwards frame callbacks into the owning CameraSource. Holds only a
// weak reference so the listener cannot keep the source alive.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // Generic camera notification (errors, focus, ...); logged only.
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Frame data callback; forwarded to the source if it is still alive.
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    // Recording-frame callback carrying a capture timestamp in nanoseconds.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;  // weak: avoids a reference cycle with the source

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
57
// Stores a weak reference to the source; promotion happens per callback.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
61
// Nothing to clean up; mSource is a weak pointer.
CameraSourceListener::~CameraSourceListener() {
}
64
// Camera notifications are not acted upon here; only logged at verbose level.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
68
69void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
70                                    camera_frame_metadata_t *metadata) {
71    ALOGV("postData(%d, ptr:%p, size:%d)",
72         msgType, dataPtr->pointer(), dataPtr->size());
73
74    sp<CameraSource> source = mSource.promote();
75    if (source.get() != NULL) {
76        source->dataCallback(msgType, dataPtr);
77    }
78}
79
80void CameraSourceListener::postDataTimestamp(
81        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
82
83    sp<CameraSource> source = mSource.promote();
84    if (source.get() != NULL) {
85        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
86    }
87}
88
89static int32_t getColorFormat(const char* colorFormat) {
90    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
91       return OMX_COLOR_FormatYUV420Planar;
92    }
93
94    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
95       return OMX_COLOR_FormatYUV422SemiPlanar;
96    }
97
98    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
99        return OMX_COLOR_FormatYUV420SemiPlanar;
100    }
101
102    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
103        return OMX_COLOR_FormatYCbYCr;
104    }
105
106    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
107       return OMX_COLOR_Format16bitRGB565;
108    }
109
110    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
111       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
112    }
113
114    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
115        return OMX_COLOR_FormatAndroidOpaque;
116    }
117
118    ALOGE("Uknown color format (%s), please add it to "
119         "CameraSource::getColorFormat", colorFormat);
120
121    CHECK(!"Unknown color format");
122}
123
124CameraSource *CameraSource::Create() {
125    Size size;
126    size.width = -1;
127    size.height = -1;
128
129    sp<ICamera> camera;
130    return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
131}
132
133// static
134CameraSource *CameraSource::CreateFromCamera(
135    const sp<ICamera>& camera,
136    const sp<ICameraRecordingProxy>& proxy,
137    int32_t cameraId,
138    Size videoSize,
139    int32_t frameRate,
140    const sp<Surface>& surface,
141    bool storeMetaDataInVideoBuffers) {
142
143    CameraSource *source = new CameraSource(camera, proxy, cameraId,
144                    videoSize, frameRate, surface,
145                    storeMetaDataInVideoBuffers);
146    return source;
147}
148
// Constructor: records defaults, then delegates the real camera setup to
// init(). The result is stored in mInitCheck for initCheck().
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 sentinels: "use whatever the camera is currently configured for".
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    // On failure, release the camera immediately so its lock is dropped.
    if (mInitCheck != OK) releaseCamera();
}
180
// Returns the status recorded by the constructor's init() call; callers
// must check this before using the source.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
184
// Connect to (or wrap) the camera and acquire its lock.
// camera == 0: connect a fresh, locally-owned ("cold") camera by id.
// Otherwise: wrap the application's remote camera ("hot"), keep the
// recording proxy, and register a death notifier on it.
// Returns -EBUSY if the camera cannot be obtained.
status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    // Take the camera's lock so only we can change its settings.
    mCamera->lock();

    return OK;
}
211
212
213/*
214 * Check to see whether the requested video width and height is one
215 * of the supported sizes.
216 * @param width the video frame width in pixels
217 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
219 * @return true if the dimension (width and height) is supported.
220 */
221static bool isVideoSizeSupported(
222    int32_t width, int32_t height,
223    const Vector<Size>& supportedSizes) {
224
225    ALOGV("isVideoSizeSupported");
226    for (size_t i = 0; i < supportedSizes.size(); ++i) {
227        if (width  == supportedSizes[i].width &&
228            height == supportedSizes[i].height) {
229            return true;
230        }
231    }
232    return false;
233}
234
235/*
236 * If the preview and video output is separate, we only set the
237 * the video size, and applications should set the preview size
238 * to some proper value, and the recording framework will not
239 * change the preview size; otherwise, if the video and preview
240 * output is the same, we need to set the preview to be the same
241 * as the requested video size.
242 *
243 */
244/*
245 * Query the camera to retrieve the supported video frame sizes
246 * and also to see whether CameraParameters::setVideoSize()
247 * is supported or not.
248 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
250 *      CameraParameters::setVideoSize() is supported or not.
251 * @param sizes returns the vector of Size objects for the
252 *      supported video frame sizes advertised by the camera.
253 */
254static void getSupportedVideoSizes(
255    const CameraParameters& params,
256    bool *isSetVideoSizeSupported,
257    Vector<Size>& sizes) {
258
259    *isSetVideoSizeSupported = true;
260    params.getSupportedVideoSizes(sizes);
261    if (sizes.size() == 0) {
262        ALOGD("Camera does not support setVideoSize()");
263        params.getSupportedPreviewSizes(sizes);
264        *isSetVideoSizeSupported = false;
265    }
266}
267
268/*
269 * Check whether the camera has the supported color format
270 * @param params CameraParameters to retrieve the information
271 * @return OK if no error.
272 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    // Translate the camera's advertised video frame format into an OMX
    // color constant; the result is recorded for the output MetaData.
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    // Defensive: getColorFormat CHECK-aborts on unknown formats, so -1 is
    // not expected here in practice.
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}
282
283/*
284 * Configure the camera to use the requested video size
285 * (width and height) and/or frame rate. If both width and
286 * height are -1, configuration on the video size is skipped.
287 * if frameRate is -1, configuration on the frame rate
288 * is skipped. Skipping the configuration allows one to
289 * use the current camera setting without the need to
290 * actually know the specific values (see Create() method).
291 *
292 * @param params the CameraParameters to be configured
293 * @param width the target video frame width in pixels
294 * @param height the target video frame height in pixels
295 * @param frameRate the target frame rate in frames per second.
296 * @return OK if no error.
297 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    ALOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        // Both dimensions requested: validate against the supported list,
        // then apply via setVideoSize() when available, else via preview size.
        if (!isVideoSizeSupported(width, height, sizes)) {
            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        ALOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        // Valid requested rates are 1..120, so "%d" fits in buf[4]
        // ("120" plus the terminating NUL).
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        ALOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        // NOTE(review): substring match — e.g. a requested "5" would match
        // a supported "15". Kept as-is to preserve established behavior;
        // confirm before tightening to exact token comparison.
        if (strstr(supportedFrameRates, buf) == NULL) {
            ALOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            ALOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}
362
363/*
364 * Check whether the requested video frame size
365 * has been successfully configured or not. If both width and height
366 * are -1, check on the current width and height value setting
367 * is performed.
368 *
369 * @param params CameraParameters to retrieve the information
370 * @param the target video frame width in pixels to check against
371 * @param the target video frame height in pixels to check against
372 * @return OK if no error
373 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    // The camera failed to report a size at all.
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size. (width == height == -1 means "accept whatever
    // the camera is currently set to".)
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now: record the confirmed size for the output format.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}
416
417/*
418 * Check the requested frame rate has been successfully configured or not.
419 * If the target frameRate is -1, check on the current frame rate value
420 * setting is performed.
421 *
422 * @param params CameraParameters to retrieve the information
423 * @param the target video frame rate to check against
424 * @return OK if no error.
425 */
426status_t CameraSource::checkFrameRate(
427        const CameraParameters& params,
428        int32_t frameRate) {
429
430    ALOGV("checkFrameRate");
431    int32_t frameRateActual = params.getPreviewFrameRate();
432    if (frameRateActual < 0) {
433        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
434        return UNKNOWN_ERROR;
435    }
436
437    // Check the actual video frame rate against the target/requested
438    // video frame rate.
439    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
440        ALOGE("Failed to set preview frame rate to %d fps. The actual "
441                "frame rate is %d", frameRate, frameRateActual);
442        return UNKNOWN_ERROR;
443    }
444
445    // Good now.
446    mVideoFrameRate = frameRateActual;
447    return OK;
448}
449
450/*
 * Initialize the CameraSource so that it becomes
452 * ready for providing the video input streams as requested.
453 * @param camera the camera object used for the video source
454 * @param cameraId if camera == 0, use camera with this id
455 *      as the video source
456 * @param videoSize the target video frame size. If both
457 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
459 * @param frameRate the target frame rate in frames per second.
460 *      if it is -1, use the current camera frame rate setting.
461 * @param storeMetaDataInVideoBuffers request to store meta
462 *      data or real YUV data in video buffers. Request to
463 *      store meta data in video buffers may not be honored
464 *      if the source does not support this feature.
465 *
466 * @return OK if no error.
467 */
468status_t CameraSource::init(
469        const sp<ICamera>& camera,
470        const sp<ICameraRecordingProxy>& proxy,
471        int32_t cameraId,
472        Size videoSize,
473        int32_t frameRate,
474        bool storeMetaDataInVideoBuffers) {
475
476    ALOGV("init");
477    status_t err = OK;
478    int64_t token = IPCThreadState::self()->clearCallingIdentity();
479    err = initWithCameraAccess(camera, proxy, cameraId,
480                               videoSize, frameRate,
481                               storeMetaDataInVideoBuffers);
482    IPCThreadState::self()->restoreCallingIdentity(token);
483    return err;
484}
485
486status_t CameraSource::initWithCameraAccess(
487        const sp<ICamera>& camera,
488        const sp<ICameraRecordingProxy>& proxy,
489        int32_t cameraId,
490        Size videoSize,
491        int32_t frameRate,
492        bool storeMetaDataInVideoBuffers) {
493    ALOGV("initWithCameraAccess");
494    status_t err = OK;
495
496    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
497        ALOGE("Camera connection could not be established.");
498        return err;
499    }
500    CameraParameters params(mCamera->getParameters());
501    if ((err = isCameraColorFormatSupported(params)) != OK) {
502        return err;
503    }
504
505    // Set the camera to use the requested video frame size
506    // and/or frame rate.
507    if ((err = configureCamera(&params,
508                    videoSize.width, videoSize.height,
509                    frameRate))) {
510        return err;
511    }
512
513    // Check on video frame size and frame rate.
514    CameraParameters newCameraParams(mCamera->getParameters());
515    if ((err = checkVideoSize(newCameraParams,
516                videoSize.width, videoSize.height)) != OK) {
517        return err;
518    }
519    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
520        return err;
521    }
522
523    // Set the preview display. Skip this if mSurface is null because
524    // applications may already set a surface to the camera.
525    if (mSurface != NULL) {
526        // This CHECK is good, since we just passed the lock/unlock
527        // check earlier by calling mCamera->setParameters().
528        CHECK_EQ((status_t)OK, mCamera->setPreviewDisplay(mSurface));
529    }
530
531    // By default, do not store metadata in video buffers
532    mIsMetaDataStoredInVideoBuffers = false;
533    mCamera->storeMetaDataInBuffers(false);
534    if (storeMetaDataInVideoBuffers) {
535        if (OK == mCamera->storeMetaDataInBuffers(true)) {
536            mIsMetaDataStoredInVideoBuffers = true;
537        }
538    }
539
540    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
541    if (glitchDurationUs > mGlitchDurationThresholdUs) {
542        mGlitchDurationThresholdUs = glitchDurationUs;
543    }
544
545    // XXX: query camera for the stride and slice height
546    // when the capability becomes available.
547    mMeta = new MetaData;
548    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
549    mMeta->setInt32(kKeyColorFormat, mColorFormat);
550    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
551    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
552    mMeta->setInt32(kKeyStride,      mVideoSize.width);
553    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
554    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
555    return OK;
556}
557
// Destructor: if recording was started, reset() performs the full
// teardown; otherwise only the camera lock needs to be released.
CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released(). This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}
568
// Begin recording on the camera, via the recording proxy for a "hot"
// (application-owned) camera or directly for a "cold" (locally-owned) one.
void CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mNumInputBuffers > 0) {
        // Request the buffer count the encoder asked for via start(meta).
        status_t err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video buffer count to %d due to %d",
                mNumInputBuffers, err);
        }
    }

    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        // Hand the camera back to the app (unlock + drop our wrapper) and
        // start recording through the proxy with our listener attached.
        mCamera->unlock();
        mCamera.clear();
        CHECK_EQ((status_t)OK,
            mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    } else {
        // Cold camera: we drive recording directly.
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        CHECK(mCamera->recordingEnabled());
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
}
599
// Start the source. Optional meta keys:
//   kKeyTime       - recording start time (us); earlier frames are dropped
//   kKeyNumBuffers - requested number of video input buffers
// Returns mInitCheck if initialization previously failed.
status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Opt-in frame statistics via a system property ("1" or "true").
    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}
634
635void CameraSource::stopCameraRecording() {
636    ALOGV("stopCameraRecording");
637    if (mCameraFlags & FLAGS_HOT_CAMERA) {
638        mCameraRecordingProxy->stopRecording();
639    } else {
640        mCamera->setListener(NULL);
641        mCamera->stopRecording();
642    }
643}
644
// Release every camera-related resource: stop preview/disconnect a cold
// camera, unlock and drop our handle, and unlink the proxy death notifier.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    if (mCamera != 0) {
        // Use mediaserver's identity for the camera teardown calls.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            ALOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        }
        mCamera->unlock();
        mCamera.clear();
        mCamera = 0;
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}
665
// Stop the source: wake any blocked read(), return all queued frames,
// wait (with timeout) for frames still held by the encoder, then stop
// recording and release the camera. Logs statistics when enabled.
status_t CameraSource::reset() {
    ALOGD("reset: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    // Unblock a reader waiting in read() so it can observe !mStarted.
    mFrameAvailableCondition.signal();

    int64_t token;
    bool isTokenValid = false;
    if (mCamera != 0) {
        token = IPCThreadState::self()->clearCallingIdentity();
        isTokenValid = true;
    }
    releaseQueuedFrames();
    // Wait for buffers the encoder still owns; signalBufferReturned()
    // signals mFrameCompleteCondition as each one comes back.
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
            ALOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();
    if (isTokenValid) {
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    if (mCollectStats) {
        ALOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        ALOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    // Every received frame must have been either encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    ALOGD("reset: X");
    return OK;
}
707
708void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
709    ALOGV("releaseRecordingFrame");
710    if (mCameraRecordingProxy != NULL) {
711        mCameraRecordingProxy->releaseRecordingFrame(frame);
712    } else if (mCamera != NULL) {
713        int64_t token = IPCThreadState::self()->clearCallingIdentity();
714        mCamera->releaseRecordingFrame(frame);
715        IPCThreadState::self()->restoreCallingIdentity(token);
716    }
717}
718
719void CameraSource::releaseQueuedFrames() {
720    List<sp<IMemory> >::iterator it;
721    while (!mFramesReceived.empty()) {
722        it = mFramesReceived.begin();
723        releaseRecordingFrame(*it);
724        mFramesReceived.erase(it);
725        ++mNumFramesDropped;
726    }
727}
728
// Returns the output format built in initWithCameraAccess() (raw video,
// size, stride, color format, frame rate).
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
732
// Thin wrapper kept as a separate hook; CameraSourceTimeLapse relies on
// overriding release behavior through this layer of indirection.
// NOTE(review): the override relationship is declared in the header,
// not visible here — confirm against CameraSource.h.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
736
// Called by the downstream consumer when it is done with a MediaBuffer.
// Finds the matching camera frame by pointer, returns it to the camera,
// and releases the MediaBuffer. Aborts if the buffer is not ours.
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        // Match by the underlying memory pointer (read() wrapped the
        // IMemory's pointer directly in the MediaBuffer).
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            // Wake reset(), which may be waiting for outstanding frames.
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}
754
// MediaSource::read(): block until a camera frame is available (or the
// source is stopped), then hand out a MediaBuffer wrapping the frame's
// memory. Seeking is not supported. Returns OK with *buffer == NULL if
// the source was stopped while waiting.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Wait for a frame; the timeout lets us detect a dead recording
        // proxy (hot-camera app went away) and end the stream.
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        // Pop the oldest frame and its timestamp (the two lists are
        // kept in lockstep by dataCallbackTimestamp()).
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        // Track the frame until signalBufferReturned() gives it back.
        mFramesBeingEncoded.push_back(frame);
        // The MediaBuffer wraps the IMemory's storage without copying.
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
801
// Per-frame entry point from the camera (via listener or proxy listener),
// with the capture timestamp already converted to microseconds. Drops
// frames that arrive before the requested start time or while stopped,
// tracks glitch statistics, and queues accepted frames for read().
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing; a large gap counts as
        // a glitch for the statistics reported in reset().
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on, mStartTimeUs holds the initial delay (time
            // between requested start and the first captured frame).
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    // Queue the frame and its normalized timestamp in lockstep; read()
    // pops both together.
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
850
// True when the camera agreed (in initWithCameraAccess) to deliver
// metadata handles instead of raw YUV data in video buffers.
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}
855
856CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
857    mSource = source;
858}
859
860void CameraSource::ProxyListener::dataCallbackTimestamp(
861        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
862    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
863}
864
// Logged only; read() independently detects a dead proxy binder via
// isBinderAlive() and ends the stream.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    ALOGI("Camera recording proxy died");
}
868
869}  // namespace android
870