CameraSource.cpp revision 9197441e59be323e2f6f0c6506f7248eb6a944b2
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
                                    camera_frame_metadata_t *metadata) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
    }

    LOGE("Unknown color format (%s), please add it to "
         "CameraSource::getColorFormat", colorFormat);

    CHECK_EQ(0, "Unknown color format");
    return -1;  // Not reached (CHECK_EQ aborts); matches the -1 error check in the caller.
}
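
/*
 * Illustrative note (not part of the original code): getColorFormat() maps the
 * CameraParameters video frame format string to an OMX color constant. Assuming
 * a CameraParameters instance named "params", a caller would use it roughly as:
 *
 *     const char *fmt = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
 *     int32_t omxColor = getColorFormat(fmt);   // e.g. "yuv420sp" maps to
 *                                               // OMX_COLOR_FormatYUV420SemiPlanar
 */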

CameraSource *CameraSource::Create() {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
}

// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
                    videoSize, frameRate, surface,
                    storeMetaDataInVideoBuffers);
    return source;
}
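
/*
 * Illustrative sketch (not part of the original code): a recorder-style caller
 * that already holds an application camera, its recording proxy, and a preview
 * surface (all assumed names here) would typically create the source like this:
 *
 *     Size videoSize;
 *     videoSize.width  = 1280;
 *     videoSize.height = 720;
 *     CameraSource *source = CameraSource::CreateFromCamera(
 *             cameraRemote, cameraProxy, cameraId, videoSize,
 *             30,              // frames per second
 *             previewSurface,
 *             false);          // storeMetaDataInVideoBuffers
 *     if (source->initCheck() != OK) {
 *         delete source;       // init failed; see initCheck()
 *         source = NULL;
 *     }
 */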

CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    mCamera->lock();

    return OK;
}


/*
 * Check whether the requested video width and height is one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    LOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width  == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video output is separate, we only set the
 * video size, and applications should set the preview size
 * to some proper value, and the recording framework will not
 * change the preview size; otherwise, if the video and preview
 * output is the same, we need to set the preview to be the same
 * as the requested video size.
 *
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        LOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera has a supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration on the video size is skipped.
 * If frameRate is -1, configuration on the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    LOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            LOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        LOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        LOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        if (strstr(supportedFrameRates, buf) == NULL) {
            LOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            LOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}
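
/*
 * Illustrative note (not part of the original code): passing -1 for width and
 * height, or for frameRate, leaves the corresponding camera setting untouched,
 * which is how Create() reuses whatever the camera is already configured with.
 * For example, keeping the current video size while requesting 30 fps:
 *
 *     CameraParameters params(mCamera->getParameters());
 *     status_t err = configureCamera(&params, -1, -1, 30);
 */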

/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, the current width and height settings of the camera
 * are checked instead.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    LOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        LOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            LOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully configured
 * or not. If the target frameRate is -1, the current frame rate setting
 * of the camera is checked instead.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    LOGV("checkFrameRate");
    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        LOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize is -1, use the current
 *      width and height settings of the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. Request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    LOGV("init");
    status_t err = OK;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    LOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
        LOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate)) != OK) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // This CHECK is good, since we just passed the lock/unlock
    // check earlier by calling mCamera->setParameters().
    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    } else if (mInitCheck == OK) {
        // Camera is initialized, but start() was never called, so the
        // lock on the camera was never released. Make sure the camera's
        // lock is released in this case.
        releaseCamera();
    }
}

void CameraSource::startCameraRecording() {
    LOGV("startCameraRecording");
    // Reset the identity to the current thread because mediaserver owns the
    // camera and recording is started by the application. The application
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        CHECK(mCamera->recordingEnabled());
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
}

status_t CameraSource::start(MetaData *meta) {
    LOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        LOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}
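
/*
 * Illustrative sketch (not part of the original code): a caller can pass the
 * desired recording start time (in microseconds, from its own clock) through
 * the optional MetaData, so that frames captured before that time are dropped
 * and later timestamps are offset by the initial delay:
 *
 *     sp<MetaData> startMeta = new MetaData;
 *     startMeta->setInt64(kKeyTime, startTimeUs);   // startTimeUs is assumed
 *     status_t err = source->start(startMeta.get());
 */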

void CameraSource::stopCameraRecording() {
    LOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}

void CameraSource::releaseCamera() {
    LOGV("releaseCamera");
    if (mCamera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            LOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        }
        mCamera->unlock();
        mCamera.clear();
        mCamera = 0;
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}

status_t CameraSource::stop() {
    LOGD("stop: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    int64_t token;
    bool isTokenValid = false;
    if (mCamera != 0) {
        token = IPCThreadState::self()->clearCallingIdentity();
        isTokenValid = true;
    }
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) {
            LOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();
    if (isTokenValid) {
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        LOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    LOGD("stop: X");
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    LOGV("releaseRecordingFrame");
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
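
/*
 * Illustrative sketch (not part of the original code): a downstream consumer
 * (for example a video encoder) can query the frame geometry and color format
 * recorded by initWithCameraAccess() through getFormat():
 *
 *     sp<MetaData> format = source->getFormat();
 *     int32_t width, height, colorFormat;
 *     CHECK(format->findInt32(kKeyWidth, &width));
 *     CHECK(format->findInt32(kKeyHeight, &height));
 *     CHECK(format->findInt32(kKeyColorFormat, &colorFormat));
 */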

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock, 1000000000LL)) {
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    LOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                LOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
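
/*
 * Illustrative sketch (not part of the original code): the usual MediaSource
 * pull loop. Every buffer handed out by read() must eventually be released,
 * which triggers signalBufferReturned() and returns the frame to the camera:
 *
 *     MediaBuffer *buffer;
 *     while (source->read(&buffer) == OK && buffer != NULL) {
 *         int64_t timeUs;
 *         CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
 *         // ... consume buffer->data() / buffer->range_length() ...
 *         buffer->release();
 *         buffer = NULL;
 *     }
 */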

void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
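
/*
 * Illustrative numbers (not part of the original code): if mStartTimeUs is
 * initially 10,000,000 us and the first frame arrives at timestampUs =
 * 10,500,000 us, mStartTimeUs becomes the 500,000 us initial delay; a later
 * frame at timestampUs = 11,500,000 us is then stamped
 * timeUs = 500,000 + (11,500,000 - 10,500,000) = 1,500,000 us.
 */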

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    LOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    LOGI("Camera recording proxy died");
}

}  // namespace android