CameraSource.cpp revision e8e5f86e9e310b065596c8cbbca1543eb833dee1
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;

struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
                                    camera_frame_metadata_t *metadata) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
    }

    LOGE("Unknown color format (%s), please add it to "
         "CameraSource::getColorFormat", colorFormat);

    // Return -1 so the caller (isCameraColorFormatSupported) can reject
    // the format with BAD_VALUE instead of aborting here.
    return -1;
}

CameraSource *CameraSource::Create() {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
}

// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
                    videoSize, frameRate, surface,
                    storeMetaDataInVideoBuffers);
    return source;
}
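
/*
 * Illustrative sketch (not part of this file): how a client such as a
 * recorder might obtain a CameraSource. The variable names below are
 * hypothetical; only Create()/CreateFromCamera()/initCheck() come from
 * this class.
 *
 *   sp<CameraSource> source = CameraSource::CreateFromCamera(
 *           existingICamera,   // ICamera handed over by the application
 *           recordingProxy,    // ICameraRecordingProxy from the application
 *           cameraId,
 *           videoSize,         // e.g. {1280, 720}
 *           30,                // target frame rate in fps
 *           previewSurface,
 *           true);             // request metadata-in-buffers mode
 *   if (source->initCheck() != OK) {
 *       // Camera busy, unsupported size/rate, etc.
 *   }
 */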

CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    Size videoSize,
    int32_t frameRate,
    const sp<Surface>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    mCamera->lock();

    return OK;
}


/*
 * Check whether the requested video size (width and height) is one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    LOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width  == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video output are separate, we only set
 * the video size; applications should set the preview size to
 * some proper value, and the recording framework will not
 * change it. Otherwise, if the video and preview output are
 * the same, we need to set the preview size to be the same as
 * the requested video size.
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        LOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera outputs a supported color format.
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration of the video size is skipped.
 * If frameRate is -1, configuration of the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera settings without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    LOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            LOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        LOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        LOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        if (strstr(supportedFrameRates, buf) == NULL) {
            LOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            LOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}
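
/*
 * Illustrative sketch (not part of this file): the -1 convention used by
 * configureCamera(). Passing -1 for a field means "keep the camera's
 * current setting", which is how Create() reuses whatever the camera is
 * already configured for. The calls below are hypothetical usage only.
 *
 *   CameraParameters params(mCamera->getParameters());
 *   configureCamera(&params, 1280, 720, 30);  // set size and frame rate
 *   configureCamera(&params, -1, -1, 30);     // keep size, set frame rate
 *   configureCamera(&params, -1, -1, -1);     // change nothing at all
 *   configureCamera(&params, 1280, -1, -1);   // rejected with BAD_VALUE
 */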

/*
 * Check whether the requested video frame size has been
 * successfully configured or not. If both width and height
 * are -1, the check is performed against the current width
 * and height settings.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    LOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        LOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            LOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully configured.
 * If the target frameRate is -1, the check is performed against the
 * current frame rate setting.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    LOGV("checkFrameRate");
    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        LOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use the camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize are -1, use the current
 *      width and height settings of the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. A request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    LOGV("init");
    status_t err = OK;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    LOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
        LOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate)) != OK) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // This CHECK is good, since we just passed the lock/unlock
    // check earlier by calling mCamera->setParameters().
    CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}
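
/*
 * Illustrative sketch (not part of this file): the format advertised by
 * getFormat() after a successful init. A downstream consumer (for example
 * a video encoder) would typically read it like this; the variable names
 * are hypothetical.
 *
 *   sp<MetaData> format = source->getFormat();
 *   int32_t width, height, colorFormat, frameRate;
 *   CHECK(format->findInt32(kKeyWidth, &width));
 *   CHECK(format->findInt32(kKeyHeight, &height));
 *   CHECK(format->findInt32(kKeyColorFormat, &colorFormat));
 *   CHECK(format->findInt32(kKeyFrameRate, &frameRate));
 *   // kKeyStride/kKeySliceHeight currently mirror width/height; see above.
 */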

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    } else if (mInitCheck == OK) {
        // The camera was initialized, but since start() was never called,
        // its lock was never released. Make sure the camera's lock is
        // released in this case.
        releaseCamera();
    }
}

void CameraSource::startCameraRecording() {
    LOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        CHECK(mCamera->recordingEnabled());
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
}

status_t CameraSource::start(MetaData *meta) {
    LOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        LOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    startCameraRecording();

    mStarted = true;
    return OK;
}
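
/*
 * Illustrative sketch (not part of this file): passing a recording start
 * time to start(). kKeyTime here is the time at which recording began;
 * dataCallbackTimestamp() uses it to compute the initial delay. The
 * variable names are hypothetical.
 *
 *   sp<MetaData> startMeta = new MetaData;
 *   startMeta->setInt64(kKeyTime, startTimeUs);  // e.g. systemTime() / 1000
 *   status_t err = source->start(startMeta.get());
 */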

void CameraSource::stopCameraRecording() {
    LOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}

void CameraSource::releaseCamera() {
    LOGV("releaseCamera");
    if (mCamera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            LOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        }
        mCamera->unlock();
        mCamera.clear();
        mCamera = 0;
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}

status_t CameraSource::stop() {
    LOGD("stop: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    int64_t token;
    bool isTokenValid = false;
    if (mCamera != 0) {
        token = IPCThreadState::self()->clearCallingIdentity();
        isTokenValid = true;
    }
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
            LOGW("Timed out waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();
    if (isTokenValid) {
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        LOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    LOGD("stop: X");
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    LOGV("releaseRecordingFrame");
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    LOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                LOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
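
/*
 * Illustrative sketch (not part of this file): the pull loop a consumer
 * runs against this source. Each MediaBuffer returned by read() wraps a
 * camera frame; releasing it re-enters signalBufferReturned(), which hands
 * the frame back to the camera. The loop below is hypothetical.
 *
 *   source->start(startMeta.get());
 *   MediaBuffer *buffer;
 *   while (source->read(&buffer) == OK && buffer != NULL) {
 *       int64_t timeUs;
 *       CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
 *       // ... consume buffer->data() / buffer->size() ...
 *       buffer->release();  // triggers signalBufferReturned()
 *   }
 *   source->stop();  // unblocks read() and releases the camera
 */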

void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
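
/*
 * Worked example of the timestamp math above (illustrative numbers only):
 * start() is called with kKeyTime = 100000 us and the first frame arrives
 * with timestampUs = 130000 us. mFirstFrameTimeUs becomes 130000 and
 * mStartTimeUs is rewritten to the initial delay, 130000 - 100000 = 30000.
 * A later frame at 163000 us is then stamped
 * timeUs = 30000 + (163000 - 130000) = 63000 us, i.e. frame timestamps
 * start at the initial delay and advance with the camera clock.
 */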

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    LOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    LOGI("Camera recording proxy died");
}

}  // namespace android