CameraSource.cpp revision 84333e0475bc911adc16417f4ca327c975cf6c36
1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "CameraSource"
19#include <utils/Log.h>
20
21#include <OMX_Component.h>
22#include <binder/IPCThreadState.h>
23#include <media/stagefright/foundation/ADebug.h>
24#include <media/stagefright/CameraSource.h>
25#include <media/stagefright/MediaDefs.h>
26#include <media/stagefright/MediaErrors.h>
27#include <media/stagefright/MetaData.h>
28#include <camera/Camera.h>
29#include <camera/CameraParameters.h>
30#include <gui/Surface.h>
31#include <utils/String8.h>
32#include <cutils/properties.h>
33
34#if LOG_NDEBUG
35#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
36#else
37#define UNUSED_UNLESS_VERBOSE(x)
38#endif
39
40namespace android {
41
42static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
43
// Listener bridging camera callbacks into a CameraSource. Holds only a
// weak reference to the source so this listener cannot keep the
// CameraSource alive after its owner releases it.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // General notification callback; only logs (see implementation).
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Frame-data callback; forwards to CameraSource::dataCallback.
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    // Timestamped recording-frame callback; forwards to
    // CameraSource::dataCallbackTimestamp (timestamp converted ns -> us).
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;  // weak ref; promoted on each callback

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
63
// Stores a weak reference to the source; no camera interaction here.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
67
// Nothing to clean up: mSource is a weak pointer.
CameraSourceListener::~CameraSourceListener() {
}
70
// Camera notification callback: only logged. The UNUSED_UNLESS_VERBOSE
// macros silence -Wunused-parameter when ALOGV compiles to nothing.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    UNUSED_UNLESS_VERBOSE(msgType);
    UNUSED_UNLESS_VERBOSE(ext1);
    UNUSED_UNLESS_VERBOSE(ext2);
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
77
78void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
79                                    camera_frame_metadata_t * /* metadata */) {
80    ALOGV("postData(%d, ptr:%p, size:%d)",
81         msgType, dataPtr->pointer(), dataPtr->size());
82
83    sp<CameraSource> source = mSource.promote();
84    if (source.get() != NULL) {
85        source->dataCallback(msgType, dataPtr);
86    }
87}
88
89void CameraSourceListener::postDataTimestamp(
90        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
91
92    sp<CameraSource> source = mSource.promote();
93    if (source.get() != NULL) {
94        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
95    }
96}
97
98static int32_t getColorFormat(const char* colorFormat) {
99    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
100       return OMX_COLOR_FormatYUV420Planar;
101    }
102
103    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
104       return OMX_COLOR_FormatYUV422SemiPlanar;
105    }
106
107    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
108        return OMX_COLOR_FormatYUV420SemiPlanar;
109    }
110
111    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
112        return OMX_COLOR_FormatYCbYCr;
113    }
114
115    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
116       return OMX_COLOR_Format16bitRGB565;
117    }
118
119    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
120       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
121    }
122
123    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
124        return OMX_COLOR_FormatAndroidOpaque;
125    }
126
127    ALOGE("Uknown color format (%s), please add it to "
128         "CameraSource::getColorFormat", colorFormat);
129
130    CHECK(!"Unknown color format");
131}
132
133CameraSource *CameraSource::Create(const String16 &clientName) {
134    Size size;
135    size.width = -1;
136    size.height = -1;
137
138    sp<ICamera> camera;
139    return new CameraSource(camera, NULL, 0, clientName, -1,
140            size, -1, NULL, false);
141}
142
143// static
144CameraSource *CameraSource::CreateFromCamera(
145    const sp<ICamera>& camera,
146    const sp<ICameraRecordingProxy>& proxy,
147    int32_t cameraId,
148    const String16& clientName,
149    uid_t clientUid,
150    Size videoSize,
151    int32_t frameRate,
152    const sp<IGraphicBufferProducer>& surface,
153    bool storeMetaDataInVideoBuffers) {
154
155    CameraSource *source = new CameraSource(camera, proxy, cameraId,
156            clientName, clientUid, videoSize, frameRate, surface,
157            storeMetaDataInVideoBuffers);
158    return source;
159}
160
// Constructor: records the requested configuration and immediately runs
// init(). The outcome is stored in mInitCheck (see initCheck()); on
// failure the camera is released right away.
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),  // 200 ms default glitch threshold
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}
195
// Returns the status of the init() performed in the constructor.
// Callers must check this before using the source, since Create*/the
// constructor never report failure directly.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
199
// Obtain and lock the camera for recording.
//
// @param camera   application-supplied camera, or 0 to open our own
//                 ("cold" camera) using cameraId
// @param proxy    recording proxy to the application's remote Camera;
//                 used only when camera != 0 ("hot" camera)
// @param cameraId id of the camera to open when camera == 0
// @param clientName, clientUid identity used when connecting
// @return OK on success, -EBUSY when the camera cannot be obtained
status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid) {

    if (camera == 0) {
        // Cold camera: open and own it ourselves.
        mCamera = Camera::connect(cameraId, clientName, clientUid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    // Lock the camera so other clients cannot change its settings; the
    // matching unlock happens in releaseCamera()/startCameraRecording().
    mCamera->lock();

    return OK;
}
226
227
228/*
229 * Check to see whether the requested video width and height is one
230 * of the supported sizes.
231 * @param width the video frame width in pixels
232 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
234 * @return true if the dimension (width and height) is supported.
235 */
236static bool isVideoSizeSupported(
237    int32_t width, int32_t height,
238    const Vector<Size>& supportedSizes) {
239
240    ALOGV("isVideoSizeSupported");
241    for (size_t i = 0; i < supportedSizes.size(); ++i) {
242        if (width  == supportedSizes[i].width &&
243            height == supportedSizes[i].height) {
244            return true;
245        }
246    }
247    return false;
248}
249
250/*
 * If the preview and video output is separate, we only set
 * the video size, and applications should set the preview size
253 * to some proper value, and the recording framework will not
254 * change the preview size; otherwise, if the video and preview
255 * output is the same, we need to set the preview to be the same
256 * as the requested video size.
257 *
258 */
259/*
260 * Query the camera to retrieve the supported video frame sizes
261 * and also to see whether CameraParameters::setVideoSize()
262 * is supported or not.
263 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
266 * @param sizes returns the vector of Size objects for the
267 *      supported video frame sizes advertised by the camera.
268 */
269static void getSupportedVideoSizes(
270    const CameraParameters& params,
271    bool *isSetVideoSizeSupported,
272    Vector<Size>& sizes) {
273
274    *isSetVideoSizeSupported = true;
275    params.getSupportedVideoSizes(sizes);
276    if (sizes.size() == 0) {
277        ALOGD("Camera does not support setVideoSize()");
278        params.getSupportedPreviewSizes(sizes);
279        *isSetVideoSizeSupported = false;
280    }
281}
282
283/*
284 * Check whether the camera has the supported color format
285 * @param params CameraParameters to retrieve the information
286 * @return OK if no error.
287 */
288status_t CameraSource::isCameraColorFormatSupported(
289        const CameraParameters& params) {
290    mColorFormat = getColorFormat(params.get(
291            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
292    if (mColorFormat == -1) {
293        return BAD_VALUE;
294    }
295    return OK;
296}
297
298/*
299 * Configure the camera to use the requested video size
300 * (width and height) and/or frame rate. If both width and
301 * height are -1, configuration on the video size is skipped.
302 * if frameRate is -1, configuration on the frame rate
303 * is skipped. Skipping the configuration allows one to
304 * use the current camera setting without the need to
305 * actually know the specific values (see Create() method).
306 *
307 * @param params the CameraParameters to be configured
308 * @param width the target video frame width in pixels
309 * @param height the target video frame height in pixels
310 * @param frameRate the target frame rate in frames per second.
311 * @return OK if no error.
312 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    ALOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        // When the HAL has no separate video stream, the video size is
        // driven by the preview size instead.
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        ALOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        ALOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        // NOTE(review): substring match against the comma-separated list —
        // e.g. a requested "15" would also match a supported "150". Confirm
        // exact-token matching is not required before tightening this.
        if (strstr(supportedFrameRates, buf) == NULL) {
            ALOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            ALOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}
377
378/*
379 * Check whether the requested video frame size
380 * has been successfully configured or not. If both width and height
381 * are -1, check on the current width and height value setting
382 * is performed.
383 *
384 * @param params CameraParameters to retrieve the information
385 * @param the target video frame width in pixels to check against
386 * @param the target video frame height in pixels to check against
387 * @return OK if no error
388 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.  Cache the confirmed size for getFormat()/mMeta.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}
431
432/*
433 * Check the requested frame rate has been successfully configured or not.
434 * If the target frameRate is -1, check on the current frame rate value
435 * setting is performed.
436 *
437 * @param params CameraParameters to retrieve the information
438 * @param the target video frame rate to check against
439 * @return OK if no error.
440 */
441status_t CameraSource::checkFrameRate(
442        const CameraParameters& params,
443        int32_t frameRate) {
444
445    ALOGV("checkFrameRate");
446    int32_t frameRateActual = params.getPreviewFrameRate();
447    if (frameRateActual < 0) {
448        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
449        return UNKNOWN_ERROR;
450    }
451
452    // Check the actual video frame rate against the target/requested
453    // video frame rate.
454    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
455        ALOGE("Failed to set preview frame rate to %d fps. The actual "
456                "frame rate is %d", frameRate, frameRateActual);
457        return UNKNOWN_ERROR;
458    }
459
460    // Good now.
461    mVideoFrameRate = frameRateActual;
462    return OK;
463}
464
465/*
466 * Initialize the CameraSource to so that it becomes
467 * ready for providing the video input streams as requested.
468 * @param camera the camera object used for the video source
469 * @param cameraId if camera == 0, use camera with this id
470 *      as the video source
471 * @param videoSize the target video frame size. If both
472 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
474 * @param frameRate the target frame rate in frames per second.
475 *      if it is -1, use the current camera frame rate setting.
476 * @param storeMetaDataInVideoBuffers request to store meta
477 *      data or real YUV data in video buffers. Request to
478 *      store meta data in video buffers may not be honored
479 *      if the source does not support this feature.
480 *
481 * @return OK if no error.
482 */
483status_t CameraSource::init(
484        const sp<ICamera>& camera,
485        const sp<ICameraRecordingProxy>& proxy,
486        int32_t cameraId,
487        const String16& clientName,
488        uid_t clientUid,
489        Size videoSize,
490        int32_t frameRate,
491        bool storeMetaDataInVideoBuffers) {
492
493    ALOGV("init");
494    status_t err = OK;
495    int64_t token = IPCThreadState::self()->clearCallingIdentity();
496    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
497                               videoSize, frameRate,
498                               storeMetaDataInVideoBuffers);
499    IPCThreadState::self()->restoreCallingIdentity(token);
500    return err;
501}
502
// Body of init(), run with the calling identity cleared. Connects to
// (or adopts) the camera, validates and applies the requested video
// size/frame rate, optionally enables metadata-in-buffers mode, and
// builds the mMeta format returned by getFormat().
status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            clientName, clientUid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.  (Any non-zero status_t counts as failure.)
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate.  Re-read the parameters
    // since configureCamera() may have changed them.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may already set a surface to the camera.
    if (mSurface != NULL) {
        // This CHECK is good, since we just passed the lock/unlock
        // check earlier by calling mCamera->setParameters().
        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
    }

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        // Best effort: fall back to real YUV buffers when unsupported.
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

    // A "glitch" is an inter-frame gap longer than one frame interval,
    // with a floor of the 200 ms default set in the constructor.
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}
577
578CameraSource::~CameraSource() {
579    if (mStarted) {
580        reset();
581    } else if (mInitCheck == OK) {
582        // Camera is initialized but because start() is never called,
583        // the lock on Camera is never released(). This makes sure
584        // Camera's lock is released in this case.
585        releaseCamera();
586    }
587}
588
// Begin recording on the camera. For a "hot" (application-owned) camera
// this hands control back to the app via the recording proxy; for a
// "cold" camera we drive our own Camera object directly.
void CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    if (mNumInputBuffers > 0) {
        status_t err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video buffer count to %d due to %d",
                mNumInputBuffers, err);
        }
    }

    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        // Unlock so the application can use the camera; frames arrive via
        // ProxyListener::dataCallbackTimestamp from now on.
        mCamera->unlock();
        mCamera.clear();
        CHECK_EQ((status_t)OK,
            mCameraRecordingProxy->startRecording(new ProxyListener(this)));
    } else {
        // Cold camera: attach our listener and record directly.
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        CHECK(mCamera->recordingEnabled());
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
}
619
620status_t CameraSource::start(MetaData *meta) {
621    ALOGV("start");
622    CHECK(!mStarted);
623    if (mInitCheck != OK) {
624        ALOGE("CameraSource is not initialized yet");
625        return mInitCheck;
626    }
627
628    char value[PROPERTY_VALUE_MAX];
629    if (property_get("media.stagefright.record-stats", value, NULL)
630        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
631        mCollectStats = true;
632    }
633
634    mStartTimeUs = 0;
635    mNumInputBuffers = 0;
636    if (meta) {
637        int64_t startTimeUs;
638        if (meta->findInt64(kKeyTime, &startTimeUs)) {
639            mStartTimeUs = startTimeUs;
640        }
641
642        int32_t nBuffers;
643        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
644            CHECK_GT(nBuffers, 0);
645            mNumInputBuffers = nBuffers;
646        }
647    }
648
649    startCameraRecording();
650
651    mStarted = true;
652    return OK;
653}
654
655void CameraSource::stopCameraRecording() {
656    ALOGV("stopCameraRecording");
657    if (mCameraFlags & FLAGS_HOT_CAMERA) {
658        mCameraRecordingProxy->stopRecording();
659    } else {
660        mCamera->setListener(NULL);
661        mCamera->stopRecording();
662    }
663}
664
// Disconnect from the camera and the recording proxy, undoing the setup
// done in isCameraAvailable(). Safe to call when already released.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    if (mCamera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            // We opened this camera ourselves, so fully shut it down.
            ALOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        }
        // Release the lock taken in isCameraAvailable() so other clients
        // can use the camera again.
        mCamera->unlock();
        mCamera.clear();
        mCamera = 0;
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}
685
686status_t CameraSource::reset() {
687    ALOGD("reset: E");
688    Mutex::Autolock autoLock(mLock);
689    mStarted = false;
690    mFrameAvailableCondition.signal();
691
692    int64_t token;
693    bool isTokenValid = false;
694    if (mCamera != 0) {
695        token = IPCThreadState::self()->clearCallingIdentity();
696        isTokenValid = true;
697    }
698    releaseQueuedFrames();
699    while (!mFramesBeingEncoded.empty()) {
700        if (NO_ERROR !=
701            mFrameCompleteCondition.waitRelative(mLock,
702                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
703            ALOGW("Timed out waiting for outstanding frames being encoded: %d",
704                mFramesBeingEncoded.size());
705        }
706    }
707    stopCameraRecording();
708    releaseCamera();
709    if (isTokenValid) {
710        IPCThreadState::self()->restoreCallingIdentity(token);
711    }
712
713    if (mCollectStats) {
714        ALOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
715                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
716                mLastFrameTimestampUs - mFirstFrameTimeUs);
717    }
718
719    if (mNumGlitches > 0) {
720        ALOGW("%d long delays between neighboring video frames", mNumGlitches);
721    }
722
723    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
724    ALOGD("reset: X");
725    return OK;
726}
727
728void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
729    ALOGV("releaseRecordingFrame");
730    if (mCameraRecordingProxy != NULL) {
731        mCameraRecordingProxy->releaseRecordingFrame(frame);
732    } else if (mCamera != NULL) {
733        int64_t token = IPCThreadState::self()->clearCallingIdentity();
734        mCamera->releaseRecordingFrame(frame);
735        IPCThreadState::self()->restoreCallingIdentity(token);
736    }
737}
738
739void CameraSource::releaseQueuedFrames() {
740    List<sp<IMemory> >::iterator it;
741    while (!mFramesReceived.empty()) {
742        it = mFramesReceived.begin();
743        releaseRecordingFrame(*it);
744        mFramesReceived.erase(it);
745        ++mNumFramesDropped;
746    }
747}
748
// Returns the output format (raw video: size, stride, color format,
// frame rate) assembled in initWithCameraAccess().
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
752
// Thin virtual hook around releaseRecordingFrame(); overridable by
// subclasses (e.g. CameraSourceTimeLapse).
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
756
// MediaBufferObserver callback: the encoder is done with `buffer`. Find
// the camera IMemory backing it, return that frame to the camera, and
// release the MediaBuffer wrapper.
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        // Match by base pointer, the same pointer wrapped in read().
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            // Wake reset(), which may be waiting for in-flight frames.
            mFrameCompleteCondition.signal();
            return;
        }
    }
    // A buffer we never handed out indicates a caller bug.
    CHECK(!"signalBufferReturned: bogus buffer");
}
774
// Blocking read of the next recorded frame. The returned MediaBuffer
// wraps the camera-owned IMemory directly (zero copy); it is handed
// back to the camera in signalBufferReturned() once the consumer is
// done with it.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    // A camera is a live source; seeking is not supported.
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                // If the app-side camera died, no more frames will come.
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %lld us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            // Stopped while waiting; *buffer stays NULL.
            return OK;
        }
        // Dequeue the oldest frame together with its timestamp, and move
        // it to the in-flight list until signalBufferReturned().
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
821
// Recording-frame callback (timestamp already converted to
// microseconds by the listeners). Runs on a camera/binder thread:
// either queues the frame for read() or returns it to the camera
// immediately when it must be dropped.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    // Drop frames arriving after stop, or before the requested start time
    // while no frame has been accepted yet.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly monotonic.
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on mStartTimeUs holds the initial delay between
            // the requested start and the first captured frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    // Output timestamp = initial delay + time since the first frame.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
870
// Whether metadata-in-buffers mode was successfully enabled during init
// (see initWithCameraAccess()); false means buffers carry real YUV data.
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}
875
876CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
877    mSource = source;
878}
879
880void CameraSource::ProxyListener::dataCallbackTimestamp(
881        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
882    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
883}
884
885void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
886    ALOGI("Camera recording proxy died");
887}
888
889}  // namespace android
890