CameraSource.cpp revision dba83c1cb1bef03bc5d1760c2639d06ff71c0fa7
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>

#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
#define UNUSED_UNLESS_VERBOSE(x)
#endif

namespace android {

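// Extra time (3 seconds, in nanoseconds) allowed on top of the expected
// inter-frame interval before a wait for camera frames is treated as timed out.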
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;

struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    UNUSED_UNLESS_VERBOSE(msgType);
    UNUSED_UNLESS_VERBOSE(ext1);
    UNUSED_UNLESS_VERBOSE(ext2);
    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
                                    camera_frame_metadata_t * /* metadata */) {
    ALOGV("postData(%d, ptr:%p, size:%zu)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
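        // The camera reports timestamps in nanoseconds; CameraSource works in
        // microseconds, so convert before forwarding.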
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

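// Map a CameraParameters pixel format string to the corresponding OMX color
// format. Unknown formats are logged and abort via CHECK.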
static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
        return OMX_COLOR_FormatYUV420Planar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
        return OMX_COLOR_FormatAndroidOpaque;
    }

    ALOGE("Unknown color format (%s), please add it to "
         "CameraSource::getColorFormat", colorFormat);

    CHECK(!"Unknown color format");
    return -1;
}

CameraSource *CameraSource::Create(const String16 &clientName) {
    Size size;
    size.width = -1;
    size.height = -1;

    sp<ICamera> camera;
    return new CameraSource(camera, NULL, 0, clientName, -1,
            size, -1, NULL, false);
}

// static
CameraSource *CameraSource::CreateFromCamera(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers) {

    CameraSource *source = new CameraSource(camera, proxy, cameraId,
            clientName, clientUid, videoSize, frameRate, surface,
            storeMetaDataInVideoBuffers);
    return source;
}

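// Illustrative usage sketch (not part of the original file; variable names such
// as camera, proxy and bufferProducer are placeholders): a recording client
// typically creates the source, verifies initialization, then starts reading.
//
//   sp<CameraSource> source = CameraSource::CreateFromCamera(
//           camera, proxy, cameraId, clientName, clientUid,
//           videoSize, frameRate, bufferProducer,
//           true /* storeMetaDataInVideoBuffers */);
//   if (source == NULL || source->initCheck() != OK) {
//       // initialization failed; do not call start()
//   }
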
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}

status_t CameraSource::initCheck() const {
    return mInitCheck;
}

status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid) {

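    // If no ICamera was supplied we connect to the camera ourselves (a "cold"
    // camera). Otherwise the application already owns the camera and provides
    // a recording proxy for it (a "hot" camera).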
    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    mCamera->lock();

    return OK;
}


/*
 * Check whether the requested video width and height are one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
static bool isVideoSizeSupported(
    int32_t width, int32_t height,
    const Vector<Size>& supportedSizes) {

    ALOGV("isVideoSizeSupported");
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        if (width  == supportedSizes[i].width &&
            height == supportedSizes[i].height) {
            return true;
        }
    }
    return false;
}

/*
 * If the preview and video outputs are separate, we only set the
 * video size; applications should set the preview size to some
 * proper value, and the recording framework will not change the
 * preview size. Otherwise, if the video and preview outputs are
 * the same, we need to set the preview size to be the same as
 * the requested video size.
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether the method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
static void getSupportedVideoSizes(
    const CameraParameters& params,
    bool *isSetVideoSizeSupported,
    Vector<Size>& sizes) {

    *isSetVideoSizeSupported = true;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        ALOGD("Camera does not support setVideoSize()");
        params.getSupportedPreviewSizes(sizes);
        *isSetVideoSizeSupported = false;
    }
}

/*
 * Check whether the camera has the supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
status_t CameraSource::isCameraColorFormatSupported(
        const CameraParameters& params) {
    mColorFormat = getColorFormat(params.get(
            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
    if (mColorFormat == -1) {
        return BAD_VALUE;
    }
    return OK;
}

/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration on the video size is skipped.
 * If frameRate is -1, configuration on the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
status_t CameraSource::configureCamera(
        CameraParameters* params,
        int32_t width, int32_t height,
        int32_t frameRate) {
    ALOGV("configureCamera");
    Vector<Size> sizes;
    bool isSetVideoSizeSupportedByCamera = true;
    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
    bool isCameraParamChanged = false;
    if (width != -1 && height != -1) {
        if (!isVideoSizeSupported(width, height, sizes)) {
            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
            return BAD_VALUE;
        }
        if (isSetVideoSizeSupportedByCamera) {
            params->setVideoSize(width, height);
        } else {
            params->setPreviewSize(width, height);
        }
        isCameraParamChanged = true;
    } else if ((width == -1 && height != -1) ||
               (width != -1 && height == -1)) {
        // If one and only one of the width and height is -1
        // we reject such a request.
        ALOGE("Requested video size (%dx%d) is not supported", width, height);
        return BAD_VALUE;
    } else {  // width == -1 && height == -1
        // Do not configure the camera.
        // Use the current width and height value setting from the camera.
    }

    if (frameRate != -1) {
        CHECK(frameRate > 0 && frameRate <= 120);
        const char* supportedFrameRates =
                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
        CHECK(supportedFrameRates != NULL);
        ALOGV("Supported frame rates: %s", supportedFrameRates);
        char buf[4];
        snprintf(buf, 4, "%d", frameRate);
        if (strstr(supportedFrameRates, buf) == NULL) {
            ALOGE("Requested frame rate (%d) is not supported: %s",
                frameRate, supportedFrameRates);
            return BAD_VALUE;
        }

        // The frame rate is supported, set the camera to the requested value.
        params->setPreviewFrameRate(frameRate);
        isCameraParamChanged = true;
    } else {  // frameRate == -1
        // Do not configure the camera.
        // Use the current frame rate value setting from the camera
    }

    if (isCameraParamChanged) {
        // Either frame rate or frame size needs to be changed.
        String8 s = params->flatten();
        if (OK != mCamera->setParameters(s)) {
            ALOGE("Could not change settings."
                 " Someone else is using camera %p?", mCamera.get());
            return -EBUSY;
        }
    }
    return OK;
}

/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, a check on the current width and height value setting
 * is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}

/*
 * Check whether the requested frame rate has been successfully configured.
 * If the target frameRate is -1, a check on the current frame rate value
 * setting is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
status_t CameraSource::checkFrameRate(
        const CameraParameters& params,
        int32_t frameRate) {

    ALOGV("checkFrameRate");
    int32_t frameRateActual = params.getPreviewFrameRate();
    if (frameRateActual < 0) {
        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame rate against the target/requested
    // video frame rate.
    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
        ALOGE("Failed to set preview frame rate to %d fps. The actual "
                "frame rate is %d", frameRate, frameRateActual);
        return UNKNOWN_ERROR;
    }

    // Good now.
    mVideoFrameRate = frameRateActual;
    return OK;
}

/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use the camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize are -1, use the current
 *      width and height settings of the camera
 * @param frameRate the target frame rate in frames per second.
 *      If it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. A request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
status_t CameraSource::init(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {

    ALOGV("init");
    status_t err = OK;
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
                               videoSize, frameRate,
                               storeMetaDataInVideoBuffers);
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::initWithCameraAccess(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t frameRate,
        bool storeMetaDataInVideoBuffers) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            clientName, clientUid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may already set a surface to the camera.
    if (mSurface != NULL) {
        // This CHECK is good, since we just passed the lock/unlock
        // check earlier by calling mCamera->setParameters().
        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
    }

    // By default, do not store metadata in video buffers
    mIsMetaDataStoredInVideoBuffers = false;
    mCamera->storeMetaDataInBuffers(false);
    if (storeMetaDataInVideoBuffers) {
        if (OK == mCamera->storeMetaDataInBuffers(true)) {
            mIsMetaDataStoredInVideoBuffers = true;
        }
    }

    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}

CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized, but since start() was never called,
        // the lock on Camera was never released. Make sure Camera's
        // lock is released in this case.
        releaseCamera();
    }
}

status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;
    if (mNumInputBuffers > 0) {
        err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video buffer count to %d due to %d",
                mNumInputBuffers, err);
        }
    }

    err = OK;
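    // With a hot (application-owned) camera, hand the camera back to the app
    // and record through its ICameraRecordingProxy; with a cold camera, record
    // directly on the Camera object we connected ourselves.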
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording(
                new ProxyListener(this))) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}

status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }
    }

    status_t err;
    if ((err = startCameraRecording()) == OK) {
        mStarted = true;
    }

    return err;
}

void CameraSource::stopCameraRecording() {
    ALOGV("stopCameraRecording");
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        mCameraRecordingProxy->stopRecording();
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
}

void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    if (mCamera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
            ALOGV("Camera was cold when we started, stopping preview");
            mCamera->stopPreview();
            mCamera->disconnect();
        }
        mCamera->unlock();
        mCamera.clear();
        mCamera = 0;
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
    if (mCameraRecordingProxy != 0) {
        mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
        mCameraRecordingProxy.clear();
    }
    mCameraFlags = 0;
}

status_t CameraSource::reset() {
    ALOGD("reset: E");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    int64_t token;
    bool isTokenValid = false;
    if (mCamera != 0) {
        token = IPCThreadState::self()->clearCallingIdentity();
        isTokenValid = true;
    }
    releaseQueuedFrames();
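    // Wait (with a timeout) for any frames still held by the encoder to be
    // returned via signalBufferReturned() before shutting the camera down.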
    while (!mFramesBeingEncoded.empty()) {
        if (NO_ERROR !=
            mFrameCompleteCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
            ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                mFramesBeingEncoded.size());
        }
    }
    stopCameraRecording();
    releaseCamera();
    if (isTokenValid) {
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    if (mCollectStats) {
        ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    if (mNumGlitches > 0) {
        ALOGW("%d long delays between neighboring video frames", mNumGlitches);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    ALOGD("reset: X");
    return OK;
}

void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");
    if (mCameraRecordingProxy != NULL) {
        mCameraRecordingProxy->releaseRecordingFrame(frame);
    } else if (mCamera != NULL) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(frame);
        IPCThreadState::self()->restoreCallingIdentity(token);
    }
}

void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}

status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
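        // Block until a frame is queued by the camera or the source is
        // stopped. On a timeout, a dead recording proxy (the application
        // side has gone away) is reported as end of stream.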
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
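        // The MediaBuffer wraps the camera-provided memory directly (no copy);
        // the frame stays in mFramesBeingEncoded until the consumer returns the
        // buffer via signalBufferReturned().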
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
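            // From here on mStartTimeUs holds the initial delay: the time from
            // the requested start time to the first captured frame.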
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}

CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
    mSource = source;
}

void CameraSource::ProxyListener::dataCallbackTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
}

void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
    ALOGI("Camera recording proxy died");
}

}  // namespace android