// CameraSource.cpp revision 121969b7e0d958092fae76226dc55fe8547a1da6
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
#include <inttypes.h>
#include <string.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
35
#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
#define UNUSED_UNLESS_VERBOSE(x)
#endif
41
namespace android {

// Maximum time to block on a frame-available (read()) or
// frame-complete (reset()) condition before logging a timeout:
// 3 seconds, expressed in nanoseconds.
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
45
// Listener attached to the locally-owned Camera (the non-hot-camera
// path in startCameraRecording()). It forwards camera callbacks to the
// CameraSource it was created for, holding only a weak reference so the
// listener cannot keep the source alive.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // Generic camera notification (msgType plus two extras); only logged.
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Frame-data callback; forwarded to CameraSource::dataCallback().
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
                          camera_frame_metadata_t *metadata);

    // Timestamped (nanoseconds) frame callback; forwarded to
    // CameraSource::dataCallbackTimestamp() in microseconds.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak reference: the CameraSource may be destroyed while the camera
    // still holds this listener; promote() is checked on every callback.
    wp<CameraSource> mSource;

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
65
// Store only a weak reference to the source (see mSource).
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
69
CameraSourceListener::~CameraSourceListener() {
}
72
73void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
74    UNUSED_UNLESS_VERBOSE(msgType);
75    UNUSED_UNLESS_VERBOSE(ext1);
76    UNUSED_UNLESS_VERBOSE(ext2);
77    ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
78}
79
80void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
81                                    camera_frame_metadata_t * /* metadata */) {
82    ALOGV("postData(%d, ptr:%p, size:%zu)",
83         msgType, dataPtr->pointer(), dataPtr->size());
84
85    sp<CameraSource> source = mSource.promote();
86    if (source.get() != NULL) {
87        source->dataCallback(msgType, dataPtr);
88    }
89}
90
91void CameraSourceListener::postDataTimestamp(
92        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
93
94    sp<CameraSource> source = mSource.promote();
95    if (source.get() != NULL) {
96        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
97    }
98}
99
100static int32_t getColorFormat(const char* colorFormat) {
101    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
102       return OMX_COLOR_FormatYUV420Planar;
103    }
104
105    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
106       return OMX_COLOR_FormatYUV422SemiPlanar;
107    }
108
109    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
110        return OMX_COLOR_FormatYUV420SemiPlanar;
111    }
112
113    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
114        return OMX_COLOR_FormatYCbYCr;
115    }
116
117    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
118       return OMX_COLOR_Format16bitRGB565;
119    }
120
121    if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
122       return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
123    }
124
125    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
126        return OMX_COLOR_FormatAndroidOpaque;
127    }
128
129    ALOGE("Uknown color format (%s), please add it to "
130         "CameraSource::getColorFormat", colorFormat);
131
132    CHECK(!"Unknown color format");
133}
134
135CameraSource *CameraSource::Create(const String16 &clientName) {
136    Size size;
137    size.width = -1;
138    size.height = -1;
139
140    sp<ICamera> camera;
141    return new CameraSource(camera, NULL, 0, clientName, -1,
142            size, -1, NULL, false);
143}
144
145// static
146CameraSource *CameraSource::CreateFromCamera(
147    const sp<ICamera>& camera,
148    const sp<ICameraRecordingProxy>& proxy,
149    int32_t cameraId,
150    const String16& clientName,
151    uid_t clientUid,
152    Size videoSize,
153    int32_t frameRate,
154    const sp<IGraphicBufferProducer>& surface,
155    bool storeMetaDataInVideoBuffers) {
156
157    CameraSource *source = new CameraSource(camera, proxy, cameraId,
158            clientName, clientUid, videoSize, frameRate, surface,
159            storeMetaDataInVideoBuffers);
160    return source;
161}
162
// Construct and immediately attempt full initialization. Callers must
// check initCheck() before using the source; on init failure the camera
// is released here so its lock is not leaked.
CameraSource::CameraSource(
    const sp<ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface,
    bool storeMetaDataInVideoBuffers)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      // 200 ms default; init() raises this to one frame interval if the
      // configured frame rate implies a longer gap between frames.
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid,
                    videoSize, frameRate,
                    storeMetaDataInVideoBuffers);
    if (mInitCheck != OK) releaseCamera();
}
197
// Result of construction-time initialization; anything other than OK
// means this source must not be started.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
201
// Establish access to a camera and lock it for our exclusive use.
// If |camera| is NULL we connect to |cameraId| ourselves ("cold" camera:
// FLAGS_HOT_CAMERA cleared; releaseCamera() will disconnect it).
// Otherwise we wrap the application's remote camera ("hot" camera),
// keep its recording proxy, and watch the proxy binder for death.
// Returns -EBUSY if the camera cannot be acquired.
status_t CameraSource::isCameraAvailable(
    const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
    }

    mCamera->lock();

    return OK;
}
228
229
/*
 * Check to see whether the requested video width and height is one
 * of the supported sizes.
 * @param width the video frame width in pixels
 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
 * @return true if the dimension (width and height) is supported.
 */
238static bool isVideoSizeSupported(
239    int32_t width, int32_t height,
240    const Vector<Size>& supportedSizes) {
241
242    ALOGV("isVideoSizeSupported");
243    for (size_t i = 0; i < supportedSizes.size(); ++i) {
244        if (width  == supportedSizes[i].width &&
245            height == supportedSizes[i].height) {
246            return true;
247        }
248    }
249    return false;
250}
251
/*
 * If the preview and video output is separate, we only set the
 * video size, and applications should set the preview size
 * to some proper value, and the recording framework will not
 * change the preview size; otherwise, if the video and preview
 * output is the same, we need to set the preview to be the same
 * as the requested video size.
 *
 */
/*
 * Query the camera to retrieve the supported video frame sizes
 * and also to see whether CameraParameters::setVideoSize()
 * is supported or not.
 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *      CameraParameters::setVideoSize() is supported or not.
 * @param sizes returns the vector of Size objects for the
 *      supported video frame sizes advertised by the camera.
 */
271static void getSupportedVideoSizes(
272    const CameraParameters& params,
273    bool *isSetVideoSizeSupported,
274    Vector<Size>& sizes) {
275
276    *isSetVideoSizeSupported = true;
277    params.getSupportedVideoSizes(sizes);
278    if (sizes.size() == 0) {
279        ALOGD("Camera does not support setVideoSize()");
280        params.getSupportedPreviewSizes(sizes);
281        *isSetVideoSizeSupported = false;
282    }
283}
284
/*
 * Check whether the camera has the supported color format
 * @param params CameraParameters to retrieve the information
 * @return OK if no error.
 */
290status_t CameraSource::isCameraColorFormatSupported(
291        const CameraParameters& params) {
292    mColorFormat = getColorFormat(params.get(
293            CameraParameters::KEY_VIDEO_FRAME_FORMAT));
294    if (mColorFormat == -1) {
295        return BAD_VALUE;
296    }
297    return OK;
298}
299
/*
 * Configure the camera to use the requested video size
 * (width and height) and/or frame rate. If both width and
 * height are -1, configuration on the video size is skipped.
 * If frameRate is -1, configuration on the frame rate
 * is skipped. Skipping the configuration allows one to
 * use the current camera setting without the need to
 * actually know the specific values (see Create() method).
 *
 * @param params the CameraParameters to be configured
 * @param width the target video frame width in pixels
 * @param height the target video frame height in pixels
 * @param frameRate the target frame rate in frames per second.
 * @return OK if no error.
 */
315status_t CameraSource::configureCamera(
316        CameraParameters* params,
317        int32_t width, int32_t height,
318        int32_t frameRate) {
319    ALOGV("configureCamera");
320    Vector<Size> sizes;
321    bool isSetVideoSizeSupportedByCamera = true;
322    getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
323    bool isCameraParamChanged = false;
324    if (width != -1 && height != -1) {
325        if (!isVideoSizeSupported(width, height, sizes)) {
326            ALOGE("Video dimension (%dx%d) is unsupported", width, height);
327            return BAD_VALUE;
328        }
329        if (isSetVideoSizeSupportedByCamera) {
330            params->setVideoSize(width, height);
331        } else {
332            params->setPreviewSize(width, height);
333        }
334        isCameraParamChanged = true;
335    } else if ((width == -1 && height != -1) ||
336               (width != -1 && height == -1)) {
337        // If one and only one of the width and height is -1
338        // we reject such a request.
339        ALOGE("Requested video size (%dx%d) is not supported", width, height);
340        return BAD_VALUE;
341    } else {  // width == -1 && height == -1
342        // Do not configure the camera.
343        // Use the current width and height value setting from the camera.
344    }
345
346    if (frameRate != -1) {
347        CHECK(frameRate > 0 && frameRate <= 120);
348        const char* supportedFrameRates =
349                params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
350        CHECK(supportedFrameRates != NULL);
351        ALOGV("Supported frame rates: %s", supportedFrameRates);
352        char buf[4];
353        snprintf(buf, 4, "%d", frameRate);
354        if (strstr(supportedFrameRates, buf) == NULL) {
355            ALOGE("Requested frame rate (%d) is not supported: %s",
356                frameRate, supportedFrameRates);
357            return BAD_VALUE;
358        }
359
360        // The frame rate is supported, set the camera to the requested value.
361        params->setPreviewFrameRate(frameRate);
362        isCameraParamChanged = true;
363    } else {  // frameRate == -1
364        // Do not configure the camera.
365        // Use the current frame rate value setting from the camera
366    }
367
368    if (isCameraParamChanged) {
369        // Either frame rate or frame size needs to be changed.
370        String8 s = params->flatten();
371        if (OK != mCamera->setParameters(s)) {
372            ALOGE("Could not change settings."
373                 " Someone else is using camera %p?", mCamera.get());
374            return -EBUSY;
375        }
376    }
377    return OK;
378}
379
/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, check on the current width and height value setting
 * is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
391status_t CameraSource::checkVideoSize(
392        const CameraParameters& params,
393        int32_t width, int32_t height) {
394
395    ALOGV("checkVideoSize");
396    // The actual video size is the same as the preview size
397    // if the camera hal does not support separate video and
398    // preview output. In this case, we retrieve the video
399    // size from preview.
400    int32_t frameWidthActual = -1;
401    int32_t frameHeightActual = -1;
402    Vector<Size> sizes;
403    params.getSupportedVideoSizes(sizes);
404    if (sizes.size() == 0) {
405        // video size is the same as preview size
406        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
407    } else {
408        // video size may not be the same as preview
409        params.getVideoSize(&frameWidthActual, &frameHeightActual);
410    }
411    if (frameWidthActual < 0 || frameHeightActual < 0) {
412        ALOGE("Failed to retrieve video frame size (%dx%d)",
413                frameWidthActual, frameHeightActual);
414        return UNKNOWN_ERROR;
415    }
416
417    // Check the actual video frame size against the target/requested
418    // video frame size.
419    if (width != -1 && height != -1) {
420        if (frameWidthActual != width || frameHeightActual != height) {
421            ALOGE("Failed to set video frame size to %dx%d. "
422                    "The actual video size is %dx%d ", width, height,
423                    frameWidthActual, frameHeightActual);
424            return UNKNOWN_ERROR;
425        }
426    }
427
428    // Good now.
429    mVideoSize.width = frameWidthActual;
430    mVideoSize.height = frameHeightActual;
431    return OK;
432}
433
/*
 * Check whether the requested frame rate has been successfully configured
 * or not. If the target frameRate is -1, check on the current frame rate
 * value setting is performed.
 *
 * @param params CameraParameters to retrieve the information
 * @param frameRate the target video frame rate to check against
 * @return OK if no error.
 */
443status_t CameraSource::checkFrameRate(
444        const CameraParameters& params,
445        int32_t frameRate) {
446
447    ALOGV("checkFrameRate");
448    int32_t frameRateActual = params.getPreviewFrameRate();
449    if (frameRateActual < 0) {
450        ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
451        return UNKNOWN_ERROR;
452    }
453
454    // Check the actual video frame rate against the target/requested
455    // video frame rate.
456    if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
457        ALOGE("Failed to set preview frame rate to %d fps. The actual "
458                "frame rate is %d", frameRate, frameRateActual);
459        return UNKNOWN_ERROR;
460    }
461
462    // Good now.
463    mVideoFrameRate = frameRateActual;
464    return OK;
465}
466
/*
 * Initialize the CameraSource so that it becomes
 * ready for providing the video input streams as requested.
 * @param camera the camera object used for the video source
 * @param cameraId if camera == 0, use camera with this id
 *      as the video source
 * @param videoSize the target video frame size. If both
 *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
 * @param frameRate the target frame rate in frames per second.
 *      if it is -1, use the current camera frame rate setting.
 * @param storeMetaDataInVideoBuffers request to store meta
 *      data or real YUV data in video buffers. Request to
 *      store meta data in video buffers may not be honored
 *      if the source does not support this feature.
 *
 * @return OK if no error.
 */
485status_t CameraSource::init(
486        const sp<ICamera>& camera,
487        const sp<ICameraRecordingProxy>& proxy,
488        int32_t cameraId,
489        const String16& clientName,
490        uid_t clientUid,
491        Size videoSize,
492        int32_t frameRate,
493        bool storeMetaDataInVideoBuffers) {
494
495    ALOGV("init");
496    status_t err = OK;
497    int64_t token = IPCThreadState::self()->clearCallingIdentity();
498    err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
499                               videoSize, frameRate,
500                               storeMetaDataInVideoBuffers);
501    IPCThreadState::self()->restoreCallingIdentity(token);
502    return err;
503}
504
505status_t CameraSource::initWithCameraAccess(
506        const sp<ICamera>& camera,
507        const sp<ICameraRecordingProxy>& proxy,
508        int32_t cameraId,
509        const String16& clientName,
510        uid_t clientUid,
511        Size videoSize,
512        int32_t frameRate,
513        bool storeMetaDataInVideoBuffers) {
514    ALOGV("initWithCameraAccess");
515    status_t err = OK;
516
517    if ((err = isCameraAvailable(camera, proxy, cameraId,
518            clientName, clientUid)) != OK) {
519        ALOGE("Camera connection could not be established.");
520        return err;
521    }
522    CameraParameters params(mCamera->getParameters());
523    if ((err = isCameraColorFormatSupported(params)) != OK) {
524        return err;
525    }
526
527    // Set the camera to use the requested video frame size
528    // and/or frame rate.
529    if ((err = configureCamera(&params,
530                    videoSize.width, videoSize.height,
531                    frameRate))) {
532        return err;
533    }
534
535    // Check on video frame size and frame rate.
536    CameraParameters newCameraParams(mCamera->getParameters());
537    if ((err = checkVideoSize(newCameraParams,
538                videoSize.width, videoSize.height)) != OK) {
539        return err;
540    }
541    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
542        return err;
543    }
544
545    // Set the preview display. Skip this if mSurface is null because
546    // applications may already set a surface to the camera.
547    if (mSurface != NULL) {
548        // This CHECK is good, since we just passed the lock/unlock
549        // check earlier by calling mCamera->setParameters().
550        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
551    }
552
553    // By default, do not store metadata in video buffers
554    mIsMetaDataStoredInVideoBuffers = false;
555    mCamera->storeMetaDataInBuffers(false);
556    if (storeMetaDataInVideoBuffers) {
557        if (OK == mCamera->storeMetaDataInBuffers(true)) {
558            mIsMetaDataStoredInVideoBuffers = true;
559        }
560    }
561
562    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
563    if (glitchDurationUs > mGlitchDurationThresholdUs) {
564        mGlitchDurationThresholdUs = glitchDurationUs;
565    }
566
567    // XXX: query camera for the stride and slice height
568    // when the capability becomes available.
569    mMeta = new MetaData;
570    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
571    mMeta->setInt32(kKeyColorFormat, mColorFormat);
572    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
573    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
574    mMeta->setInt32(kKeyStride,      mVideoSize.width);
575    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
576    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
577    return OK;
578}
579
// If recording was started, reset() performs the full teardown;
// otherwise only the camera lock taken during init needs releasing.
CameraSource::~CameraSource() {
    if (mStarted) {
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released(). This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}
590
// Start the camera's recording stream. For a "hot" camera (owned by the
// application) we unlock and drop our local Camera wrapper, then start
// recording through the application's recording proxy with a
// ProxyListener. For a "cold" camera (owned by us) we attach a
// CameraSourceListener and start recording directly.
status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;
    if (mNumInputBuffers > 0) {
        err = mCamera->sendCommand(
            CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);

        // This could happen for CameraHAL1 clients; thus the failure is
        // not a fatal error
        if (err != OK) {
            ALOGW("Failed to set video buffer count to %d due to %d",
                mNumInputBuffers, err);
        }
    }

    err = OK;
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        // The application owns the camera; let it drive recording via
        // the proxy. Our local wrapper is no longer needed.
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording(
                new ProxyListener(this))) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        mCamera->setListener(new CameraSourceListener(this));
        mCamera->startRecording();
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}
630
631status_t CameraSource::start(MetaData *meta) {
632    ALOGV("start");
633    CHECK(!mStarted);
634    if (mInitCheck != OK) {
635        ALOGE("CameraSource is not initialized yet");
636        return mInitCheck;
637    }
638
639    char value[PROPERTY_VALUE_MAX];
640    if (property_get("media.stagefright.record-stats", value, NULL)
641        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
642        mCollectStats = true;
643    }
644
645    mStartTimeUs = 0;
646    mNumInputBuffers = 0;
647    if (meta) {
648        int64_t startTimeUs;
649        if (meta->findInt64(kKeyTime, &startTimeUs)) {
650            mStartTimeUs = startTimeUs;
651        }
652
653        int32_t nBuffers;
654        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
655            CHECK_GT(nBuffers, 0);
656            mNumInputBuffers = nBuffers;
657        }
658    }
659
660    status_t err;
661    if ((err = startCameraRecording()) == OK) {
662        mStarted = true;
663    }
664
665    return err;
666}
667
668void CameraSource::stopCameraRecording() {
669    ALOGV("stopCameraRecording");
670    if (mCameraFlags & FLAGS_HOT_CAMERA) {
671        mCameraRecordingProxy->stopRecording();
672    } else {
673        mCamera->setListener(NULL);
674        mCamera->stopRecording();
675    }
676}
677
// Drop our reference to the camera. For a "cold" camera (one we
// connected in isCameraAvailable()) also stop preview and disconnect.
// The binder calls are made outside mLock with a cleared calling
// identity; the recording-proxy death notifier is unlinked at the end.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        // Binder calls happen without holding mLock.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
710
// Stop recording and tear everything down: wake any blocked reader,
// return queued frames, wait (with timeout) for frames still held by
// the encoder, stop the camera stream, log statistics, and finally
// release the camera.
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        // Unblock a reader waiting in read().
        mFrameAvailableCondition.signal();

        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        // Return frames the reader never consumed.
        releaseQueuedFrames();
        // Wait for frames the encoder still owns; signalBufferReturned()
        // signals mFrameCompleteCondition as each one comes back.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must be accounted for as encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
757
758void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
759    ALOGV("releaseRecordingFrame");
760    if (mCameraRecordingProxy != NULL) {
761        mCameraRecordingProxy->releaseRecordingFrame(frame);
762    } else if (mCamera != NULL) {
763        int64_t token = IPCThreadState::self()->clearCallingIdentity();
764        mCamera->releaseRecordingFrame(frame);
765        IPCThreadState::self()->restoreCallingIdentity(token);
766    }
767}
768
769void CameraSource::releaseQueuedFrames() {
770    List<sp<IMemory> >::iterator it;
771    while (!mFramesReceived.empty()) {
772        it = mFramesReceived.begin();
773        releaseRecordingFrame(*it);
774        mFramesReceived.erase(it);
775        ++mNumFramesDropped;
776    }
777}
778
// Returns the output format (raw-video MIME, color format, dimensions,
// stride/slice height, frame rate) assembled in initWithCameraAccess().
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
782
// Return a single frame buffer to the camera.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
786
// MediaBufferObserver callback: the consumer is done with |buffer|.
// Locate the camera frame backing it (matched by memory address, since
// read() aliases the IMemory into the MediaBuffer), return that frame
// to the camera, release the MediaBuffer, and wake reset(), which may
// be waiting for outstanding frames.
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    // A buffer we never handed out indicates a logic error somewhere.
    CHECK(!"signalBufferReturned: bogus buffer");
}
804
// MediaSource::read(): block until a camera frame is available (or the
// source is stopped), then hand out a MediaBuffer aliasing the frame's
// memory, stamped with the time computed in dataCallbackTimestamp().
// The frame moves to mFramesBeingEncoded until the consumer returns it
// via signalBufferReturned(). Seeking is not supported.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    // A live camera source cannot honor seek requests.
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                // If the application-side camera died, stop waiting and
                // end the stream instead of spinning forever.
                if (mCameraRecordingProxy != 0 &&
                    !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        // Pop the oldest frame and its matching timestamp.
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        mFramesBeingEncoded.push_back(frame);
        // No copy: the MediaBuffer aliases the IMemory, which stays
        // referenced by mFramesBeingEncoded until returned.
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
851
// Receives every recorded frame (timestamps already converted to
// microseconds) from CameraSourceListener or ProxyListener. Frames
// arriving before the requested start time, or skipped by a subclass,
// are returned to the camera immediately; accepted frames are queued
// for read() with their timestamps rebased so the first frame carries
// the initial delay.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
        releaseOneRecordingFrame(data);
        return;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing. A gap larger than the
        // glitch threshold is only counted, not dropped.
        CHECK(timestampUs > mLastFrameTimestampUs);
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on mStartTimeUs holds the delay between the
            // requested start time and the first captured frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    CHECK(data != NULL && data->size() > 0);
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    // Wake a reader blocked in read().
    mFrameAvailableCondition.signal();
}
900
// Whether the camera delivers metadata rather than YUV data in the
// recording buffers; decided during initWithCameraAccess().
bool CameraSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return mIsMetaDataStoredInVideoBuffers;
}
905
906CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
907    mSource = source;
908}
909
910void CameraSource::ProxyListener::dataCallbackTimestamp(
911        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
912    mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
913}
914
915void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
916    ALOGI("Camera recording proxy died");
917}
918
}  // namespace android
920