CameraSource.cpp revision fc20aab463f527ab3b0664986f0381a86b375884
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <cutils/properties.h>

namespace android {

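// Proxy that forwards camera callbacks into a CameraSource. It holds only
// a weak reference to the source, so registering the listener with the
// camera does not create a reference cycle that would keep the source
// alive. Frame timestamps arrive in nanoseconds and are converted to
// microseconds before being handed to dataCallbackTimestamp().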
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

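// Translates a CameraParameters pixel format string into the matching
// OMX color format constant; aborts on an unsupported format.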
static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    CHECK_EQ(0, "Unknown color format");
}

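// Factory methods. Create() connects to the default camera (id 0);
// CreateFromCamera() wraps an already-connected camera. Both return NULL
// on failure. A minimal usage sketch (error handling omitted; read() is
// assumed to use the default NULL ReadOptions from MediaSource):
//
//     sp<CameraSource> source = CameraSource::Create();
//     CHECK_EQ(OK, source->start());
//     MediaBuffer *buffer;
//     while (source->read(&buffer) == OK && buffer != NULL) {
//         // ... encode the frame ...
//         buffer->release();  // eventually triggers signalBufferReturned()
//     }
//     source->stop();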
// static
CameraSource *CameraSource::Create() {
    sp<Camera> camera = Camera::connect(0);

    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSource(camera);
}

// static
CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSource(camera);
}

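// Enables time lapse capture: only one frame is kept per
// timeBetweenTimeLapseFrameCaptureUs of real time, and each kept frame is
// retimestamped 1/videoFrameRate ahead of the previous one so playback
// runs at normal video speed. For example, enableTimeLapseMode(1000000, 30)
// captures one frame per second and spaces the output timestamps
// 33333 us apart, a 30x speedup.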
void CameraSource::enableTimeLapseMode(
        int64_t timeBetweenTimeLapseFrameCaptureUs, int32_t videoFrameRate) {
    LOGV("starting time lapse mode");
    mTimeBetweenTimeLapseFrameCaptureUs = timeBetweenTimeLapseFrameCaptureUs;
    mTimeBetweenTimeLapseVideoFramesUs = (1E6/videoFrameRate);
}

void CameraSource::disableTimeLapseMode() {
    LOGV("stopping time lapse mode");
    mTimeBetweenTimeLapseFrameCaptureUs = -1;
    mTimeBetweenTimeLapseVideoFramesUs = 0;
}

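// Queries the camera's current parameters and publishes the preview size,
// video frame format, stride and slice height as this source's output
// format. Time lapse mode starts out disabled (capture interval of -1).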
CameraSource::CameraSource(const sp<Camera> &camera)
    : mCamera(camera),
      mFirstFrameTimeUs(0),
      mLastFrameTimestampUs(0),
      mNumFramesReceived(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false),
      mStarted(false),
      mTimeBetweenTimeLapseFrameCaptureUs(-1),
      mTimeBetweenTimeLapseVideoFramesUs(0),
      mLastTimeLapseFrameRealTimestampUs(0) {

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    printf("params: \"%s\"\n", s.string());

    int32_t width, height, stride, sliceHeight;
    CameraParameters params(s);
    params.getPreviewSize(&width, &height);

    // Calculate glitch duration threshold based on frame rate
    int32_t frameRate = params.getPreviewFrameRate();
    int64_t glitchDurationUs = (1000000LL / frameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
    CHECK(colorFormatStr != NULL);
    int32_t colorFormat = getColorFormat(colorFormatStr);

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    stride = width;
    sliceHeight = height;

    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, colorFormat);
    mMeta->setInt32(kKeyWidth, width);
    mMeta->setInt32(kKeyHeight, height);
    mMeta->setInt32(kKeyStride, stride);
    mMeta->setInt32(kKeySliceHeight, sliceHeight);
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}

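// Starts recording: optionally reads the desired start time (kKeyTime, in
// microseconds) from |meta|, registers a listener with the camera, and
// kicks off the camera's recording path. Statistics collection can be
// toggled via the media.stagefright.record-stats system property.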
status_t CameraSource::start(MetaData *meta) {
    CHECK(!mStarted);

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(new CameraSourceListener(this));
    CHECK_EQ(OK, mCamera->startRecording());
    IPCThreadState::self()->restoreCallingIdentity(token);

    mStarted = true;
    return OK;
}

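// Stops recording: detaches the listener, returns all queued frames to
// the camera, and blocks until every frame handed out through read() has
// been returned via signalBufferReturned().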
status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(NULL);
    mCamera->stopRecording();
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        mFrameCompleteCondition.wait(mLock);
    }
    mCamera = NULL;
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}

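// Returns every frame still queued in mFramesReceived to the camera and
// counts them as dropped. The caller must hold mLock.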
void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        mCamera->releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

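// Returns a single frame buffer to the camera for reuse, clearing the
// binder calling identity around the IPC call.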
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}

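// MediaBufferObserver callback, invoked when the consumer releases a
// buffer obtained from read(). Locates the backing IMemory, returns it to
// the camera, and wakes up stop() if it is waiting on outstanding frames.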
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    // mFramesBeingEncoded is modified under mLock in read() and stop(),
    // so it must be protected here as well.
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            releaseOneRecordingFrame(*it);
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

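// Blocks until a frame is available or the source is stopped, then wraps
// the oldest queued frame in a MediaBuffer without copying the pixel
// data. The frame stays in mFramesBeingEncoded until the consumer
// releases the buffer. Seeking is not supported.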
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    if (options && options->getSeekTo(&seekTimeUs)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            mFrameAvailableCondition.wait(mLock);
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());

        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

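// Per-frame camera callback (timestamp already converted to
// microseconds). Drops the frame if the source is stopped, if a time
// lapse capture interval has not yet elapsed, or if the frame predates
// the requested start time; otherwise retimestamps it relative to the
// start time and queues it for read().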
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    // Time lapse handling
    if (mTimeBetweenTimeLapseFrameCaptureUs >= 0) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = timestampUs;
        } else if (timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from the last encoded frame until enough
            // time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");

            releaseOneRecordingFrame(data);
            return;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            //   of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = timestampUs;
            timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started.
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

}  // namespace android