CameraSource.cpp revision 65e7e6facda89927cb26594b3b65ae81b3235ebc
1/*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "CameraSource"
19#include <utils/Log.h>
20
21#include <OMX_Component.h>
22#include <binder/IPCThreadState.h>
23#include <media/stagefright/CameraSource.h>
24#include <media/stagefright/MediaDebug.h>
25#include <media/stagefright/MediaDefs.h>
26#include <media/stagefright/MediaErrors.h>
27#include <media/stagefright/MetaData.h>
28#include <camera/Camera.h>
29#include <camera/CameraParameters.h>
30#include <utils/String8.h>
31#include <cutils/properties.h>
32
33namespace android {
34
// Proxy listener that forwards camera callbacks to a CameraSource.
// Holds only a weak reference to the source so the listener registered with
// the camera does not keep the CameraSource alive.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // Camera notifications (errors, focus, ...); this source only logs them.
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Non-timestamped data callbacks, forwarded to the source.
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    // Timestamped recording-frame callbacks; timestamp is in nanoseconds.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;  // weak: avoids a reference cycle with the source

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
53
// Stores a weak reference to the owning CameraSource.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
57
// The weak reference needs no explicit cleanup.
CameraSourceListener::~CameraSourceListener() {
}
60
// Camera notification callback; this source has no use for notifications,
// so they are only logged.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
64
65void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
66    LOGV("postData(%d, ptr:%p, size:%d)",
67         msgType, dataPtr->pointer(), dataPtr->size());
68
69    sp<CameraSource> source = mSource.promote();
70    if (source.get() != NULL) {
71        source->dataCallback(msgType, dataPtr);
72    }
73}
74
75void CameraSourceListener::postDataTimestamp(
76        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
77
78    sp<CameraSource> source = mSource.promote();
79    if (source.get() != NULL) {
80        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
81    }
82}
83
84static int32_t getColorFormat(const char* colorFormat) {
85    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
86       return OMX_COLOR_FormatYUV422SemiPlanar;
87    }
88
89    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
90        return OMX_COLOR_FormatYUV420SemiPlanar;
91    }
92
93    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
94        return OMX_COLOR_FormatYCbYCr;
95    }
96
97    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
98       return OMX_COLOR_Format16bitRGB565;
99    }
100
101    CHECK_EQ(0, "Unknown color format");
102}
103
104// static
105CameraSource *CameraSource::Create() {
106    sp<Camera> camera = Camera::connect(0);
107
108    if (camera.get() == NULL) {
109        return NULL;
110    }
111
112    return new CameraSource(camera);
113}
114
115// static
116CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
117    if (camera.get() == NULL) {
118        return NULL;
119    }
120
121    return new CameraSource(camera);
122}
123
124CameraSource::CameraSource(const sp<Camera> &camera)
125    : mCamera(camera),
126      mNumFramesReceived(0),
127      mLastFrameTimestampUs(0),
128      mStarted(false),
129      mFirstFrameTimeUs(0),
130      mNumFramesEncoded(0),
131      mNumFramesDropped(0),
132      mNumGlitches(0),
133      mGlitchDurationThresholdUs(200000),
134      mCollectStats(false) {
135
136    int64_t token = IPCThreadState::self()->clearCallingIdentity();
137    String8 s = mCamera->getParameters();
138    IPCThreadState::self()->restoreCallingIdentity(token);
139
140    printf("params: \"%s\"\n", s.string());
141
142    int32_t width, height, stride, sliceHeight;
143    CameraParameters params(s);
144    params.getPreviewSize(&width, &height);
145
146    // Calculate glitch duraton threshold based on frame rate
147    int32_t frameRate = params.getPreviewFrameRate();
148    int64_t glitchDurationUs = (1000000LL / frameRate);
149    if (glitchDurationUs > mGlitchDurationThresholdUs) {
150        mGlitchDurationThresholdUs = glitchDurationUs;
151    }
152
153    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
154    CHECK(colorFormatStr != NULL);
155    int32_t colorFormat = getColorFormat(colorFormatStr);
156
157    // XXX: query camera for the stride and slice height
158    // when the capability becomes available.
159    stride = width;
160    sliceHeight = height;
161
162    mMeta = new MetaData;
163    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
164    mMeta->setInt32(kKeyColorFormat, colorFormat);
165    mMeta->setInt32(kKeyWidth, width);
166    mMeta->setInt32(kKeyHeight, height);
167    mMeta->setInt32(kKeyStride, stride);
168    mMeta->setInt32(kKeySliceHeight, sliceHeight);
169}
170
// Stops recording if still running so camera resources are released even
// when the client never called stop().
CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}
176
// Tells the camera to start delivering recording frames to our listener.
// CHECK-aborts if the camera reports an error.
void CameraSource::startCameraRecording() {
    CHECK_EQ(OK, mCamera->startRecording());
}
180
181status_t CameraSource::start(MetaData *meta) {
182    CHECK(!mStarted);
183
184    char value[PROPERTY_VALUE_MAX];
185    if (property_get("media.stagefright.record-stats", value, NULL)
186        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
187        mCollectStats = true;
188    }
189
190    mStartTimeUs = 0;
191    int64_t startTimeUs;
192    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
193        mStartTimeUs = startTimeUs;
194    }
195
196    int64_t token = IPCThreadState::self()->clearCallingIdentity();
197    mCamera->setListener(new CameraSourceListener(this));
198    startCameraRecording();
199    IPCThreadState::self()->restoreCallingIdentity(token);
200
201    mStarted = true;
202    return OK;
203}
204
// Tells the camera to stop delivering recording frames.
void CameraSource::stopCameraRecording() {
    mCamera->stopRecording();
}
208
// Stops recording: detaches the listener, stops the camera, returns every
// queued frame, and blocks until the consumer has handed back all in-flight
// frames before dropping the camera reference.
status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    // Wake any reader blocked in read() so it can observe !mStarted.
    mFrameAvailableCondition.signal();

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(NULL);
    stopCameraRecording();
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        // signalBufferReturned() signals this as buffers come back;
        // wait() releases mLock while blocked.
        mFrameCompleteCondition.wait(mLock);
    }
    mCamera = NULL;
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    // Invariant: every received frame was either encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}
236
// Hands a frame buffer back to the camera so it can be refilled.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    mCamera->releaseRecordingFrame(frame);
}
240
241void CameraSource::releaseQueuedFrames() {
242    List<sp<IMemory> >::iterator it;
243    while (!mFramesReceived.empty()) {
244        it = mFramesReceived.begin();
245        releaseRecordingFrame(*it);
246        mFramesReceived.erase(it);
247        ++mNumFramesDropped;
248    }
249}
250
// Returns the raw-video format metadata (MIME, color format, dimensions,
// stride, slice height) assembled in the constructor.
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
254
// Releases a single frame back to the camera, performing the binder IPC
// with the caller's identity cleared (same pattern as the constructor).
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}
260
261void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
262    LOGV("signalBufferReturned: %p", buffer->data());
263    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
264         it != mFramesBeingEncoded.end(); ++it) {
265        if ((*it)->pointer() ==  buffer->data()) {
266            releaseOneRecordingFrame((*it));
267            mFramesBeingEncoded.erase(it);
268            ++mNumFramesEncoded;
269            buffer->setObserver(0);
270            buffer->release();
271            mFrameCompleteCondition.signal();
272            return;
273        }
274    }
275    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
276}
277
// Blocks until a camera frame is available (or the source is stopped) and
// returns it wrapped in a MediaBuffer aliasing the camera-owned IMemory.
// The frame is tracked in mFramesBeingEncoded until the consumer returns the
// buffer via signalBufferReturned(). Seeking is not supported.
// NOTE: when stopped while waiting, this returns OK with *buffer == NULL —
// callers are expected to treat that as end of stream.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    if (options && options->getSeekTo(&seekTimeUs)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            // wait() releases mLock; dataCallbackTimestamp()/stop() signal it.
            mFrameAvailableCondition.wait(mLock);
        }
        if (!mStarted) {
            // stop() was called while we were waiting.
            return OK;
        }
        // Pop the oldest frame and its matching timestamp.
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());

        // Keep the IMemory alive until signalBufferReturned().
        mFramesBeingEncoded.push_back(frame);
        // No copy: the MediaBuffer points directly at the camera memory.
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}
314
// Called (via CameraSourceListener) for each recording frame the camera
// delivers; timestampUs is the capture time in microseconds. Frames that
// arrive after stop(), before the requested start time, or that
// skipCurrentFrame() rejects are returned to the camera immediately; all
// others are queued (with a rebased timestamp) for read().
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        // Count the frame as received+dropped so stop()'s bookkeeping
        // (received == encoded + dropped) still balances.
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    // Detect abnormally long gaps between consecutive frames.
    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if(skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on, mStartTimeUs holds the delay between the
            // requested start time and the first usable frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    // Rebase the media timestamp to the first frame, plus initial delay.
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    // Wake a reader blocked in read().
    mFrameAvailableCondition.signal();
}
364
365}  // namespace android
366