// CameraSource.cpp, revision 78eff720c86eb6d4e3d45a144df60b2ca464d2d4
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>
#include <binder/IPCThreadState.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <cutils/properties.h>

33namespace android {
34
// Listener proxy registered with the camera service.  It holds only a weak
// reference to the CameraSource, so the listener never keeps the source
// alive; each callback is forwarded only if the source can still be
// promoted to a strong reference.
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    // Generic camera notification (errors, focus, ...); only logged here.
    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    // Non-timestamped frame data; forwarded to the source if still alive.
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    // Timestamped recording frame; |timestamp| is in nanoseconds.
    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak reference avoids a strong reference cycle with the source.
    wp<CameraSource> mSource;

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};
53
// Stores a weak reference to the owning CameraSource.
CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}
57
// Nothing to release; mSource is a weak pointer.
CameraSourceListener::~CameraSourceListener() {
}
60
// Camera notification callback; intentionally log-only, the source does not
// act on these events.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
64
65void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
66    LOGV("postData(%d, ptr:%p, size:%d)",
67         msgType, dataPtr->pointer(), dataPtr->size());
68
69    sp<CameraSource> source = mSource.promote();
70    if (source.get() != NULL) {
71        source->dataCallback(msgType, dataPtr);
72    }
73}
74
75void CameraSourceListener::postDataTimestamp(
76        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
77
78    sp<CameraSource> source = mSource.promote();
79    if (source.get() != NULL) {
80        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
81    }
82}
83
84static int32_t getColorFormat(const char* colorFormat) {
85    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
86       return OMX_COLOR_FormatYUV420Planar;
87    }
88
89    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
90       return OMX_COLOR_FormatYUV422SemiPlanar;
91    }
92
93    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
94        return OMX_COLOR_FormatYUV420SemiPlanar;
95    }
96
97    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
98        return OMX_COLOR_FormatYCbYCr;
99    }
100
101    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
102       return OMX_COLOR_Format16bitRGB565;
103    }
104
105    LOGE("Uknown color format (%s), please add it to "
106         "CameraSource::getColorFormat", colorFormat);
107
108    CHECK_EQ(0, "Unknown color format");
109}
110
111// static
112CameraSource *CameraSource::Create() {
113    sp<Camera> camera = Camera::connect(0);
114
115    if (camera.get() == NULL) {
116        return NULL;
117    }
118
119    return new CameraSource(camera);
120}
121
122// static
123CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
124    if (camera.get() == NULL) {
125        return NULL;
126    }
127
128    return new CameraSource(camera);
129}
130
131CameraSource::CameraSource(const sp<Camera> &camera)
132    : mCamera(camera),
133      mNumFramesReceived(0),
134      mLastFrameTimestampUs(0),
135      mStarted(false),
136      mFirstFrameTimeUs(0),
137      mNumFramesEncoded(0),
138      mNumFramesDropped(0),
139      mNumGlitches(0),
140      mGlitchDurationThresholdUs(200000),
141      mCollectStats(false) {
142
143    int64_t token = IPCThreadState::self()->clearCallingIdentity();
144    String8 s = mCamera->getParameters();
145    IPCThreadState::self()->restoreCallingIdentity(token);
146
147    printf("params: \"%s\"\n", s.string());
148
149    int32_t width, height, stride, sliceHeight;
150    CameraParameters params(s);
151    params.getPreviewSize(&width, &height);
152
153    // Calculate glitch duraton threshold based on frame rate
154    int32_t frameRate = params.getPreviewFrameRate();
155    int64_t glitchDurationUs = (1000000LL / frameRate);
156    if (glitchDurationUs > mGlitchDurationThresholdUs) {
157        mGlitchDurationThresholdUs = glitchDurationUs;
158    }
159
160    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
161    CHECK(colorFormatStr != NULL);
162    int32_t colorFormat = getColorFormat(colorFormatStr);
163
164    // XXX: query camera for the stride and slice height
165    // when the capability becomes available.
166    stride = width;
167    sliceHeight = height;
168
169    mMeta = new MetaData;
170    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
171    mMeta->setInt32(kKeyColorFormat, colorFormat);
172    mMeta->setInt32(kKeyWidth, width);
173    mMeta->setInt32(kKeyHeight, height);
174    mMeta->setInt32(kKeyStride, stride);
175    mMeta->setInt32(kKeySliceHeight, sliceHeight);
176}
177
// Ensures recording is stopped (and all frames returned) before the
// source is destroyed.
CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}
183
// Starts the camera's recording stream; aborts if the camera refuses.
// Virtual seam overridden by subclasses (e.g. time-lapse capture).
void CameraSource::startCameraRecording() {
    CHECK_EQ(OK, mCamera->startRecording());
}
187
188status_t CameraSource::start(MetaData *meta) {
189    CHECK(!mStarted);
190
191    char value[PROPERTY_VALUE_MAX];
192    if (property_get("media.stagefright.record-stats", value, NULL)
193        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
194        mCollectStats = true;
195    }
196
197    mStartTimeUs = 0;
198    int64_t startTimeUs;
199    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
200        mStartTimeUs = startTimeUs;
201    }
202
203    int64_t token = IPCThreadState::self()->clearCallingIdentity();
204    mCamera->setListener(new CameraSourceListener(this));
205    startCameraRecording();
206    IPCThreadState::self()->restoreCallingIdentity(token);
207
208    mStarted = true;
209    return OK;
210}
211
// Unregisters the listener and stops the camera's recording stream.
// Virtual seam overridden by subclasses.
void CameraSource::stopCameraRecording() {
    mCamera->setListener(NULL);
    mCamera->stopRecording();
}
216
// Stops recording: tears down the camera session, returns any frames not
// yet handed to the encoder, and blocks until the encoder gives back every
// in-flight frame.  mLock is held throughout (the condition wait releases
// it temporarily).
status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    // Wake any reader blocked in read() so it can observe mStarted == false.
    mFrameAvailableCondition.signal();

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    stopCameraRecording();
    releaseQueuedFrames();
    // signalBufferReturned() signals mFrameCompleteCondition as each
    // outstanding frame comes back from the encoder.
    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        mFrameCompleteCondition.wait(mLock);
    }
    mCamera = NULL;
    IPCThreadState::self()->restoreCallingIdentity(token);

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    // Invariant: every received frame was either encoded or dropped.
    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}
243
// Returns a single frame buffer to the camera for reuse.
// Virtual seam overridden by subclasses.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    mCamera->releaseRecordingFrame(frame);
}
247
248void CameraSource::releaseQueuedFrames() {
249    List<sp<IMemory> >::iterator it;
250    while (!mFramesReceived.empty()) {
251        it = mFramesReceived.begin();
252        releaseRecordingFrame(*it);
253        mFramesReceived.erase(it);
254        ++mNumFramesDropped;
255    }
256}
257
// Returns the output format metadata built in the constructor
// (raw video MIME type, dimensions, color format, stride).
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
261
// Returns one frame to the camera using our own binder identity, restoring
// the caller's identity afterwards.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}
267
268void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
269    LOGV("signalBufferReturned: %p", buffer->data());
270    Mutex::Autolock autoLock(mLock);
271    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
272         it != mFramesBeingEncoded.end(); ++it) {
273        if ((*it)->pointer() ==  buffer->data()) {
274            releaseOneRecordingFrame((*it));
275            mFramesBeingEncoded.erase(it);
276            ++mNumFramesEncoded;
277            buffer->setObserver(0);
278            buffer->release();
279            mFrameCompleteCondition.signal();
280            return;
281        }
282    }
283    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
284}
285
286status_t CameraSource::read(
287        MediaBuffer **buffer, const ReadOptions *options) {
288    LOGV("read");
289
290    *buffer = NULL;
291
292    int64_t seekTimeUs;
293    ReadOptions::SeekMode mode;
294    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
295        return ERROR_UNSUPPORTED;
296    }
297
298    sp<IMemory> frame;
299    int64_t frameTime;
300
301    {
302        Mutex::Autolock autoLock(mLock);
303        while (mStarted) {
304            while(mFramesReceived.empty()) {
305                mFrameAvailableCondition.wait(mLock);
306            }
307
308            if (!mStarted) {
309                return OK;
310            }
311
312            frame = *mFramesReceived.begin();
313            mFramesReceived.erase(mFramesReceived.begin());
314
315            frameTime = *mFrameTimes.begin();
316            mFrameTimes.erase(mFrameTimes.begin());
317            int64_t skipTimeUs;
318            if (!options || !options->getSkipFrame(&skipTimeUs)) {
319                skipTimeUs = frameTime;
320            }
321            if (skipTimeUs > frameTime) {
322                LOGV("skipTimeUs: %lld us > frameTime: %lld us",
323                    skipTimeUs, frameTime);
324                releaseOneRecordingFrame(frame);
325                ++mNumFramesDropped;
326                // Safeguard against the abuse of the kSkipFrame_Option.
327                if (skipTimeUs - frameTime >= 1E6) {
328                    LOGE("Frame skipping requested is way too long: %lld us",
329                        skipTimeUs - frameTime);
330                    return UNKNOWN_ERROR;
331                }
332            } else {
333                mFramesBeingEncoded.push_back(frame);
334                *buffer = new MediaBuffer(frame->pointer(), frame->size());
335                (*buffer)->setObserver(this);
336                (*buffer)->add_ref();
337                (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
338
339                return OK;
340            }
341        }
342    }
343    return OK;
344}
345
// Called (via CameraSourceListener) for each recording frame delivered by
// the camera.  |timestampUs| is the capture time in microseconds.  Frames
// are dropped here when the source is stopped, when a subclass requests
// skipping, or when captured before the requested start time; otherwise the
// frame and its adjusted timestamp are queued for read() to consume.
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        // Count as both received and dropped so the
        // received == encoded + dropped invariant checked in stop() holds.
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    // Flag unusually long inter-frame gaps (threshold set in the ctor).
    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if(skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            // From here on mStartTimeUs holds the delay between the
            // requested start time and the first accepted frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    // Queue the frame and its normalized timestamp, then wake read().
    mFramesReceived.push_back(data);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}
395
396}  // namespace android
397