CameraSource.cpp revision ddcc4a66d848deef6fb4689e64e30cd9bd2684fe
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>

#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <cutils/properties.h>

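// CameraSource wraps an android::Camera and exposes its recording frames
// through the MediaSource interface: the camera pushes frames in via
// dataCallbackTimestamp(), read() hands them out as MediaBuffers, and
// signalBufferReturned() gives each frame back to the camera once the
// consumer has released it.
//
// A rough sketch of typical client usage, based only on the methods defined
// in this file (encoder/writer wiring and error handling omitted):
//
//     sp<CameraSource> source = CameraSource::Create();
//     sp<MetaData> format = source->getFormat();  // raw video: size, color format
//     source->start();
//     MediaBuffer *buffer;
//     while (source->read(&buffer) == OK && buffer != NULL) {
//         // consume buffer->data() ...
//         buffer->release();  // eventually triggers signalBufferReturned()
//     }
//     source->stop();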
namespace android {

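// Proxy that receives camera callbacks and forwards timestamped recording
// frames to the CameraSource, converting the camera's nanosecond timestamps
// to microseconds on the way. It holds only a weak reference to the source
// so that the listener registered with the camera does not keep the
// CameraSource alive or create a reference cycle.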
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
    }
}

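// Maps a CameraParameters pixel-format string to the corresponding
// OMX_COLOR_FORMATTYPE value that is published as kKeyColorFormat in the
// output format. Aborts on formats this source does not handle.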
static int32_t getColorFormat(const char* colorFormat) {
    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
        return OMX_COLOR_FormatYUV422SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        return OMX_COLOR_FormatYUV420SemiPlanar;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
        return OMX_COLOR_FormatYCbYCr;
    }

    if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
        return OMX_COLOR_Format16bitRGB565;
    }

    CHECK_EQ(0, "Unknown color format");
    return -1;  // Not reached: the CHECK_EQ above aborts. Present only to
                // silence the compiler's missing-return warning.
}

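// Factory methods: Create() connects to camera 0, while CreateFromCamera()
// wraps a camera the caller has already opened. Both return NULL when no
// usable camera is available.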
// static
CameraSource *CameraSource::Create() {
    sp<Camera> camera = Camera::connect(0);

    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSource(camera);
}

// static
CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSource(camera);
}

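// Queries the camera's current parameters to derive the output format:
// the preview size gives width/height, KEY_VIDEO_FRAME_FORMAT gives the
// color format, and stride/slice height are assumed equal to width/height
// until the camera can report them (see the XXX note below).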
CameraSource::CameraSource(const sp<Camera> &camera)
    : mCamera(camera),
      mFirstFrameTimeUs(0),
      mLastFrameTimestampUs(0),
      mNumFramesReceived(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mCollectStats(false),
      mStarted(false) {
    String8 s = mCamera->getParameters();
    printf("params: \"%s\"\n", s.string());

    int32_t width, height, stride, sliceHeight;
    CameraParameters params(s);
    params.getPreviewSize(&width, &height);

    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
    CHECK(colorFormatStr != NULL);
    int32_t colorFormat = getColorFormat(colorFormatStr);

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    stride = width;
    sliceHeight = height;

    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, colorFormat);
    mMeta->setInt32(kKeyWidth, width);
    mMeta->setInt32(kKeyHeight, height);
    mMeta->setInt32(kKeyStride, stride);
    mMeta->setInt32(kKeySliceHeight, sliceHeight);
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}

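// Registers the frame listener and starts the camera's recording stream.
// Per-session frame statistics are logged on stop() when the
// "media.stagefright.record-stats" system property is set to "1" or "true".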
status_t CameraSource::start(MetaData *) {
    LOGV("start");
    CHECK(!mStarted);

    char value[PROPERTY_VALUE_MAX];
    if (property_get("media.stagefright.record-stats", value, NULL)
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mCamera->setListener(new CameraSourceListener(this));
    CHECK_EQ(OK, mCamera->startRecording());

    mStarted = true;
    return OK;
}

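// Stops recording: wakes any reader blocked in read(), detaches the
// listener, returns all still-queued frames to the camera, and then waits
// until every frame handed out to a consumer has come back through
// signalBufferReturned().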
status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
    mStarted = false;
    mFrameAvailableCondition.signal();

    mCamera->setListener(NULL);
    mCamera->stopRecording();

    releaseQueuedFrames();

    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
                mFramesBeingEncoded.size());
        mFrameCompleteCondition.wait(mLock);
    }

    if (mCollectStats) {
        LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
                mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                mLastFrameTimestampUs - mFirstFrameTimeUs);
    }

    CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    return OK;
}

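// Returns every frame still sitting in the received queue to the camera
// and counts each one as dropped.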
void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        mCamera->releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}

sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

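// Called when a MediaBuffer handed out by read() is released by its
// consumer. Finds the matching camera frame, returns it to the camera,
// and wakes stop() if it is waiting for outstanding frames.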
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());

    // mFramesBeingEncoded is shared with read() and stop(); guard it so a
    // concurrent buffer return cannot race with those paths.
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            mCamera->releaseRecordingFrame(*it);
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

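// Blocks until a frame is available (or the source is stopped), then wraps
// the frame's IMemory in a MediaBuffer without copying the data and tags it
// with its timestamp relative to the first frame. Seeking is not supported;
// once stopped, returns OK with *buffer left NULL.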
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    *buffer = NULL;

    int64_t seekTimeUs;
    if (options && options->getSeekTo(&seekTimeUs)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFramesReceived.empty()) {
            mFrameAvailableCondition.wait(mLock);
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());

        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
    }
    return OK;
}

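// Invoked (via CameraSourceListener) on the camera's callback thread with
// each recording frame and its timestamp in microseconds. Frames that
// arrive while the source is not started are returned to the camera right
// away and counted as dropped; otherwise the frame is queued and a waiting
// read() is woken up.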
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        int32_t msgType, const sp<IMemory> &data) {
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        mCamera->releaseRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    mFrameTimes.push_back(timestampUs - mFirstFrameTimeUs);
    mFrameAvailableCondition.signal();
}

}  // namespace android