CameraSource.cpp revision 30ab66297501757d745b9ae10da61adcd891f497
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <sys/time.h>

#include <OMX_Component.h>

#include <binder/IServiceManager.h>
#include <cutils/properties.h> // for property_get
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <ui/Camera.h>
#include <ui/CameraParameters.h>
#include <ui/GraphicBuffer.h>
#include <ui/ISurface.h>
#include <ui/Overlay.h>
#include <utils/String8.h>

namespace android {

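// Returns the current wall-clock time in microseconds; used below to
// timestamp incoming preview frames relative to the first one.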
static int64_t getNowUs() {
    struct timeval tv;
    gettimeofday(&tv, NULL);

    // Widen the multiplication to 64 bits; tv_sec * 1000000 would overflow
    // a 32-bit time_t.
    return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
}

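// A no-op ISurface stand-in. start() hands this to the camera when the
// client has not provided a preview surface, so that preview (and with it
// the frame callbacks) can be started without a real display target.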
struct DummySurface : public BnSurface {
    DummySurface() {}

    virtual sp<GraphicBuffer> requestBuffer(int bufferIdx, int usage) {
        return NULL;
    }

    virtual status_t registerBuffers(const BufferHeap &buffers) {
        return OK;
    }

    virtual void postBuffer(ssize_t offset) {}
    virtual void unregisterBuffers() {}

    virtual sp<OverlayRef> createOverlay(
            uint32_t w, uint32_t h, int32_t format) {
        return NULL;
    }

protected:
    virtual ~DummySurface() {}

    DummySurface(const DummySurface &);
    DummySurface &operator=(const DummySurface &);
};

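// Receives camera notifications and preview frames and forwards the frame
// data to the owning CameraSource. The source is held through a weak
// reference so the listener does not extend its lifetime.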
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    wp<CameraSource> mSource;

    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}

void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> source = mSource.promote();
    if (source.get() != NULL) {
        source->dataCallback(msgType, dataPtr);
    }
}

void CameraSourceListener::postDataTimestamp(
        nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
    LOGV("postDataTimestamp(%lld, %d, ptr:%p, size:%d)",
         timestamp, msgType, dataPtr->pointer(), dataPtr->size());
}

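// Factory methods: Create() connects to the default camera, while
// CreateFromICamera() wraps an already-opened ICamera handle. Both return
// NULL if no usable camera is available.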
// static
CameraSource *CameraSource::Create() {
    sp<Camera> camera = Camera::connect();

    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSource(camera);
}

// static
CameraSource *CameraSource::CreateFromICamera(const sp<ICamera> &icamera) {
    sp<Camera> camera = Camera::create(icamera);

    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSource(camera);
}

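// The constructor records the camera's current preview size, which
// getFormat() later reports. On "sholes" hardware the preview format is
// forced to interleaved YUV 4:2:2 because the hardware encoder cannot
// consume yuv420 (see the comment in the body below).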
CameraSource::CameraSource(const sp<Camera> &camera)
    : mCamera(camera),
      mWidth(0),
      mHeight(0),
      mFirstFrameTimeUs(0),
      mNumFrames(0),
      mStarted(false) {
    char value[PROPERTY_VALUE_MAX];
    if (property_get("ro.hardware", value, NULL) && !strcmp(value, "sholes")) {
        // The hardware encoder(s) do not support yuv420, but only YCbYCr,
        // fortunately the camera also supports this, so we needn't transcode.
        mCamera->setParameters(String8("preview-format=yuv422i-yuyv"));
    }

    String8 s = mCamera->getParameters();
    printf("params: \"%s\"\n", s.string());

    CameraParameters params(s);
    params.getPreviewSize(&mWidth, &mHeight);
}

CameraSource::~CameraSource() {
    if (mStarted) {
        stop();
    }
}

void CameraSource::setPreviewSurface(const sp<ISurface> &surface) {
    mPreviewSurface = surface;
}

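// Starts streaming: registers a frame listener, attaches the client's
// preview surface (or a DummySurface if none was set), enables copy-out
// preview callbacks and starts the preview. Frames subsequently arrive via
// dataCallback().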
status_t CameraSource::start(MetaData *) {
    CHECK(!mStarted);

    mCamera->setListener(new CameraSourceListener(this));

    status_t err =
        mCamera->setPreviewDisplay(
                mPreviewSurface != NULL ? mPreviewSurface : new DummySurface);
    CHECK_EQ(err, OK);

    mCamera->setPreviewCallbackFlags(
            FRAME_CALLBACK_FLAG_ENABLE_MASK
            | FRAME_CALLBACK_FLAG_COPY_OUT_MASK);

    err = mCamera->startPreview();
    CHECK_EQ(err, OK);

    mStarted = true;

    return OK;
}

status_t CameraSource::stop() {
    CHECK(mStarted);

    mCamera->stopPreview();

    mStarted = false;

    return OK;
}

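// Describes the output as raw video at the preview dimensions queried in
// the constructor; the color format is always reported as
// OMX_COLOR_FormatYUV420SemiPlanar.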
sp<MetaData> CameraSource::getFormat() {
    sp<MetaData> meta = new MetaData;
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar);
    meta->setInt32(kKeyWidth, mWidth);
    meta->setInt32(kKeyHeight, mHeight);

    return meta;
}

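// Blocks until dataCallback() has queued at least one preview frame, then
// copies the oldest frame into a freshly allocated MediaBuffer stamped with
// its capture time (microseconds since the first frame). Seeking is not
// supported.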
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    CHECK(mStarted);

    *buffer = NULL;

    int64_t seekTimeUs;
    if (options && options->getSeekTo(&seekTimeUs)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        while (mFrames.empty()) {
            mFrameAvailableCondition.wait(mLock);
        }

        frame = *mFrames.begin();
        mFrames.erase(mFrames.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
    }

    *buffer = new MediaBuffer(frame->size());
    memcpy((*buffer)->data(), frame->pointer(), frame->size());
    (*buffer)->set_range(0, frame->size());

    (*buffer)->meta_data()->clear();
    (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);

    return OK;
}

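// Called from CameraSourceListener::postData() for every preview frame.
// Queues the frame together with a timestamp relative to the first frame
// and wakes any reader blocked in read().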
void CameraSource::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    Mutex::Autolock autoLock(mLock);

    int64_t nowUs = getNowUs();
    if (mNumFrames == 0) {
        mFirstFrameTimeUs = nowUs;
    }
    ++mNumFrames;

    mFrames.push_back(data);
    mFrameTimes.push_back(nowUs - mFirstFrameTimeUs);
    mFrameAvailableCondition.signal();
}

}  // namespace android