CameraSource.cpp revision 653252be963c07c99109d20f942d1f30c52a9360
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
// LOG_TAG must be defined before <utils/Log.h> is included.
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>

#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>

namespace android {

// Bridges camera-client callbacks to a CameraSource. Installed on the
// camera by CameraSource::start(); recording frames are delivered through
// postDataTimestamp() (see the definitions further down in this file).
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak pointer: the listener must not keep the CameraSource alive,
    // since the camera holds the listener and the source holds the camera.
    wp<CameraSource> mSource;

    // Non-copyable (declared, intentionally not defined).
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

// Generic camera notification callback; only logged, never acted upon.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
64void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) { 65 LOGV("postData(%d, ptr:%p, size:%d)", 66 msgType, dataPtr->pointer(), dataPtr->size()); 67} 68 69void CameraSourceListener::postDataTimestamp( 70 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) { 71 72 sp<CameraSource> source = mSource.promote(); 73 if (source.get() != NULL) { 74 source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr); 75 } 76} 77 78static int32_t getColorFormat(const char* colorFormat) { 79 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) { 80 return OMX_COLOR_FormatYUV422SemiPlanar; 81 } 82 83 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) { 84 return OMX_COLOR_FormatYUV420SemiPlanar; 85 } 86 87 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) { 88 return OMX_COLOR_FormatYCbYCr; 89 } 90 91 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) { 92 return OMX_COLOR_Format16bitRGB565; 93 } 94 95 CHECK_EQ(0, "Unknown color format"); 96} 97 98// static 99CameraSource *CameraSource::Create() { 100 sp<Camera> camera = Camera::connect(0); 101 102 if (camera.get() == NULL) { 103 return NULL; 104 } 105 106 return new CameraSource(camera); 107} 108 109// static 110CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) { 111 if (camera.get() == NULL) { 112 return NULL; 113 } 114 115 return new CameraSource(camera); 116} 117 118CameraSource::CameraSource(const sp<Camera> &camera) 119 : mCamera(camera), 120 mFirstFrameTimeUs(0), 121 mLastFrameTimestampUs(0), 122 mNumFramesReceived(0), 123 mNumFramesEncoded(0), 124 mNumFramesDropped(0), 125 mStarted(false) { 126 String8 s = mCamera->getParameters(); 127 printf("params: \"%s\"\n", s.string()); 128 129 int32_t width, height; 130 CameraParameters params(s); 131 params.getPreviewSize(&width, &height); 132 133 const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT); 134 CHECK(colorFormatStr != 
NULL); 135 int32_t colorFormat = getColorFormat(colorFormatStr); 136 137 mMeta = new MetaData; 138 mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); 139 mMeta->setInt32(kKeyColorFormat, colorFormat); 140 mMeta->setInt32(kKeyWidth, width); 141 mMeta->setInt32(kKeyHeight, height); 142} 143 144CameraSource::~CameraSource() { 145 if (mStarted) { 146 stop(); 147 } 148} 149 150status_t CameraSource::start(MetaData *) { 151 LOGV("start"); 152 CHECK(!mStarted); 153 154 mCamera->setListener(new CameraSourceListener(this)); 155 CHECK_EQ(OK, mCamera->startRecording()); 156 157 mStarted = true; 158 return OK; 159} 160 161status_t CameraSource::stop() { 162 LOGV("stop"); 163 Mutex::Autolock autoLock(mLock); 164 mStarted = false; 165 mFrameAvailableCondition.signal(); 166 mCamera->setListener(NULL); 167 mCamera->stopRecording(); 168 169 releaseQueuedFrames(); 170 171 while (!mFramesBeingEncoded.empty()) { 172 LOGI("Number of outstanding frames is being encoded: %d", mFramesBeingEncoded.size()); 173 mFrameCompleteCondition.wait(mLock); 174 } 175 176 LOGI("Frames received/encoded/dropped: %d/%d/%d, timestamp (us) last/first: %lld/%lld", 177 mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped, 178 mLastFrameTimestampUs, mFirstFrameTimeUs); 179 180 CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped); 181 return OK; 182} 183 184void CameraSource::releaseQueuedFrames() { 185 List<sp<IMemory> >::iterator it; 186 while (!mFramesReceived.empty()) { 187 it = mFramesReceived.begin(); 188 mCamera->releaseRecordingFrame(*it); 189 mFramesReceived.erase(it); 190 ++mNumFramesDropped; 191 } 192} 193 194sp<MetaData> CameraSource::getFormat() { 195 return mMeta; 196} 197 198void CameraSource::signalBufferReturned(MediaBuffer *buffer) { 199 LOGV("signalBufferReturned: %p", buffer->data()); 200 for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin(); 201 it != mFramesBeingEncoded.end(); ++it) { 202 if ((*it)->pointer() == buffer->data()) { 203 
mCamera->releaseRecordingFrame((*it)); 204 mFramesBeingEncoded.erase(it); 205 ++mNumFramesEncoded; 206 buffer->setObserver(0); 207 buffer->release(); 208 mFrameCompleteCondition.signal(); 209 return; 210 } 211 } 212 CHECK_EQ(0, "signalBufferReturned: bogus buffer"); 213} 214 215status_t CameraSource::read( 216 MediaBuffer **buffer, const ReadOptions *options) { 217 LOGV("read"); 218 219 *buffer = NULL; 220 221 int64_t seekTimeUs; 222 if (options && options->getSeekTo(&seekTimeUs)) { 223 return ERROR_UNSUPPORTED; 224 } 225 226 sp<IMemory> frame; 227 int64_t frameTime; 228 229 { 230 Mutex::Autolock autoLock(mLock); 231 while (mStarted && mFramesReceived.empty()) { 232 mFrameAvailableCondition.wait(mLock); 233 } 234 if (!mStarted) { 235 return OK; 236 } 237 frame = *mFramesReceived.begin(); 238 mFramesReceived.erase(mFramesReceived.begin()); 239 240 frameTime = *mFrameTimes.begin(); 241 mFrameTimes.erase(mFrameTimes.begin()); 242 243 mFramesBeingEncoded.push_back(frame); 244 *buffer = new MediaBuffer(frame->pointer(), frame->size()); 245 (*buffer)->setObserver(this); 246 (*buffer)->add_ref(); 247 (*buffer)->meta_data()->setInt64(kKeyTime, frameTime); 248 } 249 return OK; 250} 251 252void CameraSource::dataCallbackTimestamp(int64_t timestampUs, 253 int32_t msgType, const sp<IMemory> &data) { 254 LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs); 255 mLastFrameTimestampUs = timestampUs; 256 Mutex::Autolock autoLock(mLock); 257 if (!mStarted) { 258 mCamera->releaseRecordingFrame(data); 259 ++mNumFramesReceived; 260 ++mNumFramesDropped; 261 return; 262 } 263 264 if (mNumFramesReceived == 0) { 265 mFirstFrameTimeUs = timestampUs; 266 } 267 ++mNumFramesReceived; 268 269 mFramesReceived.push_back(data); 270 mFrameTimes.push_back(timestampUs - mFirstFrameTimeUs); 271 mFrameAvailableCondition.signal(); 272} 273 274} // namespace android 275