CameraSource.cpp revision cbe48a0678729eb863b259b4744e6ad12faf0475
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>

#include <OMX_Component.h>

#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>

namespace android {

// Adapter that forwards camera HAL callbacks to a CameraSource.  It holds
// only a weak reference to the source so that a registered listener cannot
// keep a stopped/destroyed CameraSource alive (the camera retains the
// listener until setListener(NULL) is called).
struct CameraSourceListener : public CameraListener {
    CameraSourceListener(const sp<CameraSource> &source);

    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
    virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr);

    virtual void postDataTimestamp(
            nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);

protected:
    virtual ~CameraSourceListener();

private:
    // Weak reference: see class comment.
    wp<CameraSource> mSource;

    // Non-copyable.
    CameraSourceListener(const CameraSourceListener &);
    CameraSourceListener &operator=(const CameraSourceListener &);
};

CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
    : mSource(source) {
}

CameraSourceListener::~CameraSourceListener() {
}

// Camera status notifications (errors, zoom, etc.) are only logged;
// CameraSource takes no action on them.
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
    LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
64void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) { 65 LOGV("postData(%d, ptr:%p, size:%d)", 66 msgType, dataPtr->pointer(), dataPtr->size()); 67} 68 69void CameraSourceListener::postDataTimestamp( 70 nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) { 71 72 sp<CameraSource> source = mSource.promote(); 73 if (source.get() != NULL) { 74 source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr); 75 } 76} 77 78// static 79CameraSource *CameraSource::Create() { 80 sp<Camera> camera = Camera::connect(0); 81 82 if (camera.get() == NULL) { 83 return NULL; 84 } 85 86 return new CameraSource(camera); 87} 88 89// static 90CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) { 91 if (camera.get() == NULL) { 92 return NULL; 93 } 94 95 return new CameraSource(camera); 96} 97 98CameraSource::CameraSource(const sp<Camera> &camera) 99 : mCamera(camera), 100 mWidth(0), 101 mHeight(0), 102 mFirstFrameTimeUs(0), 103 mLastFrameTimestampUs(0), 104 mNumFramesReceived(0), 105 mNumFramesEncoded(0), 106 mNumFramesDropped(0), 107 mBufferGroup(NULL), 108 mStarted(false) { 109 String8 s = mCamera->getParameters(); 110 printf("params: \"%s\"\n", s.string()); 111 112 CameraParameters params(s); 113 params.getPreviewSize(&mWidth, &mHeight); 114} 115 116CameraSource::~CameraSource() { 117 if (mStarted) { 118 stop(); 119 } 120} 121 122static int bytesPerPixelTimes10(const char *colorFormat) { 123 LOGI("color format: %s", colorFormat); 124 return 20; 125#if 0 126 // XXX: Fix Camera Hal bug? 127 // On sholes, it returns CameraParameters::PIXEL_FORMAT_YUV420SP??? 
128 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP) || 129 !strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I) || 130 !strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) { 131 return 20; 132 } else if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) { 133 return 15; 134 } 135 CHECK_EQ(0, "Unknown color format"); 136#endif 137} 138 139status_t CameraSource::start(MetaData *) { 140 LOGV("start"); 141 CHECK(!mStarted); 142 143 mCamera->setListener(new CameraSourceListener(this)); 144 CHECK_EQ(OK, mCamera->startRecording()); 145 146 mStarted = true; 147 mBufferGroup = new MediaBufferGroup(); 148 String8 s = mCamera->getParameters(); 149 CameraParameters params(s); 150 const char *colorFormat = params.getPreviewFormat(); 151 const int size = (mWidth * mHeight * bytesPerPixelTimes10(colorFormat))/10; 152 mBufferGroup->add_buffer(new MediaBuffer(size)); 153 154 return OK; 155} 156 157status_t CameraSource::stop() { 158 LOGV("stop"); 159 Mutex::Autolock autoLock(mLock); 160 mStarted = false; 161 mFrameAvailableCondition.signal(); 162 mCamera->setListener(NULL); 163 mCamera->stopRecording(); 164 165 releaseQueuedFrames(); 166 delete mBufferGroup; 167 mBufferGroup = NULL; 168 LOGI("Frames received/encoded/dropped: %d/%d/%d, timestamp (us) last/first: %lld/%lld", 169 mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped, 170 mLastFrameTimestampUs, mFirstFrameTimeUs); 171 172 CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped); 173 return OK; 174} 175 176void CameraSource::releaseQueuedFrames() { 177 List<sp<IMemory> >::iterator it; 178 while (!mFrames.empty()) { 179 it = mFrames.begin(); 180 mCamera->releaseRecordingFrame(*it); 181 mFrames.erase(it); 182 ++mNumFramesDropped; 183 } 184} 185 186sp<MetaData> CameraSource::getFormat() { 187 sp<MetaData> meta = new MetaData; 188 meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); 189 meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar); 190 
meta->setInt32(kKeyWidth, mWidth); 191 meta->setInt32(kKeyHeight, mHeight); 192 193 return meta; 194} 195 196status_t CameraSource::read( 197 MediaBuffer **buffer, const ReadOptions *options) { 198 LOGV("read"); 199 200 *buffer = NULL; 201 202 int64_t seekTimeUs; 203 if (options && options->getSeekTo(&seekTimeUs)) { 204 return ERROR_UNSUPPORTED; 205 } 206 207 sp<IMemory> frame; 208 int64_t frameTime; 209 210 { 211 Mutex::Autolock autoLock(mLock); 212 while (mStarted && mFrames.empty()) { 213 mFrameAvailableCondition.wait(mLock); 214 } 215 if (!mStarted) { 216 return OK; 217 } 218 frame = *mFrames.begin(); 219 mFrames.erase(mFrames.begin()); 220 221 frameTime = *mFrameTimes.begin(); 222 mFrameTimes.erase(mFrameTimes.begin()); 223 ++mNumFramesEncoded; 224 } 225 226 mBufferGroup->acquire_buffer(buffer); 227 memcpy((*buffer)->data(), frame->pointer(), frame->size()); 228 (*buffer)->set_range(0, frame->size()); 229 mCamera->releaseRecordingFrame(frame); 230 231 (*buffer)->meta_data()->clear(); 232 (*buffer)->meta_data()->setInt64(kKeyTime, frameTime); 233 234 return OK; 235} 236 237void CameraSource::dataCallbackTimestamp(int64_t timestampUs, 238 int32_t msgType, const sp<IMemory> &data) { 239 LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs); 240 mLastFrameTimestampUs = timestampUs; 241 Mutex::Autolock autoLock(mLock); 242 if (!mStarted) { 243 mCamera->releaseRecordingFrame(data); 244 ++mNumFramesReceived; 245 ++mNumFramesDropped; 246 return; 247 } 248 249 if (mNumFramesReceived == 0) { 250 mFirstFrameTimeUs = timestampUs; 251 } 252 ++mNumFramesReceived; 253 254 mFrames.push_back(data); 255 mFrameTimes.push_back(timestampUs - mFirstFrameTimeUs); 256 mFrameAvailableCondition.signal(); 257} 258 259} // namespace android 260