// CameraSourceTimeLapse.cpp revision 7dbfa5a98c9f396ea63d0f322c3fa11df91d17a2
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "CameraSourceTimeLapse"
19
20#include <binder/IPCThreadState.h>
21#include <binder/MemoryBase.h>
22#include <binder/MemoryHeapBase.h>
23#include <media/stagefright/CameraSource.h>
24#include <media/stagefright/CameraSourceTimeLapse.h>
25#include <media/stagefright/MediaDebug.h>
26#include <media/stagefright/MetaData.h>
27#include <media/stagefright/YUVImage.h>
28#include <media/stagefright/YUVCanvas.h>
29#include <camera/Camera.h>
30#include <camera/CameraParameters.h>
31#include <ui/Rect.h>
32#include <utils/String8.h>
33#include <utils/Vector.h>
34#include "OMX_Video.h"
35#include <limits.h>
36
37namespace android {
38
39// static
40CameraSourceTimeLapse *CameraSourceTimeLapse::Create(bool useStillCameraForTimeLapse,
41        int64_t timeBetweenTimeLapseFrameCaptureUs,
42        int32_t width, int32_t height,
43        int32_t videoFrameRate) {
44    sp<Camera> camera = Camera::connect(0);
45
46    if (camera.get() == NULL) {
47        return NULL;
48    }
49
50    return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
51            timeBetweenTimeLapseFrameCaptureUs, width, height, videoFrameRate);
52}
53
54// static
55CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera,
56        bool useStillCameraForTimeLapse,
57        int64_t timeBetweenTimeLapseFrameCaptureUs,
58        int32_t width, int32_t height,
59        int32_t videoFrameRate) {
60    if (camera.get() == NULL) {
61        return NULL;
62    }
63
64    return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
65            timeBetweenTimeLapseFrameCaptureUs, width, height, videoFrameRate);
66}
67
// Construct a time-lapse source.  In still-camera mode, choose the closest
// supported picture size covering the requested video dimensions and enable
// center-cropping when the two differ.
CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
        bool useStillCameraForTimeLapse,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate)
    : CameraSource(camera),
      mUseStillCameraForTimeLapse(useStillCameraForTimeLapse),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      // Interval between frames in the produced (sped-up) video, in us.
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGV("starting time lapse mode");
    mVideoWidth = width;
    mVideoHeight = height;
    if (mUseStillCameraForTimeLapse) {
        // Abort if no supported picture size can contain the video frame.
        CHECK(setPictureSizeToClosestSupported(width, height));
        mNeedCropping = computeCropRectangleOffset();
        // Advertise the requested video dimensions downstream, not the
        // (possibly larger) still-picture dimensions.
        mMeta->setInt32(kKeyWidth, width);
        mMeta->setInt32(kKeyHeight, height);
    }
}
90
// No cleanup of our own; the base class destructor runs as usual.
CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
93
94bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
95    int64_t token = IPCThreadState::self()->clearCallingIdentity();
96    String8 s = mCamera->getParameters();
97    IPCThreadState::self()->restoreCallingIdentity(token);
98
99    CameraParameters params(s);
100    Vector<Size> supportedSizes;
101    params.getSupportedPictureSizes(supportedSizes);
102
103    int32_t minPictureSize = INT_MAX;
104    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
105        int32_t pictureWidth = supportedSizes[i].width;
106        int32_t pictureHeight = supportedSizes[i].height;
107
108        if ((pictureWidth >= width) && (pictureHeight >= height)) {
109            int32_t pictureSize = pictureWidth*pictureHeight;
110            if (pictureSize < minPictureSize) {
111                minPictureSize = pictureSize;
112                mPictureWidth = pictureWidth;
113                mPictureHeight = pictureHeight;
114            }
115        }
116    }
117    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
118    return (minPictureSize != INT_MAX);
119}
120
121bool CameraSourceTimeLapse::computeCropRectangleOffset() {
122    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
123        return false;
124    }
125
126    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
127
128    int32_t widthDifference = mPictureWidth - mVideoWidth;
129    int32_t heightDifference = mPictureHeight - mVideoHeight;
130
131    mCropRectStartX = widthDifference/2;
132    mCropRectStartY = heightDifference/2;
133
134    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
135
136    return true;
137}
138
139// static
140void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
141    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
142    source->threadTimeLapseEntry();
143    return NULL;
144}
145
146void CameraSourceTimeLapse::threadTimeLapseEntry() {
147    while(mStarted) {
148        if (mCameraIdle) {
149            LOGV("threadTimeLapseEntry: taking picture");
150            CHECK_EQ(OK, mCamera->takePicture());
151            mCameraIdle = false;
152            usleep(mTimeBetweenTimeLapseFrameCaptureUs);
153        } else {
154            LOGV("threadTimeLapseEntry: camera busy with old takePicture. Sleeping a little.");
155            usleep(1E4);
156        }
157    }
158}
159
// Begin capture.  Still-camera mode: push the chosen picture size to the
// camera and spawn a joinable thread that repeatedly calls takePicture().
// Video-camera mode: start the normal recording stream.
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        // Binder round trip; temporarily drop the caller's identity.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();
        IPCThreadState::self()->restoreCallingIdentity(token);

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        // Mark the camera ready so the capture thread fires its first
        // takePicture() immediately.
        mCameraIdle = true;

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        // Joinable: stopCameraRecording() waits for this thread to exit.
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}
185
186void CameraSourceTimeLapse::stopCameraRecording() {
187    if (mUseStillCameraForTimeLapse) {
188        void *dummy;
189        pthread_join(mThreadTimeLapse, &dummy);
190    } else {
191        mCamera->stopRecording();
192    }
193}
194
195void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
196    if (!mUseStillCameraForTimeLapse) {
197        mCamera->releaseRecordingFrame(frame);
198    }
199}
200
201sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
202    size_t source_size = source_data->size();
203    void* source_pointer = source_data->pointer();
204
205    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
206    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
207    memcpy(newMemory->pointer(), source_pointer, source_size);
208    return newMemory;
209}
210
211// Allocates IMemory of final type MemoryBase with the given size.
212sp<IMemory> allocateIMemory(size_t size) {
213    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
214    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
215    return newMemory;
216}
217
218// static
219void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
220    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
221    source->threadStartPreview();
222    return NULL;
223}
224
// Runs on the detached thread spawned by restartPreview(): restart the
// camera preview and mark the camera ready for the next takePicture().
void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    mCameraIdle = true;
}
229
// Restart the camera preview asynchronously after a still capture.
void CameraSourceTimeLapse::restartPreview() {
    // Start this in a different thread, so that the dataCallback can return
    LOGV("restartPreview");
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    // Detached: this short-lived thread is never joined.
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

    pthread_t threadPreview;
    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
    pthread_attr_destroy(&attr);
}
241
242sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
243    // find the YUV format
244    int32_t srcFormat;
245    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
246    YUVImage::YUVFormat yuvFormat;
247    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
248        yuvFormat = YUVImage::YUV420SemiPlanar;
249    } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
250        yuvFormat = YUVImage::YUV420Planar;
251    }
252
253    // allocate memory for cropped image and setup a canvas using it.
254    sp<IMemory> croppedImageMemory = allocateIMemory(
255            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
256    YUVImage yuvImageCropped(yuvFormat,
257            mVideoWidth, mVideoHeight,
258            (uint8_t *)croppedImageMemory->pointer());
259    YUVCanvas yuvCanvasCrop(yuvImageCropped);
260
261    YUVImage yuvImageSource(yuvFormat,
262            mPictureWidth, mPictureHeight,
263            (uint8_t *)source_data->pointer());
264    yuvCanvasCrop.CopyImageRect(
265            Rect(mCropRectStartX, mCropRectStartY,
266                mCropRectStartX + mVideoWidth,
267                mCropRectStartY + mVideoHeight),
268            0, 0,
269            yuvImageSource);
270
271    return croppedImageMemory;
272}
273
// Still-capture callback (still-camera mode).  CAMERA_MSG_COMPRESSED_IMAGE
// means takePicture() is finishing, so restart the preview for the next
// capture.  CAMERA_MSG_RAW_IMAGE carries the YUV frame, which gets a
// synthesized timestamp, optional cropping, and is forwarded to
// dataCallbackTimestamp().
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        // Ignore all other message types.
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    // Synthesize the timestamp: first frame at the recording start time,
    // each later frame exactly one video-frame interval after the last.
    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        // NOTE(review): a private copy is forwarded instead of `data`
        // itself — presumably because the camera reuses its buffer after
        // this callback returns; confirm against the camera HAL contract.
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}
302
303bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
304    if (mSkipCurrentFrame) {
305        mSkipCurrentFrame = false;
306        return true;
307    } else {
308        return false;
309    }
310}
311
// Video-camera mode only: decide whether the frame with real timestamp
// *timestampUs should be dropped to realize the time-lapse interval.
// Returns true to skip.  For kept frames, *timestampUs is rewritten so
// consecutive kept frames are exactly one video-frame interval apart,
// producing the sped-up output.  Still-camera mode always returns false.
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        } else if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        }
    }
    return false;
}
340
// Recording-stream callback.  In video-camera mode, frames arriving before
// the next capture interval set mSkipCurrentFrame (consumed later by
// skipCurrentFrame()) and kept frames get rewritten timestamps via
// skipFrameAndModifyTimeStamp().  The frame is then passed to the base
// class for normal handling.
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}
348
349}  // namespace android
350