// CameraSourceTimeLapse.cpp, revision c4e47d1e81c4e4403663cb911e98dbf3ada9942c
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <pthread.h>
#include <unistd.h>

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include "OMX_Video.h"
34
35namespace android {
36
37// static
38CameraSourceTimeLapse *CameraSourceTimeLapse::Create(bool useStillCameraForTimeLapse,
39        int64_t timeBetweenTimeLapseFrameCaptureUs,
40        int32_t width, int32_t height,
41        int32_t videoFrameRate) {
42    sp<Camera> camera = Camera::connect(0);
43
44    if (camera.get() == NULL) {
45        return NULL;
46    }
47
48    return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
49            timeBetweenTimeLapseFrameCaptureUs, width, height, videoFrameRate);
50}
51
52// static
53CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera,
54        bool useStillCameraForTimeLapse,
55        int64_t timeBetweenTimeLapseFrameCaptureUs,
56        int32_t width, int32_t height,
57        int32_t videoFrameRate) {
58    if (camera.get() == NULL) {
59        return NULL;
60    }
61
62    return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
63            timeBetweenTimeLapseFrameCaptureUs, width, height, videoFrameRate);
64}
65
// Constructs a time lapse source. In still-camera mode, captures may come in
// at a larger resolution than the requested video size; in that case the
// frames are center-cropped (see computeCropRectangleOffset()) and the
// metadata advertises the requested width/height.
CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
        bool useStillCameraForTimeLapse,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate)
    : CameraSource(camera),
      mUseStillCameraForTimeLapse(useStillCameraForTimeLapse),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      // Frame spacing in the *output* video, in us. Note the implicit
      // truncation of 1E6/videoFrameRate to an integer; assumes
      // videoFrameRate > 0 — TODO confirm callers guarantee this.
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGV("starting time lapse mode");
    mVideoWidth = width;
    mVideoHeight = height;
    if (mUseStillCameraForTimeLapse) {
        // Pick the capture resolution and decide whether cropping is needed
        // to get from the capture size down to the requested video size.
        setPictureSizeToClosestSupported(width, height);
        mNeedCropping = computeCropRectangleOffset();
        // Report the (possibly cropped) video size, not the capture size.
        mMeta->setInt32(kKeyWidth, width);
        mMeta->setInt32(kKeyHeight, height);
    }
}
88
// Nothing to release here beyond what the CameraSource destructor handles.
CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
91
92void CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
93    // TODO: Currently fixed to the highest resolution.
94    // Need to poll the camera and set accordingly.
95    mPictureWidth = 2048;
96    mPictureHeight = 1536;
97}
98
99bool CameraSourceTimeLapse::computeCropRectangleOffset() {
100    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
101        return false;
102    }
103
104    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
105
106    int32_t widthDifference = mPictureWidth - mVideoWidth;
107    int32_t heightDifference = mPictureHeight - mVideoHeight;
108
109    mCropRectStartX = widthDifference/2;
110    mCropRectStartY = heightDifference/2;
111
112    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
113
114    return true;
115}
116
117// static
118void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
119    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
120    source->threadTimeLapseEntry();
121    return NULL;
122}
123
124void CameraSourceTimeLapse::threadTimeLapseEntry() {
125    while(mStarted) {
126        if (mCameraIdle) {
127            LOGV("threadTimeLapseEntry: taking picture");
128            CHECK_EQ(OK, mCamera->takePicture());
129            mCameraIdle = false;
130            sleep(mTimeBetweenTimeLapseFrameCaptureUs/1E6);
131        } else {
132            LOGV("threadTimeLapseEntry: camera busy with old takePicture. Sleeping a little.");
133            sleep(.01);
134        }
135    }
136}
137
138void CameraSourceTimeLapse::startCameraRecording() {
139    if (mUseStillCameraForTimeLapse) {
140        LOGV("start time lapse recording using still camera");
141
142        int64_t token = IPCThreadState::self()->clearCallingIdentity();
143        String8 s = mCamera->getParameters();
144        IPCThreadState::self()->restoreCallingIdentity(token);
145
146        CameraParameters params(s);
147        params.setPictureSize(mPictureWidth, mPictureHeight);
148        mCamera->setParameters(params.flatten());
149        mCameraIdle = true;
150
151        // create a thread which takes pictures in a loop
152        pthread_attr_t attr;
153        pthread_attr_init(&attr);
154        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
155
156        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
157        pthread_attr_destroy(&attr);
158    } else {
159        LOGV("start time lapse recording using video camera");
160        CHECK_EQ(OK, mCamera->startRecording());
161    }
162}
163
164void CameraSourceTimeLapse::stopCameraRecording() {
165    if (mUseStillCameraForTimeLapse) {
166        void *dummy;
167        pthread_join(mThreadTimeLapse, &dummy);
168    } else {
169        mCamera->stopRecording();
170    }
171}
172
173void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
174    if (!mUseStillCameraForTimeLapse) {
175        mCamera->releaseRecordingFrame(frame);
176    }
177}
178
179sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
180    size_t source_size = source_data->size();
181    void* source_pointer = source_data->pointer();
182
183    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
184    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
185    memcpy(newMemory->pointer(), source_pointer, source_size);
186    return newMemory;
187}
188
189// Allocates IMemory of final type MemoryBase with the given size.
190sp<IMemory> allocateIMemory(size_t size) {
191    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
192    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
193    return newMemory;
194}
195
196// static
197void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
198    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
199    source->threadStartPreview();
200    return NULL;
201}
202
// Runs on the detached thread spawned by restartPreview(). Restarts the
// camera preview and only then marks the camera idle again, so the capture
// loop in threadTimeLapseEntry() won't issue the next takePicture() before
// the preview is back up.
void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    mCameraIdle = true;
}
207
208void CameraSourceTimeLapse::restartPreview() {
209    // Start this in a different thread, so that the dataCallback can return
210    LOGV("restartPreview");
211    pthread_attr_t attr;
212    pthread_attr_init(&attr);
213    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
214
215    pthread_t threadPreview;
216    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
217    pthread_attr_destroy(&attr);
218}
219
220sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
221    // find the YUV format
222    int32_t srcFormat;
223    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
224    YUVImage::YUVFormat yuvFormat;
225    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
226        yuvFormat = YUVImage::YUV420SemiPlanar;
227    } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
228        yuvFormat = YUVImage::YUV420Planar;
229    }
230
231    // allocate memory for cropped image and setup a canvas using it.
232    sp<IMemory> croppedImageMemory = allocateIMemory(
233            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
234    YUVImage yuvImageCropped(yuvFormat,
235            mVideoWidth, mVideoHeight,
236            (uint8_t *)croppedImageMemory->pointer());
237    YUVCanvas yuvCanvasCrop(yuvImageCropped);
238
239    YUVImage yuvImageSource(yuvFormat,
240            mPictureWidth, mPictureHeight,
241            (uint8_t *)source_data->pointer());
242    yuvCanvasCrop.CopyImageRect(
243            Rect(mCropRectStartX, mCropRectStartY,
244                mCropRectStartX + mVideoWidth,
245                mCropRectStartY + mVideoHeight),
246            0, 0,
247            yuvImageSource);
248
249    return croppedImageMemory;
250}
251
252void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
253    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
254        // takePicture will complete after this callback, so restart preview.
255        restartPreview();
256        return;
257    }
258    if (msgType != CAMERA_MSG_RAW_IMAGE) {
259        return;
260    }
261
262    LOGV("dataCallback for timelapse still frame");
263    CHECK_EQ(true, mUseStillCameraForTimeLapse);
264
265    int64_t timestampUs;
266    if (mNumFramesReceived == 0) {
267        timestampUs = mStartTimeUs;
268    } else {
269        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
270    }
271
272    if (mNeedCropping) {
273        sp<IMemory> croppedImageData = cropYUVImage(data);
274        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
275    } else {
276        sp<IMemory> dataCopy = createIMemoryCopy(data);
277        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
278    }
279}
280
281bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
282    if (mSkipCurrentFrame) {
283        mSkipCurrentFrame = false;
284        return true;
285    } else {
286        return false;
287    }
288}
289
// Time lapse frame selection for video-camera mode. Decides whether the
// frame with real timestamp *timestampUs should be dropped, and for kept
// frames rewrites *timestampUs so consecutive output frames are spaced
// mTimeBetweenTimeLapseVideoFramesUs apart.
// Returns true if the frame should be skipped. In still-camera mode this is
// a no-op returning false, since dataCallback() already generates the
// desired timestamps there.
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        } else if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        }
    }
    return false;
}
318
// Timestamped frame callback. In video-camera mode, records whether this
// frame should be dropped (consumed later via skipCurrentFrame(), presumably
// by the base class — confirm in CameraSource) and remaps timestampUs for
// kept frames; see skipFrameAndModifyTimeStamp(). Always forwards to
// CameraSource for the actual frame handling.
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}
326
327}  // namespace android
328