CameraSourceTimeLapse.cpp revision 616715ab614ce25ea395a8e9a553a82637f1021d
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "CameraSourceTimeLapse"
19
20#include <binder/IPCThreadState.h>
21#include <binder/MemoryBase.h>
22#include <binder/MemoryHeapBase.h>
23#include <media/stagefright/CameraSource.h>
24#include <media/stagefright/CameraSourceTimeLapse.h>
25#include <media/stagefright/MediaDebug.h>
26#include <media/stagefright/MetaData.h>
27#include <media/stagefright/YUVImage.h>
28#include <media/stagefright/YUVCanvas.h>
29#include <camera/Camera.h>
30#include <camera/CameraParameters.h>
31#include <ui/Rect.h>
32#include <utils/String8.h>
33#include <utils/Vector.h>
34#include "OMX_Video.h"
35#include <limits.h>
36
37namespace android {
38
39// static
40CameraSourceTimeLapse *CameraSourceTimeLapse::Create(
41        int64_t timeBetweenTimeLapseFrameCaptureUs,
42        int32_t width, int32_t height,
43        int32_t videoFrameRate) {
44    sp<Camera> camera = Camera::connect(0);
45
46    if (camera.get() == NULL) {
47        return NULL;
48    }
49
50    return new CameraSourceTimeLapse(camera, timeBetweenTimeLapseFrameCaptureUs,
51            width, height, videoFrameRate);
52}
53
54// static
55CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera,
56        int64_t timeBetweenTimeLapseFrameCaptureUs,
57        int32_t width, int32_t height,
58        int32_t videoFrameRate) {
59    if (camera.get() == NULL) {
60        return NULL;
61    }
62
63    return new CameraSourceTimeLapse(camera, timeBetweenTimeLapseFrameCaptureUs,
64            width, height, videoFrameRate);
65}
66
// Chooses between the two capture strategies: if the camera can preview at
// exactly (width, height), normal video recording is used; otherwise still
// pictures are taken in a loop and cropped down to the video size.
CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate)
    : CameraSource(camera),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      // Playback spacing between output frames: 1e6/fps microseconds
      // (double arithmetic truncated into the int64 member).
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGV("starting time lapse mode");
    mVideoWidth = width;
    mVideoHeight = height;

    if (trySettingPreviewSize(width, height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the fastest rate at which the still camera can take pictures.
        mUseStillCameraForTimeLapse = true;
        // Pick the smallest supported picture size covering the video size;
        // abort recording setup if none exists.
        CHECK(setPictureSizeToClosestSupported(width, height));
        mNeedCropping = computeCropRectangleOffset();
        // Output frames are the requested video size even though the still
        // captures may be larger.
        mMeta->setInt32(kKeyWidth, width);
        mMeta->setInt32(kKeyHeight, height);
    }
}
93
// Nothing to release at this level; base-class destruction proceeds as usual.
CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
96
97bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
98    int64_t token = IPCThreadState::self()->clearCallingIdentity();
99    String8 s = mCamera->getParameters();
100    IPCThreadState::self()->restoreCallingIdentity(token);
101
102    CameraParameters params(s);
103    Vector<Size> supportedSizes;
104    params.getSupportedPreviewSizes(supportedSizes);
105
106    bool previewSizeSupported = false;
107    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
108        int32_t pictureWidth = supportedSizes[i].width;
109        int32_t pictureHeight = supportedSizes[i].height;
110
111        if ((pictureWidth == width) && (pictureHeight == height)) {
112            previewSizeSupported = true;
113        }
114    }
115
116    if (previewSizeSupported) {
117        LOGV("Video size (%d, %d) is a supported preview size", width, height);
118        params.setPreviewSize(width, height);
119        CHECK(mCamera->setParameters(params.flatten()));
120        return true;
121    }
122
123    return false;
124}
125
126bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
127    int64_t token = IPCThreadState::self()->clearCallingIdentity();
128    String8 s = mCamera->getParameters();
129    IPCThreadState::self()->restoreCallingIdentity(token);
130
131    CameraParameters params(s);
132    Vector<Size> supportedSizes;
133    params.getSupportedPictureSizes(supportedSizes);
134
135    int32_t minPictureSize = INT_MAX;
136    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
137        int32_t pictureWidth = supportedSizes[i].width;
138        int32_t pictureHeight = supportedSizes[i].height;
139
140        if ((pictureWidth >= width) && (pictureHeight >= height)) {
141            int32_t pictureSize = pictureWidth*pictureHeight;
142            if (pictureSize < minPictureSize) {
143                minPictureSize = pictureSize;
144                mPictureWidth = pictureWidth;
145                mPictureHeight = pictureHeight;
146            }
147        }
148    }
149    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
150    return (minPictureSize != INT_MAX);
151}
152
153bool CameraSourceTimeLapse::computeCropRectangleOffset() {
154    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
155        return false;
156    }
157
158    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
159
160    int32_t widthDifference = mPictureWidth - mVideoWidth;
161    int32_t heightDifference = mPictureHeight - mVideoHeight;
162
163    mCropRectStartX = widthDifference/2;
164    mCropRectStartY = heightDifference/2;
165
166    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
167
168    return true;
169}
170
171// static
172void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
173    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
174    source->threadTimeLapseEntry();
175    return NULL;
176}
177
// Worker loop for still-camera time lapse: while recording is active, take
// one picture every mTimeBetweenTimeLapseFrameCaptureUs, waiting out any
// in-flight capture with a short poll.
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while(mStarted) {
        if (mCameraIdle) {
            LOGV("threadTimeLapseEntry: taking picture");
            CHECK_EQ(OK, mCamera->takePicture());
            // Camera stays "busy" until threadStartPreview() restores the
            // preview and flips mCameraIdle back to true.
            mCameraIdle = false;
            usleep(mTimeBetweenTimeLapseFrameCaptureUs);
        } else {
            LOGV("threadTimeLapseEntry: camera busy with old takePicture. Sleeping a little.");
            // Poll every 10 ms (1E4 us) until the previous capture finishes.
            usleep(1E4);
        }
    }
    // NOTE(review): mStarted and mCameraIdle are shared with other threads
    // without explicit synchronization here — confirm this is intended.
}
191
// Starts capture. In still-camera mode this configures the chosen picture
// size, silences the shutter, plays the recording sound, and spawns a
// joinable thread looping on takePicture(); in video-camera mode it simply
// starts normal camera recording.
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        // Drop our calling identity while talking to the camera service.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();
        IPCThreadState::self()->restoreCallingIdentity(token);

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        // NOTE(review): setParameters' return value is ignored here, unlike
        // the CHECK in trySettingPreviewSize() — confirm that is intended.
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        // Joinable so stopCameraRecording() can wait for the loop to exit.
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}
221
// Stops capture. In still-camera mode, join the picture-taking thread (its
// loop exits once mStarted is cleared — presumably by the base class's stop
// path; verify) and restart the preview. In video-camera mode, stop recording.
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // play the recording sound and restart preview.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
        CHECK_EQ(OK, mCamera->startPreview());
    } else {
        mCamera->stopRecording();
    }
}
234
235void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
236    if (!mUseStillCameraForTimeLapse) {
237        mCamera->releaseRecordingFrame(frame);
238    }
239}
240
241sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
242    size_t source_size = source_data->size();
243    void* source_pointer = source_data->pointer();
244
245    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
246    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
247    memcpy(newMemory->pointer(), source_pointer, source_size);
248    return newMemory;
249}
250
251// Allocates IMemory of final type MemoryBase with the given size.
252sp<IMemory> allocateIMemory(size_t size) {
253    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
254    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
255    return newMemory;
256}
257
258// static
259void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
260    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
261    source->threadStartPreview();
262    return NULL;
263}
264
// Runs on a detached thread (see restartPreview): restart the preview after a
// takePicture() completes, then mark the camera idle so threadTimeLapseEntry()
// may take the next picture. Order matters: idle is set only after the
// preview has successfully started.
void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    mCameraIdle = true;
}
269
270void CameraSourceTimeLapse::restartPreview() {
271    // Start this in a different thread, so that the dataCallback can return
272    LOGV("restartPreview");
273    pthread_attr_t attr;
274    pthread_attr_init(&attr);
275    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
276
277    pthread_t threadPreview;
278    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
279    pthread_attr_destroy(&attr);
280}
281
282sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
283    // find the YUV format
284    int32_t srcFormat;
285    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
286    YUVImage::YUVFormat yuvFormat;
287    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
288        yuvFormat = YUVImage::YUV420SemiPlanar;
289    } else {
290        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
291        yuvFormat = YUVImage::YUV420Planar;
292    }
293
294    // allocate memory for cropped image and setup a canvas using it.
295    sp<IMemory> croppedImageMemory = allocateIMemory(
296            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
297    YUVImage yuvImageCropped(yuvFormat,
298            mVideoWidth, mVideoHeight,
299            (uint8_t *)croppedImageMemory->pointer());
300    YUVCanvas yuvCanvasCrop(yuvImageCropped);
301
302    YUVImage yuvImageSource(yuvFormat,
303            mPictureWidth, mPictureHeight,
304            (uint8_t *)source_data->pointer());
305    yuvCanvasCrop.CopyImageRect(
306            Rect(mCropRectStartX, mCropRectStartY,
307                mCropRectStartX + mVideoWidth,
308                mCropRectStartY + mVideoHeight),
309            0, 0,
310            yuvImageSource);
311
312    return croppedImageMemory;
313}
314
// Callback for still-camera captures. The compressed-image message is used
// only as a signal that takePicture() has finished so the preview can be
// restarted; the raw image becomes a time-lapse video frame with a
// synthesized timestamp.
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    // Synthesize timestamps: the first frame gets the start time; every later
    // frame is exactly one video-frame interval after the previous one, so
    // playback runs at the requested frame rate regardless of capture timing.
    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        // Picture is larger than the video size: forward a centered crop.
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        // Forward a private copy — presumably so the camera's own buffer is
        // not retained downstream (verify against the camera HAL contract).
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}
343
344bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
345    if (mSkipCurrentFrame) {
346        mSkipCurrentFrame = false;
347        return true;
348    } else {
349        return false;
350    }
351}
352
// Video-camera mode only: decides whether the frame whose real capture time
// is *timestampUs should be dropped, and rewrites the timestamp of kept
// frames so the output plays back at the requested video frame rate.
// Returns true to skip the frame. In still-camera mode it always returns
// false and leaves *timestampUs untouched.
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        } else if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        }
    }
    return false;
}
381
// Callback for recording frames. In video-camera mode, the skip/retime
// decision is made here and remembered in mSkipCurrentFrame (consumed later
// by skipCurrentFrame()); the possibly-retimed frame is then forwarded to
// CameraSource's normal path. Still-camera frames arrive here via
// dataCallback() with an already-synthesized timestamp and are never skipped.
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}
389
390}  // namespace android
391