CameraSourceTimeLapse.cpp revision ba29002c7aee13c068049037cd14bba6a244da6b
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "CameraSourceTimeLapse"
19
20#include <binder/IPCThreadState.h>
21#include <binder/MemoryBase.h>
22#include <binder/MemoryHeapBase.h>
23#include <media/stagefright/CameraSource.h>
24#include <media/stagefright/CameraSourceTimeLapse.h>
25#include <media/stagefright/MediaDebug.h>
26#include <media/stagefright/MetaData.h>
27#include <media/stagefright/YUVImage.h>
28#include <media/stagefright/YUVCanvas.h>
29#include <camera/Camera.h>
30#include <camera/CameraParameters.h>
31#include <ui/Rect.h>
32#include <utils/String8.h>
33#include <utils/Vector.h>
34#include "OMX_Video.h"
35#include <limits.h>
36
37namespace android {
38
39// static
40CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
41        const sp<ICamera> &camera,
42        int32_t cameraId,
43        Size videoSize,
44        int32_t videoFrameRate,
45        const sp<Surface>& surface,
46        int64_t timeBetweenTimeLapseFrameCaptureUs) {
47
48    CameraSourceTimeLapse *source = new
49            CameraSourceTimeLapse(camera, cameraId,
50                videoSize, videoFrameRate, surface,
51                timeBetweenTimeLapseFrameCaptureUs);
52
53    if (source != NULL) {
54        if (source->initCheck() != OK) {
55            delete source;
56            return NULL;
57        }
58    }
59    return source;
60}
61
// Constructs a time lapse camera source. If the camera supports a preview
// size equal to the requested video size, frames are pulled from the video
// (preview) path and intermediate frames are skipped; otherwise the still
// camera is used and each captured picture is cropped down to the video size.
CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs)
    : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      // Output timestamps are spaced at the playback frame interval.
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (trySettingPreviewSize(videoSize.width, videoSize.height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the fastest rate at which the still camera can take pictures.
        mUseStillCameraForTimeLapse = true;
        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
        mNeedCropping = computeCropRectangleOffset();
        // Output frames are the (possibly cropped) video size, so make the
        // advertised metadata match it rather than the still-picture size.
        mMeta->setInt32(kKeyWidth, videoSize.width);
        mMeta->setInt32(kKeyHeight, videoSize.height);
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}
97
// All cleanup happens in stopCameraRecording() / the base class; nothing
// extra to release here.
CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
100
// Switches the source into "quick stop" mode: subsequent read() calls keep
// returning a copy of the last frame instead of blocking for a new capture,
// so that a pending stop() can complete quickly.
void CameraSourceTimeLapse::startQuickReadReturns() {
    Mutex::Autolock autoLock(mQuickStopLock);
    LOGV("Enabling quick read returns");

    // Enable quick stop mode.
    mQuickStop = true;

    if (mUseStillCameraForTimeLapse) {
        // wake up the thread right away.
        mTakePictureCondition.signal();
    } else {
        // Force dataCallbackTimestamp() coming from the video camera to not skip the
        // next frame as we want read() to get a frame right away.
        mForceRead = true;
    }
}
117
118bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
119    LOGV("trySettingPreviewSize: %dx%d", width, height);
120    int64_t token = IPCThreadState::self()->clearCallingIdentity();
121    String8 s = mCamera->getParameters();
122
123    CameraParameters params(s);
124    Vector<Size> supportedSizes;
125    params.getSupportedPreviewSizes(supportedSizes);
126
127    bool previewSizeSupported = false;
128    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
129        int32_t pictureWidth = supportedSizes[i].width;
130        int32_t pictureHeight = supportedSizes[i].height;
131
132        if ((pictureWidth == width) && (pictureHeight == height)) {
133            previewSizeSupported = true;
134        }
135    }
136
137    bool isSuccessful = false;
138    if (previewSizeSupported) {
139        LOGV("Video size (%d, %d) is a supported preview size", width, height);
140        params.setPreviewSize(width, height);
141        if (mCamera->setParameters(params.flatten()) == OK) {
142            isSuccessful = true;
143        } else {
144            LOGE("Failed to set preview size to %dx%d", width, height);
145            isSuccessful = false;
146        }
147    }
148
149    IPCThreadState::self()->restoreCallingIdentity(token);
150    return isSuccessful;
151}
152
153bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
154    LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
155    int64_t token = IPCThreadState::self()->clearCallingIdentity();
156    String8 s = mCamera->getParameters();
157    IPCThreadState::self()->restoreCallingIdentity(token);
158
159    CameraParameters params(s);
160    Vector<Size> supportedSizes;
161    params.getSupportedPictureSizes(supportedSizes);
162
163    int32_t minPictureSize = INT_MAX;
164    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
165        int32_t pictureWidth = supportedSizes[i].width;
166        int32_t pictureHeight = supportedSizes[i].height;
167
168        if ((pictureWidth >= width) && (pictureHeight >= height)) {
169            int32_t pictureSize = pictureWidth*pictureHeight;
170            if (pictureSize < minPictureSize) {
171                minPictureSize = pictureSize;
172                mPictureWidth = pictureWidth;
173                mPictureHeight = pictureHeight;
174            }
175        }
176    }
177    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
178    return (minPictureSize != INT_MAX);
179}
180
181bool CameraSourceTimeLapse::computeCropRectangleOffset() {
182    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
183        return false;
184    }
185
186    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
187
188    int32_t widthDifference = mPictureWidth - mVideoWidth;
189    int32_t heightDifference = mPictureHeight - mVideoHeight;
190
191    mCropRectStartX = widthDifference/2;
192    mCropRectStartY = heightDifference/2;
193
194    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
195
196    return true;
197}
198
199void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
200    Mutex::Autolock autoLock(mQuickStopLock);
201    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
202        buffer->setObserver(NULL);
203        buffer->release();
204    } else {
205        return CameraSource::signalBufferReturned(buffer);
206    }
207}
208
209void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
210    size_t sourceSize = sourceBuffer.size();
211    void* sourcePointer = sourceBuffer.data();
212
213    (*newBuffer) = new MediaBuffer(sourceSize);
214    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);
215
216    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
217}
218
// Caches a copy of sourceBuffer (with its original timestamp) in
// mLastReadBufferCopy so quick-stop read() calls can keep returning it.
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    // Take an extra reference and install ourselves as observer so that
    // signalBufferReturned() can recognize this copy and release it here
    // instead of recycling it through CameraSource.
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}
226
// Returns the next frame. Normally this delegates to CameraSource::read().
// Once quick-stop is active, the first frame read afterwards is copied into
// mLastReadBufferCopy and every later call returns another reference to that
// same copy, so the caller never blocks waiting for a new capture.
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked. Make a copy of
        // the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        // Quick-stop mode: hand out another reference to the cached frame
        // along with the status of the last real read.
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}
245
246// static
247void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
248    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
249    source->threadTimeLapseEntry();
250    return NULL;
251}
252
// Still-camera capture loop: wait for the camera to become idle, take a
// picture, then sleep mTimeBetweenTimeLapseFrameCaptureUs before repeating.
// Runs until stop() clears mStarted, or until quick-stop is requested.
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        {
            Mutex::Autolock autoLock(mCameraIdleLock);
            if (!mCameraIdle) {
                // Wait until threadStartPreview() signals that the
                // previous takePicture cycle has finished.
                mCameraIdleCondition.wait(mCameraIdleLock);
            }
            CHECK(mCameraIdle);
            mCameraIdle = false;
        }

        // Even if mQuickStop == true we need to take one more picture
        // as a read() may be blocked, waiting for a frame to get available.
        // After this takePicture, if mQuickStop == true, we can safely exit
        // this thread as read() will make a copy of this last frame and keep
        // returning it in the quick stop mode.
        Mutex::Autolock autoLock(mQuickStopLock);
        CHECK_EQ(OK, mCamera->takePicture());
        if (mQuickStop) {
            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
            return;
        }
        // Sleep until the next capture is due; startQuickReadReturns()
        // signals this condition to cut the wait short. (us -> ns)
        mTakePictureCondition.waitRelative(mQuickStopLock,
                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
    }
    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
}
280
// Begins capturing frames. Still-camera mode configures the picture size,
// adjusts sounds, and spawns the takePicture loop thread; video-camera mode
// simply starts camera recording.
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        // Talk to the camera service under our own identity, not the caller's.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        // Mark the camera ready so the capture thread's first iteration
        // does not block on the idle condition.
        mCameraIdle = true;
        mStopWaitingForIdleCamera = false;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
        IPCThreadState::self()->restoreCallingIdentity(token);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        // Joinable so stopCameraRecording() can wait for the loop to exit.
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}
311
// Stops capturing. Still-camera mode joins the takePicture thread and waits
// for any in-flight takePicture to finish before detaching the listener;
// video-camera mode stops recording directly. In both modes the buffer
// cached for quick-stop reads, if any, is released.
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // Last takePicture may still be underway. Wait for the camera to get
        // idle.
        Mutex::Autolock autoLock(mCameraIdleLock);
        // Tell dataCallbackTimestamp() to return immediately instead of
        // entering CameraSource (see the deadlock note there).
        mStopWaitingForIdleCamera = true;
        if (!mCameraIdle) {
            mCameraIdleCondition.wait(mCameraIdleLock);
        }
        CHECK(mCameraIdle);
        mCamera->setListener(NULL);

        // play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
    // Drop the frame copy cached for quick-stop reads, if any.
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}
338
339void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
340    if (!mUseStillCameraForTimeLapse) {
341        mCamera->releaseRecordingFrame(frame);
342    }
343}
344
345sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
346    size_t source_size = source_data->size();
347    void* source_pointer = source_data->pointer();
348
349    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
350    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
351    memcpy(newMemory->pointer(), source_pointer, source_size);
352    return newMemory;
353}
354
355// Allocates IMemory of final type MemoryBase with the given size.
356sp<IMemory> allocateIMemory(size_t size) {
357    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
358    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
359    return newMemory;
360}
361
362// static
363void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
364    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
365    source->threadStartPreview();
366    return NULL;
367}
368
// Runs on a detached thread (see restartPreview()): restarts the preview
// after a takePicture, then marks the camera idle so the capture loop (and
// a possibly waiting stopCameraRecording()) can proceed.
void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    Mutex::Autolock autoLock(mCameraIdleLock);
    mCameraIdle = true;
    mCameraIdleCondition.signal();
}
375
376void CameraSourceTimeLapse::restartPreview() {
377    // Start this in a different thread, so that the dataCallback can return
378    LOGV("restartPreview");
379    pthread_attr_t attr;
380    pthread_attr_init(&attr);
381    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
382
383    pthread_t threadPreview;
384    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
385    pthread_attr_destroy(&attr);
386}
387
388sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
389    // find the YUV format
390    int32_t srcFormat;
391    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
392    YUVImage::YUVFormat yuvFormat;
393    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
394        yuvFormat = YUVImage::YUV420SemiPlanar;
395    } else {
396        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
397        yuvFormat = YUVImage::YUV420Planar;
398    }
399
400    // allocate memory for cropped image and setup a canvas using it.
401    sp<IMemory> croppedImageMemory = allocateIMemory(
402            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
403    YUVImage yuvImageCropped(yuvFormat,
404            mVideoWidth, mVideoHeight,
405            (uint8_t *)croppedImageMemory->pointer());
406    YUVCanvas yuvCanvasCrop(yuvImageCropped);
407
408    YUVImage yuvImageSource(yuvFormat,
409            mPictureWidth, mPictureHeight,
410            (uint8_t *)source_data->pointer());
411    yuvCanvasCrop.CopyImageRect(
412            Rect(mCropRectStartX, mCropRectStartY,
413                mCropRectStartX + mVideoWidth,
414                mCropRectStartY + mVideoHeight),
415            0, 0,
416            yuvImageSource);
417
418    return croppedImageMemory;
419}
420
// Camera callback for still-mode captures. Raw frames get synthetic
// timestamps (evenly spaced at the playback frame interval), are cropped or
// copied, and forwarded to dataCallbackTimestamp(). The compressed-image
// message only signals that the preview can be restarted.
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    // Synthesize timestamps one video-frame interval apart so the time
    // lapse plays back at the requested frame rate.
    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        // Copy so the camera can reuse its own buffer immediately.
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}
449
450bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
451    if (mSkipCurrentFrame) {
452        mSkipCurrentFrame = false;
453        return true;
454    } else {
455        return false;
456    }
457}
458
// Video-camera mode frame gating: decides whether the frame that arrived at
// real time *timestampUs should be dropped, and rewrites *timestampUs so
// that kept frames are exactly one video-frame interval apart. Returns true
// if the frame should be skipped. Still-camera mode never skips here, since
// pictures are already captured at the time lapse rate.
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            return false;
        }

        {
            Mutex::Autolock autoLock(mQuickStopLock);

            // mForceRead may be set to true by startQuickReadReturns(). In that
            // case don't skip this frame.
            if (mForceRead) {
                LOGV("dataCallbackTimestamp timelapse: forced read");
                mForceRead = false;
                // Reuse the previous timestamp so the forced frame does not
                // advance the output timeline.
                *timestampUs = mLastFrameTimestampUs;
                return false;
            }
        }

        if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
            return false;
        }
    }
    return false;
}
504
// Timestamped-frame callback shared by both modes. Video mode records
// whether this frame should be skipped and adjusts its timestamp; still
// mode bails out early if stop() is already waiting for the camera to go
// idle (see deadlock note below). Surviving frames go to CameraSource.
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    } else {
        Mutex::Autolock autoLock(mCameraIdleLock);
        // If we are using the still camera and stop() has been called, it may
        // be waiting for the camera to get idle. In that case return
        // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
        // to a deadlock since it tries to access CameraSource::mLock which in
        // this case is held by CameraSource::stop() currently waiting for the
        // camera to get idle. And camera will not get idle until this call
        // returns.
        if (mStopWaitingForIdleCamera) {
            return;
        }
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}
524
525}  // namespace android
526