CameraSourceTimeLapse.cpp revision 4ca2c7c913f8bd4ada13aca56d36045d42d1e00f
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
#include "OMX_Video.h"
#include <limits.h>

namespace android {

// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        const sp<ICameraRecordingProxy> &proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                videoSize, videoFrameRate, surface,
                timeBetweenTimeLapseFrameCaptureUs);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs)
    : CameraSource(camera, proxy, cameraId, videoSize, videoFrameRate, surface, true),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (trySettingVideoSize(videoSize.width, videoSize.height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the minimum time the still camera needs between consecutive pictures.
        mUseStillCameraForTimeLapse = true;
        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
        mNeedCropping = computeCropRectangleOffset();
        mMeta->setInt32(kKeyWidth, videoSize.width);
        mMeta->setInt32(kKeyHeight, videoSize.height);
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}

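// Enables quick stop mode: after this call read() keeps returning a copy of the
// last frame it read, so that stop() does not stay blocked on the camera.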
void CameraSourceTimeLapse::startQuickReadReturns() {
    Mutex::Autolock autoLock(mQuickStopLock);
    LOGV("Enabling quick read returns");

    // Enable quick stop mode.
    mQuickStop = true;

    if (mUseStillCameraForTimeLapse) {
        // wake up the thread right away.
        mTakePictureCondition.signal();
    } else {
        // Force dataCallbackTimestamp() coming from the video camera to not skip the
        // next frame, as we want read() to get a frame right away.
        mForceRead = true;
    }
}

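// Tries to set the camera's video (or, if video sizes are not advertised, preview)
// size to the requested dimensions. Returns true if the size is supported and the
// parameters were applied successfully.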
bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
    LOGV("trySettingVideoSize: %dx%d", width, height);
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        LOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            LOGE("Failed to set preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

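// Picks the smallest supported picture size that is at least as large as the
// requested video size. Returns false if no supported size is large enough.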
bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
    LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPictureSizes(supportedSizes);

    int32_t minPictureSize = INT_MAX;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth >= width) && (pictureHeight >= height)) {
            int32_t pictureSize = pictureWidth*pictureHeight;
            if (pictureSize < minPictureSize) {
                minPictureSize = pictureSize;
                mPictureWidth = pictureWidth;
                mPictureHeight = pictureHeight;
            }
        }
    }
    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
    return (minPictureSize != INT_MAX);
}

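// Computes the top-left offset of the centered crop rectangle used to crop the
// still picture down to the video size. Returns true if cropping is needed.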
bool CameraSourceTimeLapse::computeCropRectangleOffset() {
    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
        return false;
    }

    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));

    int32_t widthDifference = mPictureWidth - mVideoWidth;
    int32_t heightDifference = mPictureHeight - mVideoHeight;

    mCropRectStartX = widthDifference/2;
    mCropRectStartY = heightDifference/2;

    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);

    return true;
}

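// In quick stop mode the copy of the last read frame is owned by this class, so
// release it here instead of handing it back to CameraSource.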
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

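// Makes a deep copy of sourceBuffer in *newBuffer and stamps it with frameTime.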
void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

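// Saves a copy of sourceBuffer (with its timestamp) in mLastReadBufferCopy so
// that subsequent read() calls in quick stop mode can keep returning it.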
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

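// Normally delegates to CameraSource::read(). Once quick stop mode has captured a
// frame copy, it keeps returning that copy instead of blocking for new frames.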
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked. Make a copy of
        // the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadTimeLapseEntry();
    return NULL;
}

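// Still-camera capture loop: waits for the camera to become idle, takes a picture,
// and then waits up to mTimeBetweenTimeLapseFrameCaptureUs before repeating. Exits
// when recording stops or quick stop mode is enabled.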
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        {
            Mutex::Autolock autoLock(mCameraIdleLock);
            if (!mCameraIdle) {
                mCameraIdleCondition.wait(mCameraIdleLock);
            }
            CHECK(mCameraIdle);
            mCameraIdle = false;
        }

        // Even if mQuickStop == true we need to take one more picture,
        // as a read() may be blocked waiting for a frame to become available.
        // After this takePicture(), if mQuickStop == true, we can safely exit
        // this thread: read() will make a copy of this last frame and keep
        // returning it in quick stop mode.
        Mutex::Autolock autoLock(mQuickStopLock);
        CHECK_EQ(OK, mCamera->takePicture(CAMERA_MSG_RAW_IMAGE));
        if (mQuickStop) {
            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
            return;
        }
        mTakePictureCondition.waitRelative(mQuickStopLock,
                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
    }
    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
}

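// In still-camera mode this configures the picture size, plays the recording sound
// and spawns the take-picture thread; otherwise it starts normal video recording
// via CameraSource.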
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;
        mStopWaitingForIdleCamera = false;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
        IPCThreadState::self()->restoreCallingIdentity(token);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CameraSource::startCameraRecording();
    }
}

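// In still-camera mode this joins the take-picture thread and waits for the last
// takePicture() to finish before detaching the listener; otherwise it stops normal
// video recording. In both cases it also releases the quick stop frame copy, if any.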
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // Last takePicture may still be underway. Wait for the camera to get
        // idle.
        Mutex::Autolock autoLock(mCameraIdleLock);
        mStopWaitingForIdleCamera = true;
        if (!mCameraIdle) {
            mCameraIdleCondition.wait(mCameraIdleLock);
        }
        CHECK(mCameraIdle);
        mCamera->setListener(NULL);

        // play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
    } else {
        CameraSource::stopCameraRecording();
    }
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

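// Recording frames only need to be released back to the camera in video-camera
// mode; the still-camera path hands out copies of the data instead.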
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (!mUseStillCameraForTimeLapse) {
        CameraSource::releaseRecordingFrame(frame);
    }
}

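// Returns a new IMemory (backed by a MemoryBase) containing a copy of source_data.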
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

// Allocates IMemory of final type MemoryBase with the given size.
sp<IMemory> allocateIMemory(size_t size) {
    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
    return newMemory;
}

// static
void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadStartPreview();
    return NULL;
}

void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    Mutex::Autolock autoLock(mCameraIdleLock);
    mCameraIdle = true;
    mCameraIdleCondition.signal();
}

void CameraSourceTimeLapse::restartPreview() {
    // Start this in a different thread, so that the dataCallback can return
    LOGV("restartPreview");
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

    pthread_t threadPreview;
    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
    pthread_attr_destroy(&attr);
}

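// Crops the centered mVideoWidth x mVideoHeight region out of the full-size still
// picture and returns it in newly allocated memory.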
sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
    // find the YUV format
    int32_t srcFormat;
    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
    YUVImage::YUVFormat yuvFormat;
    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        yuvFormat = YUVImage::YUV420SemiPlanar;
    } else {
        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
        yuvFormat = YUVImage::YUV420Planar;
    }

    // allocate memory for cropped image and setup a canvas using it.
    sp<IMemory> croppedImageMemory = allocateIMemory(
            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
    YUVImage yuvImageCropped(yuvFormat,
            mVideoWidth, mVideoHeight,
            (uint8_t *)croppedImageMemory->pointer());
    YUVCanvas yuvCanvasCrop(yuvImageCropped);

    YUVImage yuvImageSource(yuvFormat,
            mPictureWidth, mPictureHeight,
            (uint8_t *)source_data->pointer());
    yuvCanvasCrop.CopyImageRect(
            Rect(mCropRectStartX, mCropRectStartY,
                mCropRectStartX + mVideoWidth,
                mCropRectStartY + mVideoHeight),
            0, 0,
            yuvImageSource);

    return croppedImageMemory;
}

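// Handles still-camera callbacks: restarts the preview once the compressed image
// arrives, and forwards raw frames (cropped and/or copied) to dataCallbackTimestamp()
// with synthesized time lapse timestamps.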
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}

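// Returns (and clears) the skip flag that dataCallbackTimestamp() set for the
// current frame.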
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

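// For video-camera mode: decides whether the current frame should be dropped so
// that only one frame is kept per mTimeBetweenTimeLapseFrameCaptureUs interval, and
// rewrites the timestamps of kept frames so they are one video frame period apart.
// Returns true if the frame should be skipped.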
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            return false;
        }

        {
            Mutex::Autolock autoLock(mQuickStopLock);

            // mForceRead may be set to true by startQuickReadReturns(). In that
            // case don't skip this frame.
            if (mForceRead) {
                LOGV("dataCallbackTimestamp timelapse: forced read");
                mForceRead = false;
                *timestampUs =
                    mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
                return false;
            }
        }

        // Workaround: exempt the initial input frames from skipping.
        // The first 2 output frames from the encoder are the decoder specific info and
        // the compressed video frame data for the first input video frame.
        if (mNumFramesEncoded >= 1 && *timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
            return false;
        }
    }
    return false;
}

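// Video-camera callback: applies the time lapse skipping logic before handing the
// frame to CameraSource. In still-camera mode it bails out early if stop() is
// already waiting for the camera to become idle, to avoid a deadlock.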
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    } else {
        Mutex::Autolock autoLock(mCameraIdleLock);
        // If we are using the still camera and stop() has been called, it may be
        // waiting for the camera to become idle. In that case return immediately.
        // Calling CameraSource::dataCallbackTimestamp() would lead to a deadlock:
        // it tries to acquire CameraSource::mLock, which is held by
        // CameraSource::stop() while it waits for the camera to become idle, and
        // the camera cannot become idle until this call returns.
        if (mStopWaitingForIdleCamera) {
            return;
        }
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

}  // namespace android