CameraSourceTimeLapse.cpp revision a5750e0dad9e90f2195ce36f2c4457fa04b2b83e
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <utils/Vector.h>

namespace android {

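// Factory method: constructs a CameraSourceTimeLapse and verifies that the
// underlying camera initialized successfully; on failure the object is
// deleted and NULL is returned.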
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        const sp<ICameraRecordingProxy> &proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                clientName, clientUid,
                videoSize, videoFrameRate, surface,
                timeBetweenFrameCaptureUs,
                storeMetaDataInVideoBuffers);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers)
      : CameraSource(camera, proxy, cameraId, clientName, clientUid,
                videoSize, videoFrameRate, surface,
                storeMetaDataInVideoBuffers),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
    ALOGD("starting time lapse mode: %" PRId64 " us",
        mTimeBetweenFrameCaptureUs);

    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) {
        releaseCamera();
        mInitCheck = NO_INIT;
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

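// Puts the source into quick-stop mode so that recording can wind down
// promptly: the next camera frame is accepted immediately instead of waiting
// out the remainder of the time-lapse capture interval.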
void CameraSourceTimeLapse::startQuickReadReturns() {
    ALOGV("startQuickReadReturns");
    Mutex::Autolock autoLock(mQuickStopLock);

    // Enable quick stop mode.
    mQuickStop = true;

    // Force dataCallbackTimestamp() coming from the video camera to
    // not skip the next frame, as we want read() to get a frame
    // right away.
    mForceRead = true;
}

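// Checks whether width x height is among the sizes advertised by the camera
// and, if so, applies it as the video size (or as the preview size on cameras
// that do not report separate video sizes). Returns true only if the size is
// supported and the camera accepted the new parameters.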
bool CameraSourceTimeLapse::trySettingVideoSize(
        int32_t width, int32_t height) {

    ALOGV("trySettingVideoSize");
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        ALOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            ALOGE("Failed to set video/preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

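// The last-read buffer copy is owned by this class rather than by the camera,
// so in quick-stop mode it is released directly here instead of being handed
// back through CameraSource.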
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    ALOGV("signalBufferReturned");
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

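// Allocates a new MediaBuffer, deep-copies the source buffer's payload into
// it, and stamps the copy with the given frame time.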
void createMediaBufferCopy(
        const MediaBuffer& sourceBuffer,
        int64_t frameTime,
        MediaBuffer **newBuffer) {

    ALOGV("createMediaBufferCopy");
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

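// Keeps a deep copy of the most recently read buffer so that read() can keep
// returning it after a quick stop has been requested.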
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    ALOGV("fillLastReadBufferCopy");
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

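// Once quick-stop mode has captured a copy of the last frame, every
// subsequent read() hands out another reference to that copy instead of
// blocking on new camera frames.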
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have been set to true while read() was blocked.
        // Make a copy of the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

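// Deep-copies the contents of the given IMemory into a newly allocated
// MemoryHeapBase-backed MemoryBase.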
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
        const sp<IMemory> &source_data) {

    ALOGV("createIMemoryCopy");
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

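// Consumes the skip flag set in dataCallbackTimestamp(): returns true exactly
// once for a frame that skipFrameAndModifyTimeStamp() decided to drop, then
// clears the flag.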
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
    ALOGV("skipCurrentFrame");
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

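// Decides whether the frame captured at *timestampUs (real time) should be
// dropped. For frames that are kept, *timestampUs is rewritten so that
// consecutive encoded frames appear mTimeBetweenTimeLapseVideoFramesUs apart
// in the output, regardless of the real capture interval.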
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    ALOGV("skipFrameAndModifyTimeStamp");
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
        // to current time (timestampUs) and save frame data.
        ALOGV("dataCallbackTimestamp timelapse: initial frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }

    {
        Mutex::Autolock autoLock(mQuickStopLock);

        // mForceRead may be set to true by startQuickReadReturns(). In that
        // case don't skip this frame.
        if (mForceRead) {
            ALOGV("dataCallbackTimestamp timelapse: forced read");
            mForceRead = false;
            *timestampUs =
                mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;

            // Really make sure that this video recording frame will not be dropped.
            if (*timestampUs < mStartTimeUs) {
                ALOGI("set timestampUs to start time stamp %" PRId64 " us", mStartTimeUs);
                *timestampUs = mStartTimeUs;
            }
            return false;
        }
    }

    // Workaround to bypass the first 2 input frames for skipping.
    // The first 2 output frames from the encoder are: decoder specific info and
    // the compressed video frame data for the first input video frame.
    if (mNumFramesEncoded >= 1 && *timestampUs <
        (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenFrameCaptureUs)) {
        // Skip all frames from last encoded frame until
        // sufficient time (mTimeBetweenFrameCaptureUs) has passed.
        // Tell the camera to release its recording frame and return.
        ALOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
        return true;
    } else {
        // Desired frame has arrived after mTimeBetweenFrameCaptureUs time:
        // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
        // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
        // of the last encoded frame's time stamp.
        ALOGV("dataCallbackTimestamp timelapse: got timelapse frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        return false;
    }
    return false;
}

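// Per-frame camera callback: records whether this frame should be skipped and
// forwards the (possibly adjusted) timestamp to the base class implementation.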
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp");
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

}  // namespace android