/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <utils/Vector.h>

namespace android {

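// Factory helper: constructs a CameraSourceTimeLapse and verifies that the
// underlying camera source initialized correctly; returns NULL on failure.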
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        const sp<ICameraRecordingProxy> &proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                clientName, clientUid,
                videoSize, videoFrameRate, surface,
                timeBetweenFrameCaptureUs);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

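// mTimeBetweenTimeLapseVideoFramesUs is the playback spacing of encoded frames
// (1/videoFrameRate), while timeBetweenFrameCaptureUs controls how often a
// frame is actually captured from the camera.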
CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs)
      : CameraSource(camera, proxy, cameraId, clientName, clientUid,
                videoSize, videoFrameRate, surface, true),
      mTimeBetweenTimeLapseVideoFramesUs(1E6 / videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
    ALOGD("starting time lapse mode: %lld us",
        (long long) mTimeBetweenFrameCaptureUs);

    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (!trySettingVideoSize(videoSize.width, videoSize.height)) {
        mInitCheck = NO_INIT;
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

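// Enables quick-stop mode: once set, read() returns a copy of the last read
// frame instead of blocking on the camera, so recording can be stopped without
// waiting out the next time-lapse capture interval.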
void CameraSourceTimeLapse::startQuickReadReturns() {
    ALOGV("startQuickReadReturns");
    Mutex::Autolock autoLock(mQuickStopLock);

    // Enable quick stop mode.
    mQuickStop = true;

    // Force dataCallbackTimestamp() coming from the video camera to
    // not skip the next frame as we want read() to get a frame
    // right away.
    mForceRead = true;
}

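// Checks whether the requested dimensions are supported by the camera
// (preferring the dedicated video sizes, falling back to preview sizes) and,
// if so, applies them via setParameters().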
bool CameraSourceTimeLapse::trySettingVideoSize(
        int32_t width, int32_t height) {

    ALOGV("trySettingVideoSize");
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        ALOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            ALOGE("Failed to set video/preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

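// If the returned buffer is the private quick-stop copy owned by this class,
// release it here; otherwise forward it to CameraSource as usual.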
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    ALOGV("signalBufferReturned");
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        CameraSource::signalBufferReturned(buffer);
    }
}

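// Deep-copies the payload of sourceBuffer into a newly allocated MediaBuffer
// and stamps the copy with the given frame time.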
void createMediaBufferCopy(
        const MediaBuffer& sourceBuffer,
        int64_t frameTime,
        MediaBuffer **newBuffer) {

    ALOGV("createMediaBufferCopy");
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

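// Keeps a copy of the most recently read buffer so that subsequent quick-stop
// read() calls can return it without touching the camera.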
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    ALOGV("fillLastReadBufferCopy");
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

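// Normal reads are delegated to CameraSource. Once quick-stop mode has taken
// effect and a copy of the last frame exists, that copy is returned (with an
// extra reference) instead of blocking for a new frame.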
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked.
        // Make a copy of the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

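// Deep-copies an IMemory region into a new MemoryBase backed by its own heap.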
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
        const sp<IMemory> &source_data) {

    ALOGV("createIMemoryCopy");
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

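// Returns (and clears) the skip decision computed by dataCallbackTimestamp();
// the timestampUs argument is not used by the time-lapse implementation.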
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    ALOGV("skipCurrentFrame");
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

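// Decides whether the incoming camera frame should be dropped to honor the
// capture interval. For frames that are kept, *timestampUs is rewritten so
// that encoded frames end up spaced 1/framerate apart in the output video.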
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    ALOGV("skipFrameAndModifyTimeStamp");
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
        // to current time (timestampUs) and save frame data.
        ALOGV("dataCallbackTimestamp timelapse: initial frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }

    {
        Mutex::Autolock autoLock(mQuickStopLock);

        // mForceRead may be set to true by startQuickReadReturns(). In that
        // case don't skip this frame.
        if (mForceRead) {
            ALOGV("dataCallbackTimestamp timelapse: forced read");
            mForceRead = false;
            *timestampUs =
                mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;

            // Really make sure that this video recording frame will not be dropped.
            if (*timestampUs < mStartTimeUs) {
                ALOGI("set timestampUs to start time stamp %lld us",
                    (long long) mStartTimeUs);
                *timestampUs = mStartTimeUs;
            }
            return false;
        }
    }

    // Workaround to bypass the first 2 input frames for skipping.
    // The first 2 output frames from the encoder are: decoder specific info and
    // the compressed video frame data for the first input video frame.
    if (mNumFramesEncoded >= 1 && *timestampUs <
        (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenFrameCaptureUs)) {
        // Skip all frames from last encoded frame until
        // sufficient time (mTimeBetweenFrameCaptureUs) has passed.
        // Tell the camera to release its recording frame and return.
        ALOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
        return true;
    } else {
        // Desired frame has arrived after mTimeBetweenFrameCaptureUs time:
        // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
        // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
        //   of the last encoded frame's time stamp.
        ALOGV("dataCallbackTimestamp timelapse: got timelapse frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        return false;
    }
}

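// Camera recording callback: record whether this frame should be skipped and
// forward it (with the possibly adjusted timestamp) to CameraSource.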
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp");
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

}  // namespace android