/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <utils/Vector.h>

namespace android {

// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<hardware::ICamera> &camera,
        const sp<ICameraRecordingProxy> &proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                clientName, clientUid, clientPid,
                videoSize, videoFrameRate, surface,
                timeBetweenFrameCaptureUs,
                storeMetaDataInVideoBuffers);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<hardware::ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers)
      : CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
                videoSize, videoFrameRate, surface,
                storeMetaDataInVideoBuffers),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
    ALOGD("starting time lapse mode: %" PRId64 " us",
        mTimeBetweenFrameCaptureUs);

    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) {
        releaseCamera();
        mInitCheck = NO_INIT;
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

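// Enables quick stop mode: the next incoming frame is force-read instead of
// skipped, and once a frame has been read a copy of it is kept so that read()
// can return immediately rather than block until the next time lapse capture.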
void CameraSourceTimeLapse::startQuickReadReturns() {
    ALOGV("startQuickReadReturns");
    Mutex::Autolock autoLock(mQuickStopLock);

    // Enable quick stop mode.
    mQuickStop = true;

    // Force dataCallbackTimestamp() coming from the video camera to
    // not skip the next frame, as we want read() to get a frame
    // right away.
    mForceRead = true;
}

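// Checks whether the requested video dimensions are supported by the camera,
// falling back to the supported preview sizes when the camera does not report
// separate video sizes, and applies them via setParameters(). Returns true
// only if the size is supported and was set successfully.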
bool CameraSourceTimeLapse::trySettingVideoSize(
        int32_t width, int32_t height) {

    ALOGV("trySettingVideoSize");
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        ALOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            ALOGE("Failed to set %s size to %dx%d",
                    videoOutputSupported ? "video" : "preview", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

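// In quick stop mode the copy of the last read buffer is owned by this class,
// so release it here instead of handing it back to CameraSource.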
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    ALOGV("signalBufferReturned");
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

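// Allocates a new MediaBuffer, copies the payload of sourceBuffer into it and
// stamps it with the given frame time. The caller owns the returned buffer.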
void createMediaBufferCopy(
        const MediaBuffer& sourceBuffer,
        int64_t frameTime,
        MediaBuffer **newBuffer) {

    ALOGV("createMediaBufferCopy");
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    ALOGV("fillLastReadBufferCopy");
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

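// Delegates to CameraSource::read() until quick stop mode kicks in; after a
// copy of the last returned buffer has been made, that copy is handed out
// (with an extra reference) on every subsequent call so read() never blocks.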
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    ALOGV("read");
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have been set to true while read() was blocked.
        // Make a copy of the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

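// Creates a deep copy of an IMemory frame by allocating a new memory heap of
// the same size and copying the source data into it.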
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
        const sp<IMemory> &source_data) {

    ALOGV("createIMemoryCopy");
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

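// Consumes the skip flag computed by skipFrameAndModifyTimeStamp() and reports
// whether the current frame should be dropped.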
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
    ALOGV("skipCurrentFrame");
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

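// Decides whether the frame with the given real-time timestamp should be
// dropped: frames arriving before mTimeBetweenFrameCaptureUs has elapsed since
// the last kept frame are skipped, while kept frames (after the first) have
// their timestamp rewritten to be one video frame interval after the last
// encoded frame.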
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    ALOGV("skipFrameAndModifyTimeStamp");
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
        // to current time (timestampUs) and save frame data.
        ALOGV("dataCallbackTimestamp timelapse: initial frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }

    {
        Mutex::Autolock autoLock(mQuickStopLock);

        // mForceRead may be set to true by startQuickReadReturns(). In that
        // case don't skip this frame.
        if (mForceRead) {
            ALOGV("dataCallbackTimestamp timelapse: forced read");
            mForceRead = false;
            *timestampUs =
                mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;

            // Really make sure that this video recording frame will not be dropped.
            if (*timestampUs < mStartTimeUs) {
                ALOGI("set timestampUs to start time stamp %" PRId64 " us", mStartTimeUs);
                *timestampUs = mStartTimeUs;
            }
            return false;
        }
    }

    // Workaround to bypass the first 2 input frames for skipping.
    // The first 2 output frames from the encoder are: decoder specific info and
    // the compressed video frame data for the first input video frame.
    if (mNumFramesEncoded >= 1 && *timestampUs <
        (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenFrameCaptureUs)) {
        // Skip all frames from last encoded frame until
        // sufficient time (mTimeBetweenFrameCaptureUs) has passed.
        // Tell the camera to release its recording frame and return.
        ALOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
        return true;
    } else {
        // Desired frame has arrived after mTimeBetweenFrameCaptureUs time:
        // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
        // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
        // of the last encoded frame's time stamp.
        ALOGV("dataCallbackTimestamp timelapse: got timelapse frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        return false;
    }
}

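// Per-frame callback for recording frames delivered as IMemory buffers:
// records whether the frame should be skipped, rewrites its timestamp for
// time lapse playback and forwards it to CameraSource.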
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data) {
    ALOGV("dataCallbackTimestamp");
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestamp(int64_t timestampUs,
            native_handle_t* handle) {
    ALOGV("recordingFrameHandleCallbackTimestamp");
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    CameraSource::recordingFrameHandleCallbackTimestamp(timestampUs, handle);
}

void CameraSourceTimeLapse::recordingFrameHandleCallbackTimestampBatch(
        const std::vector<int64_t>& timestampsUs,
        const std::vector<native_handle_t*>& handles) {
    ALOGV("recordingFrameHandleCallbackTimestampBatch");
    int n = timestampsUs.size();
    for (int i = 0; i < n; i++) {
        // Don't do batching for CameraSourceTimeLapse for now
        recordingFrameHandleCallbackTimestamp(timestampsUs[i], handles[i]);
    }
}

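// Same skip/retimestamp logic for frames arriving through the BufferQueue
// path; BufferItem timestamps are in nanoseconds, hence the conversion to and
// from microseconds around skipFrameAndModifyTimeStamp().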
void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
    ALOGV("processBufferQueueFrame");
    int64_t timestampUs = buffer.mTimestamp / 1000;
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    buffer.mTimestamp = timestampUs * 1000;
    CameraSource::processBufferQueueFrame(buffer);
}

}  // namespace android