/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//#define LOG_NDEBUG 0
#define LOG_TAG "SurfaceMediaSource"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/SurfaceMediaSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <OMX_IVCommon.h>
#include <MetadataBufferType.h>

#include <ui/GraphicBuffer.h>
#include <gui/ISurfaceComposer.h>
#include <gui/IGraphicBufferAlloc.h>
#include <OMX_Component.h>

#include <utils/Log.h>
#include <utils/String8.h>

#include <private/gui/ComposerService.h>

namespace android {

SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeight) :
    mWidth(bufferWidth),
    mHeight(bufferHeight),
    mCurrentSlot(BufferQueue::INVALID_BUFFER_SLOT),
    mNumPendingBuffers(0),
    mCurrentTimestamp(0),
    mFrameRate(30),
    mStarted(false),
    mNumFramesReceived(0),
    mNumFramesEncoded(0),
    mFirstFrameTimestamp(0),
    mMaxAcquiredBufferCount(4),  // XXX double-check the default
    mUseAbsoluteTimestamps(false) {
    ALOGV("SurfaceMediaSource");

    if (bufferWidth == 0 || bufferHeight == 0) {
        ALOGE("Invalid dimensions %ux%u", bufferWidth, bufferHeight);
    }

    mBufferQueue = new BufferQueue(true);
    mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
    mBufferQueue->setSynchronousMode(true);
    mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
            GRALLOC_USAGE_HW_TEXTURE);

    sp<ISurfaceComposer> composer(ComposerService::getComposerService());

    // Note that we can't create an sp<...>(this) in a ctor that will not keep
    // a reference once the ctor ends, as that would cause the refcount of
    // 'this' to drop to 0 at the end of the ctor.  Since all we need is a
    // wp<...>, that's what we create.
    wp<BufferQueue::ConsumerListener> listener;
    sp<BufferQueue::ConsumerListener> proxy;
    listener = static_cast<BufferQueue::ConsumerListener*>(this);
    proxy = new BufferQueue::ProxyConsumerListener(listener);

    status_t err = mBufferQueue->consumerConnect(proxy);
    if (err != NO_ERROR) {
        ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)",
                strerror(-err), err);
    }
}

SurfaceMediaSource::~SurfaceMediaSource() {
    ALOGV("~SurfaceMediaSource");
    CHECK(!mStarted);
}

nsecs_t SurfaceMediaSource::getTimestamp() {
    ALOGV("getTimestamp");
    Mutex::Autolock lock(mMutex);
    return mCurrentTimestamp;
}

void SurfaceMediaSource::setFrameAvailableListener(
        const sp<FrameAvailableListener>& listener) {
    ALOGV("setFrameAvailableListener");
    Mutex::Autolock lock(mMutex);
    mFrameAvailableListener = listener;
}

void SurfaceMediaSource::dump(String8& result) const
{
    // Zero-initialize so we don't append uninitialized data below.
    char buffer[1024] = {};
    dump(result, "", buffer, 1024);
}

void SurfaceMediaSource::dump(String8& result, const char* prefix,
        char* buffer, size_t SIZE) const
{
    Mutex::Autolock lock(mMutex);

    result.append(buffer);
    mBufferQueue->dump(result);
}

status_t SurfaceMediaSource::setFrameRate(int32_t fps)
{
    ALOGV("setFrameRate");
    Mutex::Autolock lock(mMutex);
    const int MAX_FRAME_RATE = 60;
    if (fps < 0 || fps > MAX_FRAME_RATE) {
        return BAD_VALUE;
    }
    mFrameRate = fps;
    return OK;
}

bool SurfaceMediaSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return true;
}

int32_t SurfaceMediaSource::getFrameRate() const {
    ALOGV("getFrameRate");
    Mutex::Autolock lock(mMutex);
    return mFrameRate;
}

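// Begin delivering frames to the reader.  The optional MetaData may carry the
// recording start time in microseconds (kKeyTime) and the number of buffers
// the encoder advertises it may hold (kKeyNumBuffers), which becomes the
// BufferQueue's maximum acquired buffer count.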
status_t SurfaceMediaSource::start(MetaData *params)
{
    ALOGV("start");

    Mutex::Autolock lock(mMutex);

    CHECK(!mStarted);

    mStartTimeNs = 0;
    int64_t startTimeUs;
    int32_t bufferCount = 0;
    if (params) {
        if (params->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeNs = startTimeUs * 1000;
        }

        if (!params->findInt32(kKeyNumBuffers, &bufferCount)) {
            ALOGE("Failed to find the advertised buffer count");
            return UNKNOWN_ERROR;
        }

        if (bufferCount <= 1) {
            ALOGE("bufferCount %d is too small", bufferCount);
            return BAD_VALUE;
        }

        mMaxAcquiredBufferCount = bufferCount;
    }

    CHECK_GT(mMaxAcquiredBufferCount, 1);

    status_t err =
        mBufferQueue->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);

    if (err != OK) {
        return err;
    }

    mNumPendingBuffers = 0;
    mStarted = true;

    return OK;
}

status_t SurfaceMediaSource::setMaxAcquiredBufferCount(size_t count) {
    ALOGV("setMaxAcquiredBufferCount(%zu)", count);
    Mutex::Autolock lock(mMutex);

    CHECK_GT(count, 1);
    mMaxAcquiredBufferCount = count;

    return OK;
}

status_t SurfaceMediaSource::setUseAbsoluteTimestamps() {
    ALOGV("setUseAbsoluteTimestamps");
    Mutex::Autolock lock(mMutex);
    mUseAbsoluteTimestamps = true;

    return OK;
}

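// Stop the source.  Blocks until every MediaBuffer handed out by read() has
// been returned via signalBufferReturned(), then disconnects the consumer
// side of the BufferQueue.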
status_t SurfaceMediaSource::stop()
{
    ALOGV("stop");
    Mutex::Autolock lock(mMutex);

    if (!mStarted) {
        return OK;
    }

    while (mNumPendingBuffers > 0) {
        ALOGI("Still waiting for %d buffers to be returned.",
                mNumPendingBuffers);

#if DEBUG_PENDING_BUFFERS
        for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
            ALOGI("%zu: %p", i, mPendingBuffers.itemAt(i));
        }
#endif

        mMediaBuffersAvailableCondition.wait(mMutex);
    }

    mStarted = false;
    mFrameAvailableCondition.signal();
    mMediaBuffersAvailableCondition.signal();

    return mBufferQueue->consumerDisconnect();
}

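// Return the format the encoder will be configured with: the buffer
// dimensions, the frame rate, and an opaque color format (the real pixel
// format is only known to the gralloc buffers).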
sp<MetaData> SurfaceMediaSource::getFormat()
{
    ALOGV("getFormat");

    Mutex::Autolock lock(mMutex);
    sp<MetaData> meta = new MetaData;

    meta->setInt32(kKeyWidth, mWidth);
    meta->setInt32(kKeyHeight, mHeight);
    // The encoder format is set to an opaque color format; the encoder will
    // later find out the actual color format from the gralloc frames
    // themselves.
    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque);
    meta->setInt32(kKeyStride, mWidth);
    meta->setInt32(kKeySliceHeight, mHeight);
    meta->setInt32(kKeyFrameRate, mFrameRate);
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    return meta;
}

// Pass the data to the MediaBuffer. Pass in only the metadata.
// The metadata passed consists of two parts:
// 1. First, there is an integer indicating that this is a gralloc
//    source (kMetadataBufferTypeGrallocSource).
// 2. This is followed by the buffer_handle_t, a handle to the gralloc
//    buffer. The encoder needs to interpret this gralloc handle and
//    encode the frames.
// --------------------------------------------------------------
// |  kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) |
// --------------------------------------------------------------
// Note: Call only when you have the lock.
static void passMetadataBuffer(MediaBuffer **buffer,
        buffer_handle_t bufferHandle) {
    *buffer = new MediaBuffer(4 + sizeof(buffer_handle_t));
    char *data = (char *)(*buffer)->data();
    if (data == NULL) {
        ALOGE("Cannot allocate memory for metadata buffer!");
        return;
    }
    OMX_U32 type = kMetadataBufferTypeGrallocSource;
    memcpy(data, &type, 4);
    memcpy(data + 4, &bufferHandle, sizeof(buffer_handle_t));

    ALOGV("handle = %p, offset = %zu, length = %zu",
            bufferHandle, (*buffer)->range_offset(), (*buffer)->range_length());
}

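// Acquire the next frame from the BufferQueue and hand its gralloc handle to
// the encoder, wrapped in a small metadata MediaBuffer built by
// passMetadataBuffer() above.  Blocks while the maximum number of buffers is
// already outstanding, and while the queue is empty, until a frame arrives or
// the source is stopped.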
status_t SurfaceMediaSource::read(MediaBuffer **buffer,
                                  const ReadOptions *options)
{
    ALOGV("read");
    Mutex::Autolock lock(mMutex);

    *buffer = NULL;

    while (mStarted && mNumPendingBuffers == mMaxAcquiredBufferCount) {
        mMediaBuffersAvailableCondition.wait(mMutex);
    }

    // Update the current buffer info.
    // TODO: mCurrentSlot can be made a buffer state, since there can be
    // more than one "current" slot.

    BufferQueue::BufferItem item;
    // If the recording has started and the queue is empty, then just
    // wait here until frames come in from the client side.
    while (mStarted) {

        status_t err = mBufferQueue->acquireBuffer(&item);
        if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
            // wait for a buffer to be queued
            mFrameAvailableCondition.wait(mMutex);
        } else if (err == OK) {

            // First time seeing the buffer?  Add it to the SMS slot.
            if (item.mGraphicBuffer != NULL) {
                mBufferSlot[item.mBuf] = item.mGraphicBuffer;
            }

            // check the timing of this buffer
            if (mNumFramesReceived == 0 && !mUseAbsoluteTimestamps) {
                mFirstFrameTimestamp = item.mTimestamp;
                // Initial delay
                if (mStartTimeNs > 0) {
                    if (item.mTimestamp < mStartTimeNs) {
                        // This frame predates start of record, discard
                        mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY,
                                EGL_NO_SYNC_KHR, Fence::NO_FENCE);
                        continue;
                    }
                    mStartTimeNs = item.mTimestamp - mStartTimeNs;
                }
            }
            item.mTimestamp = mStartTimeNs + (item.mTimestamp - mFirstFrameTimestamp);

            mNumFramesReceived++;

            break;
        } else {
            ALOGE("read: acquire failed with error code %d", err);
            return ERROR_END_OF_STREAM;
        }

    }

    // If the loop was exited as a result of stopping the recording,
    // it is OK
    if (!mStarted) {
        ALOGV("Read: SurfaceMediaSource is stopped. Returning ERROR_END_OF_STREAM.");
        return ERROR_END_OF_STREAM;
    }

    mCurrentSlot = item.mBuf;

    // First time seeing the buffer?  Add it to the SMS slot.
    if (item.mGraphicBuffer != NULL) {
        mBufferSlot[mCurrentSlot] = item.mGraphicBuffer;
    }

    mCurrentBuffers.push_back(mBufferSlot[mCurrentSlot]);
    int64_t prevTimeStamp = mCurrentTimestamp;
    mCurrentTimestamp = item.mTimestamp;

    mNumFramesEncoded++;
    // Pass the data to the MediaBuffer. Pass in only the metadata.

    passMetadataBuffer(buffer, mBufferSlot[mCurrentSlot]->handle);

    (*buffer)->setObserver(this);
    (*buffer)->add_ref();
    (*buffer)->meta_data()->setInt64(kKeyTime, mCurrentTimestamp / 1000);
    ALOGV("Frames encoded = %d, timestamp = %lld, time diff = %lld",
            mNumFramesEncoded, mCurrentTimestamp / 1000,
            mCurrentTimestamp / 1000 - prevTimeStamp / 1000);

    ++mNumPendingBuffers;

#if DEBUG_PENDING_BUFFERS
    mPendingBuffers.push_back(*buffer);
#endif

    ALOGV("returning mbuf %p", *buffer);

    return OK;
}

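// Recover the buffer_handle_t that passMetadataBuffer() stored after the
// 4-byte metadata type tag.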
static buffer_handle_t getMediaBufferHandle(MediaBuffer *buffer) {
    // need to convert to char* for pointer arithmetic and then
    // copy the byte stream into our handle
    buffer_handle_t bufferHandle;
    memcpy(&bufferHandle, (char*)(buffer->data()) + 4, sizeof(buffer_handle_t));
    return bufferHandle;
}

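// Called when the encoder is done with a MediaBuffer handed out by read().
// Releases the matching slot back to the BufferQueue and wakes up any reader
// waiting for an outstanding buffer to be returned.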
void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned");

    bool foundBuffer = false;

    Mutex::Autolock lock(mMutex);

    buffer_handle_t bufferHandle = getMediaBufferHandle(buffer);

    for (size_t i = 0; i < mCurrentBuffers.size(); i++) {
        if (mCurrentBuffers[i]->handle == bufferHandle) {
            mCurrentBuffers.removeAt(i);
            foundBuffer = true;
            break;
        }
    }

    if (!foundBuffer) {
        ALOGW("returned buffer was not found in the current buffer list");
    }

    for (int id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) {
        if (mBufferSlot[id] == NULL) {
            continue;
        }

        if (bufferHandle == mBufferSlot[id]->handle) {
            ALOGV("Slot %d returned, matches handle = %p", id,
                    mBufferSlot[id]->handle);

            mBufferQueue->releaseBuffer(id, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
                    Fence::NO_FENCE);

            buffer->setObserver(0);
            buffer->release();

            foundBuffer = true;
            break;
        }
    }

    if (!foundBuffer) {
        CHECK(!"signalBufferReturned: bogus buffer");
    }

#if DEBUG_PENDING_BUFFERS
    for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
        if (mPendingBuffers.itemAt(i) == buffer) {
            mPendingBuffers.removeAt(i);
            break;
        }
    }
#endif

    --mNumPendingBuffers;
    mMediaBuffersAvailableCondition.broadcast();
}

// Part of the BufferQueue::ConsumerListener
void SurfaceMediaSource::onFrameAvailable() {
    ALOGV("onFrameAvailable");

    sp<FrameAvailableListener> listener;
    { // scope for the lock
        Mutex::Autolock lock(mMutex);
        mFrameAvailableCondition.broadcast();
        listener = mFrameAvailableListener;
    }

    if (listener != NULL) {
        ALOGV("actually calling onFrameAvailable");
        listener->onFrameAvailable();
    }
}

// SurfaceMediaSource hijacks this event to infer that the producer is
// disconnecting from the BufferQueue and that it should stop the recording.
void SurfaceMediaSource::onBuffersReleased() {
    ALOGV("onBuffersReleased");

    Mutex::Autolock lock(mMutex);

    mFrameAvailableCondition.signal();

    for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
        mBufferSlot[i] = 0;
    }
}

} // end of namespace android