/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//#define LOG_NDEBUG 0
#define LOG_TAG "SurfaceMediaSource"

#include <inttypes.h>

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/SurfaceMediaSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <OMX_IVCommon.h>
#include <media/hardware/MetadataBufferType.h>

#include <ui/GraphicBuffer.h>
#include <gui/BufferItem.h>
#include <gui/ISurfaceComposer.h>
#include <gui/IGraphicBufferAlloc.h>
#include <OMX_Component.h>

#include <utils/Log.h>
#include <utils/String8.h>

#include <private/gui/ComposerService.h>

namespace android {

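// Sets up the producer/consumer sides of a BufferQueue, requests buffers
// usable by both the hardware video encoder and GL (HW_VIDEO_ENCODER |
// HW_TEXTURE), and connects this object as the consumer through a
// ProxyConsumerListener so the BufferQueue only holds a weak reference.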
SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeight) :
    mWidth(bufferWidth),
    mHeight(bufferHeight),
    mCurrentSlot(BufferQueue::INVALID_BUFFER_SLOT),
    mNumPendingBuffers(0),
    mCurrentTimestamp(0),
    mFrameRate(30),
    mStarted(false),
    mNumFramesReceived(0),
    mNumFramesEncoded(0),
    mFirstFrameTimestamp(0),
    mMaxAcquiredBufferCount(4),  // XXX double-check the default
    mUseAbsoluteTimestamps(false) {
    ALOGV("SurfaceMediaSource");

    if (bufferWidth == 0 || bufferHeight == 0) {
        ALOGE("Invalid dimensions %ux%u", bufferWidth, bufferHeight);
    }

    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
    mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
    mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
            GRALLOC_USAGE_HW_TEXTURE);

    sp<ISurfaceComposer> composer(ComposerService::getComposerService());

    // Note that we can't create an sp<...>(this) in a ctor that will not keep a
    // reference once the ctor ends, as that would cause the refcount of 'this'
    // to drop to 0 at the end of the ctor.  Since all we need is a wp<...>,
    // that's what we create.
    wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
    sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);

    status_t err = mConsumer->consumerConnect(proxy, false);
    if (err != NO_ERROR) {
        ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)",
                strerror(-err), err);
    }
}

SurfaceMediaSource::~SurfaceMediaSource() {
    ALOGV("~SurfaceMediaSource");
    CHECK(!mStarted);
}

nsecs_t SurfaceMediaSource::getTimestamp() {
    ALOGV("getTimestamp");
    Mutex::Autolock lock(mMutex);
    return mCurrentTimestamp;
}

void SurfaceMediaSource::setFrameAvailableListener(
        const sp<FrameAvailableListener>& listener) {
    ALOGV("setFrameAvailableListener");
    Mutex::Autolock lock(mMutex);
    mFrameAvailableListener = listener;
}

void SurfaceMediaSource::dump(String8& result) const
{
    // Zero-initialize: the buffer is appended to result as a C string below.
    char buffer[1024] = {0};
    dump(result, "", buffer, 1024);
}

void SurfaceMediaSource::dump(
        String8& result,
        const char* /* prefix */,
        char* buffer,
        size_t /* SIZE */) const
{
    Mutex::Autolock lock(mMutex);

    result.append(buffer);
    mConsumer->dump(result, "");
}

status_t SurfaceMediaSource::setFrameRate(int32_t fps)
{
    ALOGV("setFrameRate");
    Mutex::Autolock lock(mMutex);
    const int MAX_FRAME_RATE = 60;
    if (fps < 0 || fps > MAX_FRAME_RATE) {
        return BAD_VALUE;
    }
    mFrameRate = fps;
    return OK;
}

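// Frames are handed to the encoder as metadata (a gralloc buffer handle
// written by passMetadataBuffer() below) rather than as raw pixel data.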
bool SurfaceMediaSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return true;
}

int32_t SurfaceMediaSource::getFrameRate() const {
    ALOGV("getFrameRate");
    Mutex::Autolock lock(mMutex);
    return mFrameRate;
}

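// Begins accepting frames. If MetaData is supplied, it must carry
// kKeyNumBuffers (the number of buffers the consumer may hold at once,
// which becomes mMaxAcquiredBufferCount); it may also carry kKeyTime,
// the start time in microseconds, converted to nanoseconds here.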
status_t SurfaceMediaSource::start(MetaData *params)
{
    ALOGV("start");

    Mutex::Autolock lock(mMutex);

    CHECK(!mStarted);

    mStartTimeNs = 0;
    int64_t startTimeUs;
    int32_t bufferCount = 0;
    if (params) {
        if (params->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeNs = startTimeUs * 1000;
        }

        if (!params->findInt32(kKeyNumBuffers, &bufferCount)) {
            ALOGE("Failed to find the advertised buffer count");
            return UNKNOWN_ERROR;
        }

        if (bufferCount <= 1) {
            ALOGE("bufferCount %d is too small", bufferCount);
            return BAD_VALUE;
        }

        mMaxAcquiredBufferCount = bufferCount;
    }

    CHECK_GT(mMaxAcquiredBufferCount, 1);

    status_t err =
        mConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);

    if (err != OK) {
        return err;
    }

    mNumPendingBuffers = 0;
    mStarted = true;

    return OK;
}

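// Sets how many buffers the consumer side may hold at once. The value is
// applied to the BufferQueue when start() is called.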
status_t SurfaceMediaSource::setMaxAcquiredBufferCount(size_t count) {
    ALOGV("setMaxAcquiredBufferCount(%zu)", count);
    Mutex::Autolock lock(mMutex);

    CHECK_GT(count, 1);
    mMaxAcquiredBufferCount = count;

    return OK;
}

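// When enabled, read() skips the first-frame rebasing logic, so buffer
// timestamps are not made relative to the first frame's timestamp.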
status_t SurfaceMediaSource::setUseAbsoluteTimestamps() {
    ALOGV("setUseAbsoluteTimestamps");
    Mutex::Autolock lock(mMutex);
    mUseAbsoluteTimestamps = true;

    return OK;
}

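// Stops the source: wakes any reader blocked in read(), waits for all
// outstanding MediaBuffers to be returned via signalBufferReturned(), and
// then disconnects the consumer from the BufferQueue.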
status_t SurfaceMediaSource::stop()
{
    ALOGV("stop");
    Mutex::Autolock lock(mMutex);

    if (!mStarted) {
        return OK;
    }

    mStarted = false;
    mFrameAvailableCondition.signal();

    while (mNumPendingBuffers > 0) {
        ALOGI("Still waiting for %zu buffers to be returned.",
                mNumPendingBuffers);

#if DEBUG_PENDING_BUFFERS
        for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
            ALOGI("%zu: %p", i, mPendingBuffers.itemAt(i));
        }
#endif

        mMediaBuffersAvailableCondition.wait(mMutex);
    }

    mMediaBuffersAvailableCondition.signal();

    return mConsumer->consumerDisconnect();
}

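// Reports the video format to the encoder. The stride and slice height
// default to the buffer width and height, and the color format is left
// opaque; see the comment inside.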
sp<MetaData> SurfaceMediaSource::getFormat()
{
    ALOGV("getFormat");

    Mutex::Autolock lock(mMutex);
    sp<MetaData> meta = new MetaData;

    meta->setInt32(kKeyWidth, mWidth);
    meta->setInt32(kKeyHeight, mHeight);
    // The encoder format is set to an opaque color format; the encoder
    // will later determine the actual color format from the GL frames
    // themselves.
    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque);
    meta->setInt32(kKeyStride, mWidth);
    meta->setInt32(kKeySliceHeight, mHeight);
    meta->setInt32(kKeyFrameRate, mFrameRate);
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    return meta;
}

// Pass only metadata to the MediaBuffer, not the pixel data itself.
// The metadata consists of two parts:
// 1. An integer indicating that this is a gralloc source
//    (kMetadataBufferTypeGrallocSource).
// 2. The buffer_handle_t of the gralloc buffer. The encoder must
//    interpret this gralloc handle in order to encode the frame.
// --------------------------------------------------------------
// |  kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) |
// --------------------------------------------------------------
// Note: call only while holding the lock.
static void passMetadataBuffer(MediaBuffer **buffer,
        buffer_handle_t bufferHandle) {
    *buffer = new MediaBuffer(4 + sizeof(buffer_handle_t));
    char *data = (char *)(*buffer)->data();
    if (data == NULL) {
        ALOGE("Cannot allocate memory for metadata buffer!");
        return;
    }
    OMX_U32 type = kMetadataBufferTypeGrallocSource;
    memcpy(data, &type, 4);
    memcpy(data + 4, &bufferHandle, sizeof(buffer_handle_t));

    ALOGV("handle = %p, offset = %zu, length = %zu",
            bufferHandle, (*buffer)->range_offset(), (*buffer)->range_length());
}

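// Blocks until a frame can be acquired from the BufferQueue (or until
// stop() is called), waits on the buffer's fence, adjusts the timestamp
// relative to the recording start time, and returns a metadata
// MediaBuffer that wraps the frame's gralloc handle.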
status_t SurfaceMediaSource::read(
        MediaBuffer **buffer, const ReadOptions * /* options */) {
    ALOGV("read");
    Mutex::Autolock lock(mMutex);

    *buffer = NULL;

    while (mStarted && mNumPendingBuffers == mMaxAcquiredBufferCount) {
        mMediaBuffersAvailableCondition.wait(mMutex);
    }

    // Update the current buffer info
    // TODO: mCurrentSlot can be made a buffer state, since there
    // can be more than one "current" slot.

    BufferItem item;
    // If recording has started and the queue is empty, wait here
    // until frames come in from the client side.
    while (mStarted) {

        status_t err = mConsumer->acquireBuffer(&item, 0);
        if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
            // wait for a buffer to be queued
            mFrameAvailableCondition.wait(mMutex);
        } else if (err == OK) {
            err = item.mFence->waitForever("SurfaceMediaSource::read");
            if (err) {
                ALOGW("read: failed to wait for buffer fence: %d", err);
            }

            // First time seeing this buffer?  Add it to the SMS slot.
            if (item.mGraphicBuffer != NULL) {
                mSlots[item.mBuf].mGraphicBuffer = item.mGraphicBuffer;
            }
            mSlots[item.mBuf].mFrameNumber = item.mFrameNumber;

            // check the timing of this buffer
            if (mNumFramesReceived == 0 && !mUseAbsoluteTimestamps) {
                mFirstFrameTimestamp = item.mTimestamp;
                // Initial delay
                if (mStartTimeNs > 0) {
                    if (item.mTimestamp < mStartTimeNs) {
                        // This frame predates the start of recording; discard it.
                        mConsumer->releaseBuffer(
                                item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY,
                                EGL_NO_SYNC_KHR, Fence::NO_FENCE);
                        continue;
                    }
                    mStartTimeNs = item.mTimestamp - mStartTimeNs;
                }
            }
            item.mTimestamp = mStartTimeNs + (item.mTimestamp - mFirstFrameTimestamp);

            mNumFramesReceived++;

            break;
        } else {
            ALOGE("read: acquire failed with error code %d", err);
            return ERROR_END_OF_STREAM;
        }

    }

    // If the loop was exited as a result of stopping the recording,
    // that is OK.
    if (!mStarted) {
        ALOGV("Read: SurfaceMediaSource is stopped. Returning ERROR_END_OF_STREAM.");
        return ERROR_END_OF_STREAM;
    }

    mCurrentSlot = item.mBuf;

    // First time seeing this buffer?  Add it to the SMS slot.
    if (item.mGraphicBuffer != NULL) {
        mSlots[item.mBuf].mGraphicBuffer = item.mGraphicBuffer;
    }
    mSlots[item.mBuf].mFrameNumber = item.mFrameNumber;

    mCurrentBuffers.push_back(mSlots[mCurrentSlot].mGraphicBuffer);
    int64_t prevTimeStamp = mCurrentTimestamp;
    mCurrentTimestamp = item.mTimestamp;

    mNumFramesEncoded++;
    // Pass only the metadata to the MediaBuffer.

    passMetadataBuffer(buffer, mSlots[mCurrentSlot].mGraphicBuffer->handle);

    (*buffer)->setObserver(this);
    (*buffer)->add_ref();
    (*buffer)->meta_data()->setInt64(kKeyTime, mCurrentTimestamp / 1000);
    ALOGV("Frames encoded = %d, timestamp = %" PRId64 ", time diff = %" PRId64,
            mNumFramesEncoded, mCurrentTimestamp / 1000,
            mCurrentTimestamp / 1000 - prevTimeStamp / 1000);

    ++mNumPendingBuffers;

#if DEBUG_PENDING_BUFFERS
    mPendingBuffers.push_back(*buffer);
#endif

    ALOGV("returning mbuf %p", *buffer);

    return OK;
}

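// Inverse of passMetadataBuffer(): recovers the buffer_handle_t stored
// after the 4-byte metadata type field.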
static buffer_handle_t getMediaBufferHandle(MediaBuffer *buffer) {
    // need to convert to char* for pointer arithmetic and then
    // copy the byte stream into our handle
    buffer_handle_t bufferHandle;
    memcpy(&bufferHandle, (char*)(buffer->data()) + 4, sizeof(buffer_handle_t));
    return bufferHandle;
}

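// MediaBufferObserver callback, invoked when the encoder is done with a
// buffer. Releases the matching slot back to the BufferQueue and wakes
// anyone waiting in read() or stop().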
void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned");

    bool foundBuffer = false;

    Mutex::Autolock lock(mMutex);

    buffer_handle_t bufferHandle = getMediaBufferHandle(buffer);

    for (size_t i = 0; i < mCurrentBuffers.size(); i++) {
        if (mCurrentBuffers[i]->handle == bufferHandle) {
            mCurrentBuffers.removeAt(i);
            foundBuffer = true;
            break;
        }
    }

    if (!foundBuffer) {
        ALOGW("returned buffer was not found in the current buffer list");
    }

    for (int id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) {
        if (mSlots[id].mGraphicBuffer == NULL) {
            continue;
        }

        if (bufferHandle == mSlots[id].mGraphicBuffer->handle) {
            ALOGV("Slot %d returned, matches handle = %p", id,
                    mSlots[id].mGraphicBuffer->handle);

            mConsumer->releaseBuffer(id, mSlots[id].mFrameNumber,
                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);

            buffer->setObserver(0);
            buffer->release();

            foundBuffer = true;
            break;
        }
    }

    if (!foundBuffer) {
        CHECK(!"signalBufferReturned: bogus buffer");
    }

#if DEBUG_PENDING_BUFFERS
    for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
        if (mPendingBuffers.itemAt(i) == buffer) {
            mPendingBuffers.removeAt(i);
            break;
        }
    }
#endif

    --mNumPendingBuffers;
    mMediaBuffersAvailableCondition.broadcast();
}

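// Called when a new frame is queued to the BufferQueue: wakes any reader
// blocked in read(), then notifies the registered FrameAvailableListener
// outside of the lock.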
// Part of the BufferQueue::ConsumerListener
void SurfaceMediaSource::onFrameAvailable(const BufferItem& /* item */) {
    ALOGV("onFrameAvailable");

    sp<FrameAvailableListener> listener;
    { // scope for the lock
        Mutex::Autolock lock(mMutex);
        mFrameAvailableCondition.broadcast();
        listener = mFrameAvailableListener;
    }

    if (listener != NULL) {
        ALOGV("actually calling onFrameAvailable");
        listener->onFrameAvailable();
    }
}

// SurfaceMediaSource hijacks this event to mean that the producer is
// disconnecting from the BufferQueue and that recording should stop.
void SurfaceMediaSource::onBuffersReleased() {
    ALOGV("onBuffersReleased");

    Mutex::Autolock lock(mMutex);

    mFrameAvailableCondition.signal();

    for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
        mSlots[i].mGraphicBuffer = 0;
    }
}

void SurfaceMediaSource::onSidebandStreamChanged() {
    ALOG_ASSERT(false, "SurfaceMediaSource can't consume sideband streams");
}

} // end of namespace android