/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//#define LOG_NDEBUG 0
#define LOG_TAG "SurfaceMediaSource"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/SurfaceMediaSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <OMX_IVCommon.h>
#include <media/hardware/MetadataBufferType.h>

#include <ui/GraphicBuffer.h>
#include <gui/ISurfaceComposer.h>
#include <gui/IGraphicBufferAlloc.h>
#include <OMX_Component.h>

#include <utils/Log.h>
#include <utils/String8.h>

#include <private/gui/ComposerService.h>

namespace android {

SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeight) :
    mWidth(bufferWidth),
    mHeight(bufferHeight),
    mCurrentSlot(BufferQueue::INVALID_BUFFER_SLOT),
    mNumPendingBuffers(0),
    mCurrentTimestamp(0),
    mFrameRate(30),
    mStarted(false),
    mNumFramesReceived(0),
    mNumFramesEncoded(0),
    mFirstFrameTimestamp(0),
    mMaxAcquiredBufferCount(4),  // XXX double-check the default
    mUseAbsoluteTimestamps(false) {
    ALOGV("SurfaceMediaSource");

    if (bufferWidth == 0 || bufferHeight == 0) {
        ALOGE("Invalid dimensions %ux%u", bufferWidth, bufferHeight);
    }

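    // Create the BufferQueue that the producer side (e.g. a Surface used as an
    // encoder input) will queue frames into.  Advertise both video-encoder and
    // texture usage so gralloc allocates buffers the hardware encoder can read.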
    mBufferQueue = new BufferQueue();
    mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
    mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
            GRALLOC_USAGE_HW_TEXTURE);

    sp<ISurfaceComposer> composer(ComposerService::getComposerService());

    // Note that we can't create an sp<...>(this) in a ctor that will not keep a
    // reference once the ctor ends, as that would cause the refcount of 'this'
    // to drop to 0 at the end of the ctor.  Since all we need is a wp<...>,
    // that's what we create.
    wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
    sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);

    status_t err = mBufferQueue->consumerConnect(proxy, false);
    if (err != NO_ERROR) {
        ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)",
                strerror(-err), err);
    }
}

SurfaceMediaSource::~SurfaceMediaSource() {
    ALOGV("~SurfaceMediaSource");
    CHECK(!mStarted);
}

nsecs_t SurfaceMediaSource::getTimestamp() {
    ALOGV("getTimestamp");
    Mutex::Autolock lock(mMutex);
    return mCurrentTimestamp;
}

void SurfaceMediaSource::setFrameAvailableListener(
        const sp<FrameAvailableListener>& listener) {
    ALOGV("setFrameAvailableListener");
    Mutex::Autolock lock(mMutex);
    mFrameAvailableListener = listener;
}

void SurfaceMediaSource::dump(String8& result) const
{
    char buffer[1024] = {0};
    dump(result, "", buffer, 1024);
}

void SurfaceMediaSource::dump(String8& result, const char* prefix,
        char* buffer, size_t SIZE) const
{
    Mutex::Autolock lock(mMutex);

    result.append(buffer);
    mBufferQueue->dump(result, "");
}
110
111status_t SurfaceMediaSource::setFrameRate(int32_t fps)
112{
113    ALOGV("setFrameRate");
114    Mutex::Autolock lock(mMutex);
115    const int MAX_FRAME_RATE = 60;
116    if (fps < 0 || fps > MAX_FRAME_RATE) {
117        return BAD_VALUE;
118    }
119    mFrameRate = fps;
120    return OK;
121}
122
123bool SurfaceMediaSource::isMetaDataStoredInVideoBuffers() const {
124    ALOGV("isMetaDataStoredInVideoBuffers");
125    return true;
126}
127
128int32_t SurfaceMediaSource::getFrameRate( ) const {
129    ALOGV("getFrameRate");
130    Mutex::Autolock lock(mMutex);
131    return mFrameRate;
132}
133
status_t SurfaceMediaSource::start(MetaData *params)
{
    ALOGV("start");

    Mutex::Autolock lock(mMutex);

    CHECK(!mStarted);

    mStartTimeNs = 0;
    int64_t startTimeUs;
    int32_t bufferCount = 0;
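    // The caller advertises the recording start time (kKeyTime, in microseconds)
    // and the number of input buffers the encoder will hold (kKeyNumBuffers)
    // through the MetaData passed to start().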
    if (params) {
        if (params->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeNs = startTimeUs * 1000;
        }

        if (!params->findInt32(kKeyNumBuffers, &bufferCount)) {
            ALOGE("Failed to find the advertised buffer count");
            return UNKNOWN_ERROR;
        }

        if (bufferCount <= 1) {
            ALOGE("bufferCount %d is too small", bufferCount);
            return BAD_VALUE;
        }

        mMaxAcquiredBufferCount = bufferCount;
    }

    CHECK_GT(mMaxAcquiredBufferCount, 1);

    status_t err =
        mBufferQueue->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);

    if (err != OK) {
        return err;
    }

    mNumPendingBuffers = 0;
    mStarted = true;

    return OK;
}

status_t SurfaceMediaSource::setMaxAcquiredBufferCount(size_t count) {
    ALOGV("setMaxAcquiredBufferCount(%zu)", count);
    Mutex::Autolock lock(mMutex);

    CHECK_GT(count, 1);
    mMaxAcquiredBufferCount = count;

    return OK;
}

status_t SurfaceMediaSource::setUseAbsoluteTimestamps() {
    ALOGV("setUseAbsoluteTimestamps");
    Mutex::Autolock lock(mMutex);
    mUseAbsoluteTimestamps = true;

    return OK;
}

status_t SurfaceMediaSource::stop()
{
    ALOGV("stop");
    Mutex::Autolock lock(mMutex);

    if (!mStarted) {
        return OK;
    }

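    // Every MediaBuffer handed out by read() must come back through
    // signalBufferReturned() before the consumer side can disconnect;
    // block here until the encoder has returned them all.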
    while (mNumPendingBuffers > 0) {
        ALOGI("Still waiting for %zu buffers to be returned.",
                mNumPendingBuffers);

#if DEBUG_PENDING_BUFFERS
        for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
            ALOGI("%zu: %p", i, mPendingBuffers.itemAt(i));
        }
#endif

        mMediaBuffersAvailableCondition.wait(mMutex);
    }

    mStarted = false;
    mFrameAvailableCondition.signal();
    mMediaBuffersAvailableCondition.signal();

    return mBufferQueue->consumerDisconnect();
}

sp<MetaData> SurfaceMediaSource::getFormat()
{
    ALOGV("getFormat");

    Mutex::Autolock lock(mMutex);
    sp<MetaData> meta = new MetaData;

    meta->setInt32(kKeyWidth, mWidth);
    meta->setInt32(kKeyHeight, mHeight);
    // The encoder format is set as an opaque colorformat.
    // The encoder will later find out the actual colorformat
    // from the GL frames themselves.
    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque);
    meta->setInt32(kKeyStride, mWidth);
    meta->setInt32(kKeySliceHeight, mHeight);
    meta->setInt32(kKeyFrameRate, mFrameRate);
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    return meta;
}

// Pass the data to the MediaBuffer.  Pass in only the metadata.
// The metadata passed consists of two parts:
// 1. First, there is an integer indicating that it is a gralloc
//    source (kMetadataBufferTypeGrallocSource).
// 2. This is followed by the buffer_handle_t, which is a handle to the
//    gralloc buffer.  The encoder needs to interpret this gralloc handle
//    and encode the frames.
// --------------------------------------------------------------
// |  kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) |
// --------------------------------------------------------------
// Note: Call only when you have the lock
static void passMetadataBuffer(MediaBuffer **buffer,
        buffer_handle_t bufferHandle) {
    *buffer = new MediaBuffer(4 + sizeof(buffer_handle_t));
    char *data = (char *)(*buffer)->data();
    if (data == NULL) {
        ALOGE("Cannot allocate memory for metadata buffer!");
        return;
    }
    OMX_U32 type = kMetadataBufferTypeGrallocSource;
    memcpy(data, &type, 4);
    memcpy(data + 4, &bufferHandle, sizeof(buffer_handle_t));

    ALOGV("handle = %p, offset = %zu, length = %zu",
            bufferHandle, (*buffer)->range_offset(), (*buffer)->range_length());
}

status_t SurfaceMediaSource::read(MediaBuffer **buffer,
                                  const ReadOptions *options)
{
    ALOGV("read");
    Mutex::Autolock lock(mMutex);

    *buffer = NULL;

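    // Throttle the reader: never hand out more buffers than the consumer is
    // allowed to hold acquired at once; wait for the encoder to return one.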
    while (mStarted && mNumPendingBuffers == mMaxAcquiredBufferCount) {
        mMediaBuffersAvailableCondition.wait(mMutex);
    }

    // Update the current buffer info
    // TODO: mCurrentSlot can be made a bufferstate since there
    // can be more than one "current" slot.

    BufferQueue::BufferItem item;
    // If the recording has started and the queue is empty, then just
    // wait here till the frames come in from the client side
    while (mStarted) {

        status_t err = mBufferQueue->acquireBuffer(&item, 0);
        if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
            // wait for a buffer to be queued
            mFrameAvailableCondition.wait(mMutex);
        } else if (err == OK) {
            err = item.mFence->waitForever("SurfaceMediaSource::read");
            if (err) {
                ALOGW("read: failed to wait for buffer fence: %d", err);
            }

            // First time seeing the buffer?  Add it to the SMS slot
            if (item.mGraphicBuffer != NULL) {
                mSlots[item.mBuf].mGraphicBuffer = item.mGraphicBuffer;
            }
            mSlots[item.mBuf].mFrameNumber = item.mFrameNumber;

            // check for the timing of this buffer
            if (mNumFramesReceived == 0 && !mUseAbsoluteTimestamps) {
                mFirstFrameTimestamp = item.mTimestamp;
                // Initial delay
                if (mStartTimeNs > 0) {
                    if (item.mTimestamp < mStartTimeNs) {
                        // This frame predates start of record, discard
                        mBufferQueue->releaseBuffer(
                                item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY,
                                EGL_NO_SYNC_KHR, Fence::NO_FENCE);
                        continue;
                    }
                    mStartTimeNs = item.mTimestamp - mStartTimeNs;
                }
            }
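            // Rebase the producer timestamp onto the recording timeline:
            // frames are measured from the first frame's timestamp and shifted
            // by the initial delay captured in mStartTimeNs above.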
            item.mTimestamp = mStartTimeNs + (item.mTimestamp - mFirstFrameTimestamp);

            mNumFramesReceived++;

            break;
        } else {
            ALOGE("read: acquire failed with error code %d", err);
            return ERROR_END_OF_STREAM;
        }

    }

    // If the loop was exited as a result of stopping the recording,
    // it is OK
    if (!mStarted) {
        ALOGV("Read: SurfaceMediaSource is stopped. Returning ERROR_END_OF_STREAM.");
        return ERROR_END_OF_STREAM;
    }

    mCurrentSlot = item.mBuf;

    // First time seeing the buffer?  Add it to the SMS slot
    if (item.mGraphicBuffer != NULL) {
        mSlots[item.mBuf].mGraphicBuffer = item.mGraphicBuffer;
    }
    mSlots[item.mBuf].mFrameNumber = item.mFrameNumber;

    mCurrentBuffers.push_back(mSlots[mCurrentSlot].mGraphicBuffer);
    int64_t prevTimeStamp = mCurrentTimestamp;
    mCurrentTimestamp = item.mTimestamp;

    mNumFramesEncoded++;
    // Pass the data to the MediaBuffer.  Pass in only the metadata.

    passMetadataBuffer(buffer, mSlots[mCurrentSlot].mGraphicBuffer->handle);

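    // Hold a reference on behalf of the encoder; the matching slot is released
    // back to the BufferQueue in signalBufferReturned() once the encoder is
    // done with this MediaBuffer.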
    (*buffer)->setObserver(this);
    (*buffer)->add_ref();
    (*buffer)->meta_data()->setInt64(kKeyTime, mCurrentTimestamp / 1000);
    ALOGV("Frames encoded = %d, timestamp = %lld, time diff = %lld",
            mNumFramesEncoded, mCurrentTimestamp / 1000,
            mCurrentTimestamp / 1000 - prevTimeStamp / 1000);

    ++mNumPendingBuffers;

#if DEBUG_PENDING_BUFFERS
    mPendingBuffers.push_back(*buffer);
#endif

    ALOGV("returning mbuf %p", *buffer);

    return OK;
}

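// Recover the buffer_handle_t that passMetadataBuffer() stored right after the
// 4-byte metadata type tag at the start of the MediaBuffer payload.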
static buffer_handle_t getMediaBufferHandle(MediaBuffer *buffer) {
    // need to convert to char* for pointer arithmetic and then
    // copy the byte stream into our handle
    buffer_handle_t bufferHandle;
    memcpy(&bufferHandle, (char*)(buffer->data()) + 4, sizeof(buffer_handle_t));
    return bufferHandle;
}

void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned");

    bool foundBuffer = false;

    Mutex::Autolock lock(mMutex);

    buffer_handle_t bufferHandle = getMediaBufferHandle(buffer);

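    // First drop the handle from the list of buffers currently out with the
    // encoder, then find the matching BufferQueue slot so it can be released
    // back to the producer.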
    for (size_t i = 0; i < mCurrentBuffers.size(); i++) {
        if (mCurrentBuffers[i]->handle == bufferHandle) {
            mCurrentBuffers.removeAt(i);
            foundBuffer = true;
            break;
        }
    }

    if (!foundBuffer) {
        ALOGW("returned buffer was not found in the current buffer list");
    }

    for (int id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) {
        if (mSlots[id].mGraphicBuffer == NULL) {
            continue;
        }

        if (bufferHandle == mSlots[id].mGraphicBuffer->handle) {
            ALOGV("Slot %d returned, matches handle = %p", id,
                    mSlots[id].mGraphicBuffer->handle);

            mBufferQueue->releaseBuffer(id, mSlots[id].mFrameNumber,
                                        EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
                                        Fence::NO_FENCE);

            buffer->setObserver(0);
            buffer->release();

            foundBuffer = true;
            break;
        }
    }

    if (!foundBuffer) {
        CHECK(!"signalBufferReturned: bogus buffer");
    }

#if DEBUG_PENDING_BUFFERS
    for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
        if (mPendingBuffers.itemAt(i) == buffer) {
            mPendingBuffers.removeAt(i);
            break;
        }
    }
#endif

    --mNumPendingBuffers;
    mMediaBuffersAvailableCondition.broadcast();
}

// Part of the BufferQueue::ConsumerListener
void SurfaceMediaSource::onFrameAvailable() {
    ALOGV("onFrameAvailable");

    sp<FrameAvailableListener> listener;
    { // scope for the lock
        Mutex::Autolock lock(mMutex);
        mFrameAvailableCondition.broadcast();
        listener = mFrameAvailableListener;
    }

    if (listener != NULL) {
        ALOGV("actually calling onFrameAvailable");
        listener->onFrameAvailable();
    }
}

// SurfaceMediaSource hijacks this event to mean that the producer is
// disconnecting from the BufferQueue and that it should stop recording.
void SurfaceMediaSource::onBuffersReleased() {
    ALOGV("onBuffersReleased");

    Mutex::Autolock lock(mMutex);

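    // Wake any reader blocked in read() and drop the cached GraphicBuffer
    // references so the freed slots can be reused by a new producer.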
    mFrameAvailableCondition.signal();

    for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
        mSlots[i].mGraphicBuffer = 0;
    }
}

} // end of namespace android