/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//#define LOG_NDEBUG 0
#define LOG_TAG "SurfaceMediaSource"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/SurfaceMediaSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <OMX_IVCommon.h>
#include <MetadataBufferType.h>

#include <ui/GraphicBuffer.h>
#include <gui/ISurfaceComposer.h>
#include <gui/IGraphicBufferAlloc.h>
#include <OMX_Component.h>

#include <utils/Log.h>
#include <utils/String8.h>

#include <private/gui/ComposerService.h>
namespace android {

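// SurfaceMediaSource is the consumer side of a BufferQueue: a producer (for
// example, code rendering into the Surface built on top of this BufferQueue)
// queues gralloc-backed frames, and an encoder pulls them one at a time
// through read(), which wraps each frame's buffer_handle_t in a metadata
// MediaBuffer.  A slot is only released back to the BufferQueue once the
// encoder returns the MediaBuffer via signalBufferReturned().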
SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeight) :
    mWidth(bufferWidth),
    mHeight(bufferHeight),
    mCurrentSlot(BufferQueue::INVALID_BUFFER_SLOT),
    mNumPendingBuffers(0),
    mCurrentTimestamp(0),
    mFrameRate(30),
    mStarted(false),
    mNumFramesReceived(0),
    mNumFramesEncoded(0),
    mFirstFrameTimestamp(0),
    mMaxAcquiredBufferCount(4),  // XXX double-check the default
    mUseAbsoluteTimestamps(false) {
    ALOGV("SurfaceMediaSource");

    if (bufferWidth == 0 || bufferHeight == 0) {
        ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight);
    }

    mBufferQueue = new BufferQueue(true);
    mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
    mBufferQueue->setSynchronousMode(true);
    mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
            GRALLOC_USAGE_HW_TEXTURE);

    sp<ISurfaceComposer> composer(ComposerService::getComposerService());

    // Note that we can't create an sp<...>(this) in a ctor that will not keep
    // a reference once the ctor ends, as that would cause the refcount of
    // 'this' to drop to 0 at the end of the ctor.  Since all we need is a
    // wp<...>, that's what we create.
    wp<BufferQueue::ConsumerListener> listener;
    sp<BufferQueue::ConsumerListener> proxy;
    listener = static_cast<BufferQueue::ConsumerListener*>(this);
    proxy = new BufferQueue::ProxyConsumerListener(listener);

    status_t err = mBufferQueue->consumerConnect(proxy);
    if (err != NO_ERROR) {
        ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)",
                strerror(-err), err);
    }
}

SurfaceMediaSource::~SurfaceMediaSource() {
    ALOGV("~SurfaceMediaSource");
    CHECK(!mStarted);
}

nsecs_t SurfaceMediaSource::getTimestamp() {
    ALOGV("getTimestamp");
    Mutex::Autolock lock(mMutex);
    return mCurrentTimestamp;
}

void SurfaceMediaSource::setFrameAvailableListener(
        const sp<FrameAvailableListener>& listener) {
    ALOGV("setFrameAvailableListener");
    Mutex::Autolock lock(mMutex);
    mFrameAvailableListener = listener;
}

void SurfaceMediaSource::dump(String8& result) const
{
    // Zero-initialize: the overload below appends this buffer as-is, so it
    // must not contain uninitialized stack data.
    char buffer[1024] = {0};
    dump(result, "", buffer, 1024);
}

void SurfaceMediaSource::dump(String8& result, const char* prefix,
        char* buffer, size_t SIZE) const
{
    Mutex::Autolock lock(mMutex);

    result.append(buffer);
    mBufferQueue->dump(result);
}

status_t SurfaceMediaSource::setFrameRate(int32_t fps)
{
    ALOGV("setFrameRate");
    Mutex::Autolock lock(mMutex);
    const int MAX_FRAME_RATE = 60;
    if (fps < 0 || fps > MAX_FRAME_RATE) {
        return BAD_VALUE;
    }
    mFrameRate = fps;
    return OK;
}

bool SurfaceMediaSource::isMetaDataStoredInVideoBuffers() const {
    ALOGV("isMetaDataStoredInVideoBuffers");
    return true;
}

int32_t SurfaceMediaSource::getFrameRate() const {
    ALOGV("getFrameRate");
    Mutex::Autolock lock(mMutex);
    return mFrameRate;
}

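// Start recording.  If present, the params MetaData supplies the start time
// (kKeyTime, in microseconds) and the buffer count advertised by the encoder
// (kKeyNumBuffers), which caps how many frames may be outstanding at once.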
status_t SurfaceMediaSource::start(MetaData *params)
{
    ALOGV("start");

    Mutex::Autolock lock(mMutex);

    CHECK(!mStarted);

    mStartTimeNs = 0;
    int64_t startTimeUs;
    int32_t bufferCount = 0;
    if (params) {
        if (params->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeNs = startTimeUs * 1000;
        }

        if (!params->findInt32(kKeyNumBuffers, &bufferCount)) {
            ALOGE("Failed to find the advertised buffer count");
            return UNKNOWN_ERROR;
        }

        if (bufferCount <= 1) {
            ALOGE("bufferCount %d is too small", bufferCount);
            return BAD_VALUE;
        }

        mMaxAcquiredBufferCount = bufferCount;
    }

    CHECK_GT(mMaxAcquiredBufferCount, 1);

    status_t err =
        mBufferQueue->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);

    if (err != OK) {
        return err;
    }

    mNumPendingBuffers = 0;
    mStarted = true;

    return OK;
}

status_t SurfaceMediaSource::setMaxAcquiredBufferCount(size_t count) {
    ALOGV("setMaxAcquiredBufferCount(%zu)", count);
    Mutex::Autolock lock(mMutex);

    CHECK_GT(count, 1);
    mMaxAcquiredBufferCount = count;

    return OK;
}

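// When enabled, read() skips the first-frame timestamp rebasing and treats the
// queued buffer timestamps as absolute times.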
status_t SurfaceMediaSource::setUseAbsoluteTimestamps() {
    ALOGV("setUseAbsoluteTimestamps");
    Mutex::Autolock lock(mMutex);
    mUseAbsoluteTimestamps = true;

    return OK;
}

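// Stop recording.  Blocks until every MediaBuffer handed out by read() has
// been returned through signalBufferReturned(), then disconnects the consumer
// end of the BufferQueue.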
status_t SurfaceMediaSource::stop()
{
    ALOGV("stop");
    Mutex::Autolock lock(mMutex);

    if (!mStarted) {
        return OK;
    }

    while (mNumPendingBuffers > 0) {
        ALOGI("Still waiting for %d buffers to be returned.",
                mNumPendingBuffers);

#if DEBUG_PENDING_BUFFERS
        for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
            ALOGI("%d: %p", i, mPendingBuffers.itemAt(i));
        }
#endif

        mMediaBuffersAvailableCondition.wait(mMutex);
    }

    mStarted = false;
    mFrameAvailableCondition.signal();
    mMediaBuffersAvailableCondition.signal();

    return mBufferQueue->consumerDisconnect();
}

sp<MetaData> SurfaceMediaSource::getFormat()
{
    ALOGV("getFormat");

    Mutex::Autolock lock(mMutex);
    sp<MetaData> meta = new MetaData;

    meta->setInt32(kKeyWidth, mWidth);
    meta->setInt32(kKeyHeight, mHeight);
    // The format is advertised as an opaque color format; the encoder later
    // determines the actual color format from the GL frames themselves.
    meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque);
    meta->setInt32(kKeyStride, mWidth);
    meta->setInt32(kKeySliceHeight, mHeight);
    meta->setInt32(kKeyFrameRate, mFrameRate);
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
    return meta;
}

// Pass only the metadata to the MediaBuffer. The metadata consists of
// two parts:
// 1. An integer indicating that the source is a gralloc buffer
//    (kMetadataBufferTypeGrallocSource).
// 2. The buffer_handle_t of that gralloc buffer. The encoder must interpret
//    this handle in order to encode the frame.
// --------------------------------------------------------------
// |  kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) |
// --------------------------------------------------------------
// Note: call only with mMutex held.
static void passMetadataBuffer(MediaBuffer **buffer,
        buffer_handle_t bufferHandle) {
    *buffer = new MediaBuffer(4 + sizeof(buffer_handle_t));
    char *data = (char *)(*buffer)->data();
    if (data == NULL) {
        ALOGE("Cannot allocate memory for metadata buffer!");
        return;
    }
    OMX_U32 type = kMetadataBufferTypeGrallocSource;
    memcpy(data, &type, 4);
    memcpy(data + 4, &bufferHandle, sizeof(buffer_handle_t));

    ALOGV("handle = %p, offset = %d, length = %d",
            bufferHandle, (*buffer)->range_offset(), (*buffer)->range_length());
}

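// Block until a frame is available (or the source is stopped), acquire it from
// the BufferQueue, and hand the caller a metadata MediaBuffer wrapping the
// frame's gralloc handle.  The slot is held until the encoder releases the
// MediaBuffer, which calls back into signalBufferReturned().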
status_t SurfaceMediaSource::read(MediaBuffer **buffer,
                                    const ReadOptions *options)
{
    ALOGV("read");
    Mutex::Autolock lock(mMutex);

    *buffer = NULL;

    while (mStarted && mNumPendingBuffers == mMaxAcquiredBufferCount) {
        mMediaBuffersAvailableCondition.wait(mMutex);
    }

    // Update the current buffer info
    // TODO: mCurrentSlot could be made a buffer state, since there can be
    // more than one "current" slot.

    BufferQueue::BufferItem item;
    // If the recording has started and the queue is empty, then just
    // wait here till the frames come in from the client side
    while (mStarted) {

        status_t err = mBufferQueue->acquireBuffer(&item);
        if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
            // wait for a buffer to be queued
            mFrameAvailableCondition.wait(mMutex);
        } else if (err == OK) {
            err = item.mFence->waitForever("SurfaceMediaSource::read");
            if (err) {
                ALOGW("read: failed to wait for buffer fence: %d", err);
            }

            // First time seeing the buffer?  Add it to the SMS slot.
            if (item.mGraphicBuffer != NULL) {
                mBufferSlot[item.mBuf] = item.mGraphicBuffer;
            }

            // Check the timing of this buffer: rebase timestamps against the
            // first received frame unless absolute timestamps are requested.
            if (mNumFramesReceived == 0 && !mUseAbsoluteTimestamps) {
                mFirstFrameTimestamp = item.mTimestamp;
                // Initial delay
                if (mStartTimeNs > 0) {
                    if (item.mTimestamp < mStartTimeNs) {
                        // This frame predates start of record, discard
                        mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY,
                                EGL_NO_SYNC_KHR, Fence::NO_FENCE);
                        continue;
                    }
                    mStartTimeNs = item.mTimestamp - mStartTimeNs;
                }
            }
            item.mTimestamp = mStartTimeNs + (item.mTimestamp - mFirstFrameTimestamp);

            mNumFramesReceived++;

            break;
        } else {
            ALOGE("read: acquire failed with error code %d", err);
            return ERROR_END_OF_STREAM;
        }

    }

    // If the loop was exited as a result of stopping the recording,
    // it is OK
    if (!mStarted) {
        ALOGV("Read: SurfaceMediaSource is stopped. Returning ERROR_END_OF_STREAM.");
        return ERROR_END_OF_STREAM;
    }

    mCurrentSlot = item.mBuf;

    // First time seeing the buffer?  Add it to the SMS slot.
    if (item.mGraphicBuffer != NULL) {
        mBufferSlot[mCurrentSlot] = item.mGraphicBuffer;
    }

    mCurrentBuffers.push_back(mBufferSlot[mCurrentSlot]);
    int64_t prevTimeStamp = mCurrentTimestamp;
    mCurrentTimestamp = item.mTimestamp;

    mNumFramesEncoded++;
    // Pass only the metadata (the gralloc handle) to the MediaBuffer.

    passMetadataBuffer(buffer, mBufferSlot[mCurrentSlot]->handle);

    (*buffer)->setObserver(this);
    (*buffer)->add_ref();
    (*buffer)->meta_data()->setInt64(kKeyTime, mCurrentTimestamp / 1000);
    ALOGV("Frames encoded = %d, timestamp = %lld, time diff = %lld",
            mNumFramesEncoded, mCurrentTimestamp / 1000,
            mCurrentTimestamp / 1000 - prevTimeStamp / 1000);

    ++mNumPendingBuffers;

#if DEBUG_PENDING_BUFFERS
    mPendingBuffers.push_back(*buffer);
#endif

    ALOGV("returning mbuf %p", *buffer);

    return OK;
}

static buffer_handle_t getMediaBufferHandle(MediaBuffer *buffer) {
    // need to convert to char* for pointer arithmetic and then
    // copy the byte stream into our handle
    buffer_handle_t bufferHandle;
    memcpy(&bufferHandle, (char*)(buffer->data()) + 4, sizeof(buffer_handle_t));
    return bufferHandle;
}

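// MediaBufferObserver callback, invoked when the encoder releases a MediaBuffer
// previously handed out by read(): drop our reference to the GraphicBuffer and
// release the matching slot back to the BufferQueue so the producer can reuse it.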
void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned");

    bool foundBuffer = false;

    Mutex::Autolock lock(mMutex);

    buffer_handle_t bufferHandle = getMediaBufferHandle(buffer);

    for (size_t i = 0; i < mCurrentBuffers.size(); i++) {
        if (mCurrentBuffers[i]->handle == bufferHandle) {
            mCurrentBuffers.removeAt(i);
            foundBuffer = true;
            break;
        }
    }

    if (!foundBuffer) {
        ALOGW("returned buffer was not found in the current buffer list");
    }

    for (int id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) {
        if (mBufferSlot[id] == NULL) {
            continue;
        }

        if (bufferHandle == mBufferSlot[id]->handle) {
            ALOGV("Slot %d returned, matches handle = %p", id,
                    mBufferSlot[id]->handle);

            mBufferQueue->releaseBuffer(id, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
                    Fence::NO_FENCE);

            buffer->setObserver(0);
            buffer->release();

            foundBuffer = true;
            break;
        }
    }

    if (!foundBuffer) {
        CHECK(!"signalBufferReturned: bogus buffer");
    }

#if DEBUG_PENDING_BUFFERS
    for (size_t i = 0; i < mPendingBuffers.size(); ++i) {
        if (mPendingBuffers.itemAt(i) == buffer) {
            mPendingBuffers.removeAt(i);
            break;
        }
    }
#endif

    --mNumPendingBuffers;
    mMediaBuffersAvailableCondition.broadcast();
}

// Part of the BufferQueue::ConsumerListener
void SurfaceMediaSource::onFrameAvailable() {
    ALOGV("onFrameAvailable");

    sp<FrameAvailableListener> listener;
    { // scope for the lock
        Mutex::Autolock lock(mMutex);
        mFrameAvailableCondition.broadcast();
        listener = mFrameAvailableListener;
    }

    if (listener != NULL) {
        ALOGV("actually calling onFrameAvailable");
        listener->onFrameAvailable();
    }
}

// SurfaceMediaSource hijacks this event to mean that the producer is
// disconnecting from the BufferQueue and that recording should stop.
void SurfaceMediaSource::onBuffersReleased() {
    ALOGV("onBuffersReleased");

    Mutex::Autolock lock(mMutex);

    mFrameAvailableCondition.signal();

    for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
       mBufferSlot[i] = 0;
    }
}

} // end of namespace android