MediaSync.h revision 99d1f78c9b16b5668e78c353373e0e7f4592cab9
1/*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef MEDIA_SYNC_H
18#define MEDIA_SYNC_H
19
20#include <gui/IConsumerListener.h>
21#include <gui/IProducerListener.h>
22
23#include <media/stagefright/foundation/AHandler.h>
24
25#include <utils/Condition.h>
26#include <utils/Mutex.h>
27
28namespace android {
29
// Forward declarations: only pointers/references to these types appear in this
// header, so the full definitions are not needed here. This keeps the include
// graph light for clients of MediaSync.
class AudioTrack;
class BufferItem;
class Fence;
class GraphicBuffer;
class IGraphicBufferConsumer;
class IGraphicBufferProducer;
struct MediaClock;
37
// MediaSync manages media playback and its synchronization to a media clock
// source. It can also be used for video-only playback.
//
// For video playback, it requires an output surface and provides an input
// surface. It then controls the rendering of input buffers (buffers queued to
// the input surface) on the output surface so that rendering happens at the
// appropriate time.
//
// For audio playback, it requires an audio track and takes updates of
// information about rendered audio data in order to maintain the media clock
// when the audio track serves as the media clock source. (TODO: move audio
// rendering from Java to native code.)
//
// It can use the audio or video track as the media clock source, as well as an
// external clock. (TODO: actually support an external clock as the media clock
// source; use the video track as the media clock source for audio-and-video
// streams.)
//
// In video-only mode, MediaSync plays back every video frame, even if a frame
// arrives late according to its own timestamp and the previous frame's.
//
// The client needs to configure a surface (for output video rendering) and an
// audio track (for querying information about audio rendering) on MediaSync.
//
// The client then obtains a surface from MediaSync and renders video frames
// onto that surface. Internally, MediaSync receives those video frames and
// renders them onto the output surface at the appropriate time.
//
// The client must call updateQueuedAudioData() immediately after it writes
// audio data to the audio track. That information is used to update the media
// clock.
//
class MediaSync : public AHandler {
public:
    // Create an instance of MediaSync.
    static sp<MediaSync> create();

    // Called when MediaSync is used to render video. It should be called
    // before createInputSurface().
    status_t configureSurface(const sp<IGraphicBufferProducer> &output);

    // Called when an audio track is used as the media clock source. It should
    // be called before updateQueuedAudioData().
    // |nativeSampleRateInHz| is the sample rate of the audio data fed into the
    // audio track. It is the same number used to create the AudioTrack.
    status_t configureAudioTrack(
            const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz);

    // Create a surface for the client to render video frames on. This is the
    // surface on which the client should render video frames. Those video
    // frames will be internally directed to the output surface for rendering
    // at the appropriate time.
    status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);

    // Update the size of just-rendered audio data and the presentation
    // timestamp of the first frame of that audio data. It should be called
    // immediately after the client writes audio data into the AudioTrack.
    // This function assumes a continuous audio stream.
    // TODO: support gaps or backwards updates.
    status_t updateQueuedAudioData(
            size_t sizeInBytes, int64_t presentationTimeUs);

    // Set the consumer name of the input queue.
    void setName(const AString &name);

    // Set playback to a desired speed.
    // This method can be called at any time.
    // |rate| is the ratio between the desired speed and the normal one, and
    // should be non-negative. The meaning of rate values:
    // 1.0 -- normal playback
    // 0.0 -- stop or pause
    // larger than 1.0 -- faster than normal speed
    // between 0.0 and 1.0 -- slower than normal speed
    status_t setPlaybackRate(float rate);

    // Get the media clock used by the MediaSync so that the client can obtain
    // the corresponding media time or real time via
    // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
    sp<const MediaClock> getMediaClock();

    // Get the play time for pending audio frames in the audio sink.
    status_t getPlayTimeForPendingAudioFrames(int64_t *outTimeUs);

protected:
    // AHandler callback; messages (e.g. kWhatDrainVideo) are dispatched here
    // on mLooper's thread.
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    enum {
        // AMessage "what" code (multi-character constant) used to schedule
        // draining of pending video buffer items.
        kWhatDrainVideo = 'dVid',
    };

    // Maximum number of buffers that may be detached from the input at once.
    // onFrameAvailableFromInput() blocks once this many buffers are
    // outstanding, until the output releases one (see onBufferReleasedByOutput).
    static const int MAX_OUTSTANDING_BUFFERS = 2;

    // This is a thin wrapper class that lets us listen to
    // IConsumerListener::onFrameAvailable from mInput.
    class InputListener : public BnConsumerListener,
                          public IBinder::DeathRecipient {
    public:
        InputListener(const sp<MediaSync> &sync);
        virtual ~InputListener();

        // From IConsumerListener
        virtual void onFrameAvailable(const BufferItem &item);

        // From IConsumerListener
        // We don't care about released buffers because we detach each buffer
        // as soon as we acquire it. (Despite the similar name, this is a
        // different callback from IProducerListener::onBufferReleased, which
        // OutputListener handles.)
        virtual void onBuffersReleased() {}

        // From IConsumerListener
        // We don't care about sideband streams, since we won't relay them.
        virtual void onSidebandStreamChanged();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
    };

    // This is a thin wrapper class that lets us listen to
    // IProducerListener::onBufferReleased from mOutput.
    class OutputListener : public BnProducerListener,
                           public IBinder::DeathRecipient {
    public:
        OutputListener(const sp<MediaSync> &sync);
        virtual ~OutputListener();

        // From IProducerListener
        virtual void onBufferReleased();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
    };

    // mIsAbandoned is set to true when the input or output dies.
    // Once the MediaSync has been abandoned by one side, it will disconnect
    // from the other side and not attempt to communicate with it further.
    bool mIsAbandoned;

    // Protects the mutable state below; the _l-suffixed methods are expected
    // to be called with this lock held (explicitly stated for onAbandoned_l).
    mutable Mutex mMutex;
    // Signaled when the output releases a buffer, waking a blocked
    // onFrameAvailableFromInput(); also signaled on abandonment.
    Condition mReleaseCondition;
    // Count of buffers currently detached from mInput and not yet returned;
    // bounded by MAX_OUTSTANDING_BUFFERS.
    size_t mNumOutstandingBuffers;
    sp<IGraphicBufferConsumer> mInput;   // consumer end of the input surface
    sp<IGraphicBufferProducer> mOutput;  // producer end of the output surface

    sp<AudioTrack> mAudioTrack;          // set by configureAudioTrack()
    uint32_t mNativeSampleRateInHz;
    int64_t mNumFramesWritten;           // running total from updateQueuedAudioData()
    bool mHasAudio;

    int64_t mNextBufferItemMediaUs;
    List<BufferItem> mBufferItems;       // video buffers awaiting render
    sp<ALooper> mLooper;
    float mPlaybackRate;                 // see setPlaybackRate()

    sp<MediaClock> mMediaClock;

    MediaSync();

    // Must be accessed through RefBase
    virtual ~MediaSync();

    // Map |mediaTimeUs| to a real (system) time given the current time
    // |nowUs| — presumably via mMediaClock (cf. MediaClock::getRealTimeFor).
    int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
    // Duration (in microseconds) of |numFrames| played at mNativeSampleRateInHz.
    int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
    // Media-time duration of audio actually played out by |nowUs|.
    int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);

    // Drain queued video buffer items (mBufferItems) whose render time has come.
    void onDrainVideo_l();

    // This implements the onFrameAvailable callback from IConsumerListener.
    // It gets called from an InputListener.
    // During this callback, we detach the buffer from the input, and queue
    // it for rendering on the output. This call can block if there are too
    // many outstanding buffers. If it blocks, it will resume when
    // onBufferReleasedByOutput releases a buffer back to the input.
    void onFrameAvailableFromInput();

    // Send |bufferItem| to the output for rendering.
    void renderOneBufferItem_l(const BufferItem &bufferItem);

    // This implements the onBufferReleased callback from IProducerListener.
    // It gets called from an OutputListener.
    // During this callback, we detach the buffer from the output, and release
    // it to the input. A blocked onFrameAvailable call will be allowed to proceed.
    void onBufferReleasedByOutput();

    // Return |buffer| back to the input.
    void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);

    // When this is called, the MediaSync disconnects from (i.e., abandons) its
    // input or output, and signals any waiting onFrameAvailable calls to wake
    // up. This must be called with mMutex locked.
    void onAbandoned_l(bool isInput);

    // Helper: playback is active whenever the rate is non-zero
    // (0.0 is the exact stop/pause sentinel; see setPlaybackRate()).
    bool isPlaying() { return mPlaybackRate != 0.0; }

    DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
};
239
240} // namespace android
241
242#endif
243