// MediaSync.h — revision c8db9712d9abe9b0d74193ea7d7cff428e32e62c
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MEDIA_SYNC_H
#define MEDIA_SYNC_H

#include <gui/IConsumerListener.h>
#include <gui/IProducerListener.h>

#include <media/stagefright/foundation/AHandler.h>

#include <utils/Condition.h>
#include <utils/Mutex.h>

namespace android {

class AudioTrack;
class BufferItem;
class Fence;
class GraphicBuffer;
class IGraphicBufferConsumer;
class IGraphicBufferProducer;
struct MediaClock;

// MediaSync manages media playback and its synchronization to a media clock
// source. It can also be used for video-only playback.
//
// For video playback, it requires an output surface and provides an input
// surface. It then controls the rendering of input buffers (buffers queued to
// the input surface) on the output surface to happen at the appropriate time.
//
// For audio playback, it requires an audio track and takes updates of
// information of rendered audio data to maintain the media clock when the
// audio track serves as media clock source. (TODO: move audio rendering from
// JAVA to native code).
//
// It can use the audio or video track as media clock source, as well as an
// external clock. (TODO: actually support external clock as media clock
// source; use video track as media clock source for audio-and-video stream).
//
// In video-only mode, MediaSync will play back every video frame even though
// a video frame arrives late based on its timestamp and the last frame's.
//
// The client needs to configure a surface (for output video rendering) and an
// audio track (for querying information of audio rendering) for MediaSync.
//
// Then the client needs to obtain a surface from MediaSync and render video
// frames onto that surface. Internally, the MediaSync will receive those video
// frames and render them onto the output surface at the appropriate time.
//
// The client needs to call updateQueuedAudioData() immediately after it writes
// audio data to the audio track. Such information will be used to update the
// media clock.
//
class MediaSync : public AHandler {
public:
    // Create an instance of MediaSync.
    static sp<MediaSync> create();

    // Called when MediaSync is used to render video. It should be called
    // before createInputSurface().
    status_t configureSurface(const sp<IGraphicBufferProducer> &output);

    // Called when an audio track is used as media clock source. It should be
    // called before updateQueuedAudioData().
    // |nativeSampleRateInHz| is the sample rate of audio data fed into the
    // audio track. It's the same number used to create the AudioTrack.
    status_t configureAudioTrack(
            const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz);

    // Create a surface for the client to render video frames. This is the
    // surface on which the client should render video frames. Those video
    // frames will be internally directed to the output surface for rendering
    // at the appropriate time.
    status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);

    // Update the just-rendered audio data size and the presentation timestamp
    // of the first frame of that audio data. It should be called immediately
    // after the client writes audio data into the AudioTrack.
    // This function assumes a continuous audio stream.
    // TODO: support gap or backwards updates.
    status_t updateQueuedAudioData(
            size_t sizeInBytes, int64_t presentationTimeUs);

    // Set the consumer name of the input queue.
    void setName(const AString &name);

    // Set the playback at a desired speed.
    // This method can be called any time.
    // |rate| is the ratio between desired speed and the normal one, and should
    // be non-negative. The meaning of rate values:
    // 1.0 -- normal playback
    // 0.0 -- stop or pause
    // larger than 1.0 -- faster than normal speed
    // between 0.0 and 1.0 -- slower than normal speed
    status_t setPlaybackRate(float rate);

    // Get the media clock used by the MediaSync so that the client can obtain
    // the corresponding media time or real time via
    // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
    sp<const MediaClock> getMediaClock();

protected:
    // AHandler callback; dispatches messages (e.g. kWhatDrainVideo) posted to
    // mLooper.
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    enum {
        kWhatDrainVideo = 'dVid',
    };

    // Upper bound on buffers simultaneously detached from the input and not
    // yet released by the output; onFrameAvailableFromInput() blocks when it
    // is reached (see mReleaseCondition).
    static const int MAX_OUTSTANDING_BUFFERS = 2;

    // This is a thin wrapper class that lets us listen to
    // IConsumerListener::onFrameAvailable from mInput.
    class InputListener : public BnConsumerListener,
                          public IBinder::DeathRecipient {
    public:
        InputListener(const sp<MediaSync> &sync);
        virtual ~InputListener();

        // From IConsumerListener
        virtual void onFrameAvailable(const BufferItem &item);

        // From IConsumerListener
        // We don't care about released buffers because we detach each buffer
        // as soon as we acquire it. See the comment for onBufferReleased
        // below for some clarifying notes about the name.
        virtual void onBuffersReleased() {}

        // From IConsumerListener
        // We don't care about sideband streams, since we won't relay them.
        virtual void onSidebandStreamChanged();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
    };

    // This is a thin wrapper class that lets us listen to
    // IProducerListener::onBufferReleased from mOutput.
    class OutputListener : public BnProducerListener,
                           public IBinder::DeathRecipient {
    public:
        OutputListener(const sp<MediaSync> &sync);
        virtual ~OutputListener();

        // From IProducerListener
        virtual void onBufferReleased();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
    };

    // mIsAbandoned is set to true when the input or output dies.
    // Once the MediaSync has been abandoned by one side, it will disconnect
    // from the other side and not attempt to communicate with it further.
    bool mIsAbandoned;

    // Guards the mutable state below. mutable so const methods could lock it;
    // presumably all fields below it are protected by it — confirmed for
    // onAbandoned_l() (see its comment), assumed for the rest.
    mutable Mutex mMutex;
    // Signalled when the output releases a buffer, unblocking a waiting
    // onFrameAvailableFromInput().
    Condition mReleaseCondition;
    // Count of buffers currently detached from mInput and queued/held by
    // mOutput; bounded by MAX_OUTSTANDING_BUFFERS.
    size_t mNumOutstandingBuffers;
    sp<IGraphicBufferConsumer> mInput;
    sp<IGraphicBufferProducer> mOutput;

    sp<AudioTrack> mAudioTrack;
    // Sample rate passed to configureAudioTrack().
    uint32_t mNativeSampleRateInHz;
    // Running total of audio frames reported via updateQueuedAudioData().
    int64_t mNumFramesWritten;
    bool mHasAudio;

    // Media timestamp (microseconds) of the next buffer item to drain.
    int64_t mNextBufferItemMediaUs;
    // Video buffers acquired from the input, pending rendering to the output.
    List<BufferItem> mBufferItems;
    sp<ALooper> mLooper;
    // Current playback rate as set by setPlaybackRate(); 0.0 means paused.
    float mPlaybackRate;

    sp<MediaClock> mMediaClock;

    MediaSync();

    // Must be accessed through RefBase
    virtual ~MediaSync();

    // Convert a media timestamp to the real (system) time at which it should
    // be rendered, given the current clock state at |nowUs|.
    int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
    // NOTE: the "_l" suffix on the methods below presumably means "call with
    // mMutex held" — explicitly documented only for onAbandoned_l(); confirm
    // for the others against MediaSync.cpp.
    int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
    int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);

    // Render all pending video buffer items whose time has come.
    void onDrainVideo_l();

    // This implements the onFrameAvailable callback from IConsumerListener.
    // It gets called from an InputListener.
    // During this callback, we detach the buffer from the input, and queue
    // it for rendering on the output. This call can block if there are too
    // many outstanding buffers. If it blocks, it will resume when
    // onBufferReleasedByOutput releases a buffer back to the input.
    void onFrameAvailableFromInput();

    // Send |bufferItem| to the output for rendering.
    void renderOneBufferItem_l(const BufferItem &bufferItem);

    // This implements the onBufferReleased callback from IProducerListener.
    // It gets called from an OutputListener.
    // During this callback, we detach the buffer from the output, and release
    // it to the input. A blocked onFrameAvailable call will be allowed to
    // proceed.
    void onBufferReleasedByOutput();

    // Return |buffer| back to the input.
    void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);

    // When this is called, the MediaSync disconnects from (i.e., abandons)
    // its input or output, and signals any waiting onFrameAvailable calls to
    // wake up. This must be called with mMutex locked.
    void onAbandoned_l(bool isInput);

    // helper. Exact comparison against 0.0 is intentional: pause is set as
    // the literal 0.0 via setPlaybackRate(), not a computed value.
    bool isPlaying() { return mPlaybackRate != 0.0; }

    DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
};

}  // namespace android

#endif