/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MEDIA_SYNC_H
#define MEDIA_SYNC_H

#include <gui/IConsumerListener.h>
#include <gui/IProducerListener.h>

#include <media/AudioResamplerPublic.h>
#include <media/AVSyncSettings.h>
#include <media/stagefright/foundation/AHandler.h>

#include <utils/Condition.h>
#include <utils/KeyedVector.h>
#include <utils/Mutex.h>

namespace android {

class AudioTrack;
class BufferItem;
class Fence;
class GraphicBuffer;
class IGraphicBufferConsumer;
class IGraphicBufferProducer;
struct MediaClock;
struct VideoFrameScheduler;

// MediaSync manages media playback and its synchronization to a media clock
// source. It can also be used for video-only playback.
//
// For video playback, it requires an output surface and provides an input
// surface. It then controls the rendering of input buffers (buffers queued to
// the input surface) on the output surface so that it happens at the
// appropriate time.
//
// For audio playback, it requires an audio track and takes updates about the
// audio data that has been rendered, so it can maintain the media clock when
// the audio track serves as the media clock source. (TODO: move audio
// rendering from Java to native code.)
//
// It can use the audio track or the video track as the media clock source, as
// well as an external clock. (TODO: actually support an external clock as the
// media clock source; use the video track as the media clock source for
// audio-and-video streams.)
//
// In video-only mode, MediaSync plays back every video frame, even if a frame
// arrives late according to its timestamp and the previous frame's.
//
// The client needs to configure a surface (for output video rendering) and an
// audio track (for querying information about audio rendering) on the
// MediaSync.
//
// The client then needs to obtain an input surface from MediaSync and render
// video frames onto that surface. Internally, MediaSync receives those video
// frames and renders them onto the output surface at the appropriate time.
//
// The client needs to call updateQueuedAudioData() immediately after it writes
// audio data to the audio track. That information is used to update the media
// clock.
//
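// A rough usage sketch (a non-authoritative example: error checking is
// omitted, and |outputSurface| and |audioTrack| stand for a client-side
// android::Surface and android::AudioTrack that are assumed to be created
// elsewhere):
//
//     sp<MediaSync> sync = MediaSync::create();
//     sync->setSurface(outputSurface->getIGraphicBufferProducer());
//     sync->setAudioTrack(audioTrack);
//
//     sp<IGraphicBufferProducer> inputProducer;
//     sync->createInputSurface(&inputProducer);
//     // Wrap |inputProducer| in a Surface and render decoded video frames
//     // onto it.
//
//     // Immediately after each write of |bytesWritten| bytes to |audioTrack|
//     // whose first frame has presentation time |firstFramePtsUs|:
//     sync->updateQueuedAudioData(bytesWritten, firstFramePtsUs);
//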
class MediaSync : public AHandler {
public:
    // Create an instance of MediaSync.
    static sp<MediaSync> create();

    // Called when MediaSync is used to render video. It should be called
    // before createInputSurface().
    status_t setSurface(const sp<IGraphicBufferProducer> &output);

    // Called when an audio track is used as the media clock source. It should
    // be called before updateQueuedAudioData().
    status_t setAudioTrack(const sp<AudioTrack> &audioTrack);

    // Create the input surface on which the client should render video
    // frames. Those frames are internally directed to the output surface for
    // rendering at the appropriate time.
    status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);

    // Update the size of just-written audio data and the presentation
    // timestamp of the first frame of that audio data. It should be called
    // immediately after the client writes audio data to the AudioTrack.
    // This function assumes a continuous audio stream.
    // TODO: support gaps or backwards updates.
    status_t updateQueuedAudioData(
            size_t sizeInBytes, int64_t presentationTimeUs);

    // Set the consumer name of the input queue.
    void setName(const AString &name);

    // Get the media clock used by the MediaSync so that the client can obtain
    // the corresponding media time or real time via
    // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
    sp<const MediaClock> getMediaClock();
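    // For example, to translate the current real time into the corresponding
    // media time (a sketch only; the MediaClock::getMediaTime() signature is
    // assumed here and should be checked against MediaClock.h):
    //
    //     sp<const MediaClock> clock = sync->getMediaClock();
    //     int64_t mediaUs = -1;
    //     if (clock->getMediaTime(ALooper::GetNowUs(), &mediaUs) == OK) {
    //         // |mediaUs| is the media time being presented right now.
    //     }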
    // Flush the MediaSync.
    void flush();

    // Set the video frame rate hint - this is used by the video
    // FrameScheduler.
    status_t setVideoFrameRateHint(float rate);

    // Get the video frame rate measurement from the FrameScheduler.
    // Returns -1 if there is no measurement.
    float getVideoFrameRate();

    // Set the sync settings parameters.
    status_t setSyncSettings(const AVSyncSettings &syncSettings);

    // Get the sync settings parameters.
    void getSyncSettings(AVSyncSettings *syncSettings /* nonnull */);

    // Set the playback rate using playback settings.
    // This method can be called any time.
    status_t setPlaybackSettings(const AudioPlaybackRate &rate);

    // Get the playback rate (playback settings parameters).
    void getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);
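    // For example, to play back at double speed while leaving the other
    // timestretch parameters unchanged (a sketch; |sync| is an already
    // configured MediaSync instance):
    //
    //     AudioPlaybackRate rate;
    //     sync->getPlaybackSettings(&rate);
    //     rate.mSpeed = 2.0f;
    //     status_t err = sync->setPlaybackSettings(rate);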
    // Get the play time for pending audio frames in the audio sink.
    status_t getPlayTimeForPendingAudioFrames(int64_t *outTimeUs);

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    enum {
        kWhatDrainVideo = 'dVid',
    };

    // This is a thin wrapper class that lets us listen to
    // IConsumerListener::onFrameAvailable from mInput.
    class InputListener : public BnConsumerListener,
                          public IBinder::DeathRecipient {
    public:
        InputListener(const sp<MediaSync> &sync);
        virtual ~InputListener();

        // From IConsumerListener
        virtual void onFrameAvailable(const BufferItem &item);

        // From IConsumerListener
        // We don't care about released buffers because we detach each buffer
        // as soon as we acquire it. See the comment for onBufferReleased
        // below for some clarifying notes about the name.
        virtual void onBuffersReleased() {}

        // From IConsumerListener
        // We don't care about sideband streams, since we won't relay them.
        virtual void onSidebandStreamChanged();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
    };

    // This is a thin wrapper class that lets us listen to
    // IProducerListener::onBufferReleased from mOutput.
    class OutputListener : public BnProducerListener,
                           public IBinder::DeathRecipient {
    public:
        OutputListener(const sp<MediaSync> &sync,
                       const sp<IGraphicBufferProducer> &output);
        virtual ~OutputListener();

        // From IProducerListener
        virtual void onBufferReleased();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
        sp<IGraphicBufferProducer> mOutput;
    };

    // mIsAbandoned is set to true when the input or output dies.
    // Once the MediaSync has been abandoned by one side, it will disconnect
    // from the other side and not attempt to communicate with it further.
    bool mIsAbandoned;

    mutable Mutex mMutex;
    Condition mReleaseCondition;
    size_t mNumOutstandingBuffers;
    sp<IGraphicBufferConsumer> mInput;
    sp<IGraphicBufferProducer> mOutput;
    int mUsageFlagsFromOutput;
    uint32_t mMaxAcquiredBufferCount; // max acquired buffer count
    bool mReturnPendingInputFrame;    // set while we are pending before acquiring an input frame

    sp<AudioTrack> mAudioTrack;
    uint32_t mNativeSampleRateInHz;
    int64_t mNumFramesWritten;
    bool mHasAudio;

    int64_t mNextBufferItemMediaUs;
    List<BufferItem> mBufferItems;
    sp<VideoFrameScheduler> mFrameScheduler;

    // Keep track of buffers received from |mInput|. This is needed because
    // it's possible that the consumer of |mOutput| could return a different
    // GraphicBuffer::handle (e.g., due to passing buffers through IPC),
    // and that could cause problems if the producer of |mInput| only
    // supports pre-registered buffers.
    KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersFromInput;

    // Keep track of buffers sent to |mOutput|. When a new output surface
    // comes in, those buffers will be returned to the input and the old
    // output surface will be disconnected immediately.
    KeyedVector<uint64_t, sp<GraphicBuffer> > mBuffersSentToOutput;

    sp<ALooper> mLooper;
    float mPlaybackRate;

    AudioPlaybackRate mPlaybackSettings;
    AVSyncSettings mSyncSettings;

    sp<MediaClock> mMediaClock;

    MediaSync();

    // Must be accessed through RefBase.
    virtual ~MediaSync();

    int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
    int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
    int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);

    void onDrainVideo_l();

    // This implements the onFrameAvailable callback from IConsumerListener.
    // It gets called from an InputListener.
    // During this callback, we detach the buffer from the input and queue
    // it for rendering on the output. This call can block if there are too
    // many outstanding buffers. If it blocks, it will resume when
    // onBufferReleasedByOutput releases a buffer back to the input.
    void onFrameAvailableFromInput();

    // Send |bufferItem| to the output for rendering.
    void renderOneBufferItem_l(const BufferItem &bufferItem);

    // This implements the onBufferReleased callback from IProducerListener.
    // It gets called from an OutputListener.
    // During this callback, we detach the buffer from the output and release
    // it to the input. A blocked onFrameAvailable call will be allowed to
    // proceed.
    void onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output);

    // Return |buffer| back to the input.
    void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);

    // When this is called, the MediaSync disconnects from (i.e., abandons)
    // its input or output, and signals any waiting onFrameAvailable calls to
    // wake up. This must be called with mMutex locked.
    void onAbandoned_l(bool isInput);

    // Set the playback at a desired speed.
    // This method can be called any time.
    // |rate| is the ratio between the desired speed and normal speed, and
    // must be non-negative. The meaning of rate values:
    //   1.0 -- normal playback
    //   0.0 -- stop or pause
    //   larger than 1.0 -- faster than normal speed
    //   between 0.0 and 1.0 -- slower than normal speed
    void updatePlaybackRate_l(float rate);

    // Apply new sync settings.
    void resync_l();

    // Apply playback settings only - without resyncing or updating the
    // playback rate.
    status_t setPlaybackSettings_l(const AudioPlaybackRate &rate);

    // Helper.
    bool isPlaying() { return mPlaybackRate != 0.0; }

    DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
};

} // namespace android

#endif