/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.util.Log;

import java.nio.ByteBuffer;
import java.util.LinkedList;

/**
 * Class for playing audio by using audio track.
 * The {@link AudioTrack#write(byte[], int, int)} and {@link AudioTrack#write(short[], int, int)}
 * methods block until all data has been written to the system. In order to avoid blocking,
 * this class queues incoming buffers and drains them to the audio sink with non-blocking
 * writes ({@link AudioTrack#WRITE_NON_BLOCKING}) from {@link #process()}.
 */
public class NonBlockingAudioTrack {
    private static final String TAG = NonBlockingAudioTrack.class.getSimpleName();

    /** A chunk of PCM data waiting to be written to the audio sink. */
    private static class QueueElement {
        ByteBuffer data;
        int size;
        long pts;
    }

    private AudioTrack mAudioTrack;
    private int mSampleRate;
    private int mNumBytesQueued = 0;
    private LinkedList<QueueElement> mQueue = new LinkedList<QueueElement>();
    private boolean mStopped;

    public NonBlockingAudioTrack(int sampleRate, int channelCount, boolean hwAvSync,
            int audioSessionId) {
        int channelConfig;
        switch (channelCount) {
            case 1:
                channelConfig = AudioFormat.CHANNEL_OUT_MONO;
                break;
            case 2:
                channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
                break;
            case 6:
                channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
                break;
            default:
                throw new IllegalArgumentException("Unsupported channel count: " + channelCount);
        }

        int minBufferSize =
                AudioTrack.getMinBufferSize(
                        sampleRate,
                        channelConfig,
                        AudioFormat.ENCODING_PCM_16BIT);

        int bufferSize = 2 * minBufferSize;

        if (!hwAvSync) {
            mAudioTrack = new AudioTrack(
                    AudioManager.STREAM_MUSIC,
                    sampleRate,
                    channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT,
                    bufferSize,
                    AudioTrack.MODE_STREAM);
        } else {
            // Build the AudioTrack using AudioAttributes and FLAG_HW_AV_SYNC so that the
            // hardware can synchronize audio against the video stream (tunneled playback).
            AudioAttributes audioAttributes = new AudioAttributes.Builder()
                    .setLegacyStreamType(AudioManager.STREAM_MUSIC)
                    .setFlags(AudioAttributes.FLAG_HW_AV_SYNC)
                    .build();
            AudioFormat audioFormat = new AudioFormat.Builder()
                    .setChannelMask(channelConfig)
                    .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                    .setSampleRate(sampleRate)
                    .build();
            mAudioTrack = new AudioTrack(audioAttributes, audioFormat, bufferSize,
                    AudioTrack.MODE_STREAM, audioSessionId);
        }

        mSampleRate = sampleRate;
    }

    /** Returns the playback position, derived from the playback head, in microseconds. */
    public long getAudioTimeUs() {
        int numFramesPlayed = mAudioTrack.getPlaybackHeadPosition();

        return (numFramesPlayed * 1000000L) / mSampleRate;
    }

    /**
     * Returns the most recent {@link AudioTimestamp}. Note that
     * {@link AudioTrack#getTimestamp(AudioTimestamp)} returns a boolean indicating whether the
     * timestamp was updated; this helper ignores it, so the returned value may still be zeroed
     * if no timestamp is available yet.
     */
    public AudioTimestamp getTimestamp() {
        AudioTimestamp timestamp = new AudioTimestamp();
        mAudioTrack.getTimestamp(timestamp);
        return timestamp;
    }
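    // Illustrative note on consuming the timestamp above (the local names in this sketch,
    // such as "track" and "sampleRateHz", are placeholders, not members of this class):
    // a caller typically extrapolates the current frame position from the last reported
    // AudioTimestamp like so:
    //
    //     AudioTimestamp ts = track.getTimestamp();
    //     long nowNs = System.nanoTime();
    //     long estimatedFrames =
    //             ts.framePosition + (nowNs - ts.nanoTime) * sampleRateHz / 1_000_000_000L;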
    public int getNumBytesQueued() {
        return mNumBytesQueued;
    }

    public void play() {
        mStopped = false;
        mAudioTrack.play();
    }

    public void stop() {
        if (mQueue.isEmpty()) {
            mAudioTrack.stop();
            mNumBytesQueued = 0;
        } else {
            // Defer the actual stop until process() has drained the queued data.
            mStopped = true;
        }
    }

    public void pause() {
        mAudioTrack.pause();
    }

    public void flush() {
        if (mAudioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            // Flushing has no effect while the track is still playing.
            return;
        }
        mAudioTrack.flush();
        mQueue.clear();
        mNumBytesQueued = 0;
        mStopped = false;
    }

    public void release() {
        mQueue.clear();
        mNumBytesQueued = 0;
        mAudioTrack.release();
        mAudioTrack = null;
        mStopped = false;
    }

    public void process() {
        while (!mQueue.isEmpty()) {
            QueueElement element = mQueue.peekFirst();
            int written = mAudioTrack.write(element.data, element.size,
                    AudioTrack.WRITE_NON_BLOCKING, element.pts);
            if (written < 0) {
                throw new RuntimeException("AudioTrack.write() failed: " + written);
            }

            mNumBytesQueued -= written;
            element.size -= written;
            if (element.size != 0) {
                // The sink cannot accept more data right now; retry on the next call.
                break;
            }
            mQueue.removeFirst();
        }
        if (mStopped) {
            mAudioTrack.stop();
            mNumBytesQueued = 0;
            mStopped = false;
        }
    }

    public int getPlayState() {
        return mAudioTrack.getPlayState();
    }

    public void write(ByteBuffer data, int size, long pts) {
        QueueElement element = new QueueElement();
        element.data = data;
        element.size = size;
        element.pts = pts;

        // Accumulate the number of bytes handed to the queue.
        mNumBytesQueued += size;
        mQueue.add(element);
    }
}
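/**
 * Minimal usage sketch, kept package-private and separate from the class above. It shows the
 * intended driving pattern: enqueue buffers with {@link NonBlockingAudioTrack#write} and call
 * {@link NonBlockingAudioTrack#process} periodically until the queue drains. The class name,
 * the buffer/pts lists, and the 10 ms polling interval are illustrative assumptions, not part
 * of the CTS code above.
 */
class NonBlockingAudioTrackUsageSketch {
    /** Feeds pre-decoded 16-bit PCM buffers and their presentation times (in µs) to the track. */
    static void pump(NonBlockingAudioTrack track,
            java.util.List<ByteBuffer> buffers,
            java.util.List<Long> ptsUs) throws InterruptedException {
        track.play();
        for (int i = 0; i < buffers.size(); i++) {
            ByteBuffer buffer = buffers.get(i);
            // write() only queues the data; the actual non-blocking AudioTrack.write()
            // happens inside process().
            track.write(buffer, buffer.remaining(), ptsUs.get(i));
            track.process();
            Thread.sleep(10);
        }
        // Keep pumping until every queued byte has been handed to the audio sink.
        while (track.getNumBytesQueued() > 0) {
            track.process();
            Thread.sleep(10);
        }
        track.stop();
        track.release();
    }
}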