/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.annotation.IntDef;
import android.annotation.IntRange;
import android.annotation.NonNull;
import android.annotation.TestApi;
import android.compat.annotation.UnsupportedAppUsage;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Arrays;
import java.util.Objects;

/**
 * The {@link AudioFormat} class is used to access a number of audio format and
 * channel configuration constants. They are for instance used
 * in {@link AudioTrack} and {@link AudioRecord}, as valid values in individual parameters of
 * constructors like {@link AudioTrack#AudioTrack(int, int, int, int, int, int)}, where the fourth
 * parameter is one of the <code>AudioFormat.ENCODING_*</code> constants.
 * The <code>AudioFormat</code> constants are also used in {@link MediaFormat} to specify
 * audio related values commonly used in media, such as for {@link MediaFormat#KEY_CHANNEL_MASK}.
 * <p>The {@link AudioFormat.Builder} class can be used to create instances of
 * the <code>AudioFormat</code> class.
 * Refer to
 * {@link AudioFormat.Builder} for documentation on the mechanics of the configuration and building
 * of such instances. Here we describe the main concepts that the <code>AudioFormat</code> class
 * allows you to convey in each instance; they are:
 * <ol>
 * <li><a href="#sampleRate">sample rate</a>
 * <li><a href="#encoding">encoding</a>
 * <li><a href="#channelMask">channel masks</a>
 * </ol>
 * <p>Closely associated with the <code>AudioFormat</code> is the notion of an
 * <a href="#audioFrame">audio frame</a>, which is used throughout the documentation
 * to represent the minimum size complete unit of audio data.
 *
 * <h4 id="sampleRate">Sample rate</h4>
 * <p>Expressed in Hz, the sample rate in an <code>AudioFormat</code> instance is the number
 * of audio samples for each channel per second in the content you are playing or recording. It is
 * not the sample rate
 * at which content is rendered or produced. For instance, a sound at a media sample rate of 8000Hz
 * can be played on a device operating at a sample rate of 48000Hz; the sample rate conversion is
 * automatically handled by the platform, and it will not play at 6x speed.
 *
 * <p>As of API {@link android.os.Build.VERSION_CODES#M},
 * sample rates up to 192kHz are supported
 * for <code>AudioRecord</code> and <code>AudioTrack</code>, with sample rate conversion
 * performed as needed.
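 * <p>For example, a format for the 8 kHz content mentioned above might be declared as in the
 * following sketch (the encoding and channel mask here are illustrative choices); the platform
 * resamples as needed if the output device runs at a different rate:
 * <pre>{@code
 * AudioFormat format = new AudioFormat.Builder()
 *         .setSampleRate(8000)                             // media sample rate of the content
 *         .setEncoding(AudioFormat.ENCODING_PCM_16BIT)     // 16 bit signed PCM samples
 *         .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)    // one sample per audio frame
 *         .build();
 * }</pre>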
 * <p>To improve efficiency and avoid lossy conversions, it is recommended to match the sample rate
 * for <code>AudioRecord</code> and <code>AudioTrack</code> to the endpoint device
 * sample rate, and to limit the sample rate to no more than 48kHz unless there are special
 * device capabilities that warrant a higher rate.
 *
 * <h4 id="encoding">Encoding</h4>
 * <p>Audio encoding is used to describe the bit representation of audio data, which can be
 * either linear PCM or compressed audio, such as AC3 or DTS.
 * <p>For linear PCM, the audio encoding describes the sample size, 8 bits, 16 bits, or 32 bits,
 * and the sample representation, integer or float.
 * <ul>
 * <li> {@link #ENCODING_PCM_8BIT}: The audio sample is an 8 bit unsigned integer in the
 * range [0, 255], with a 128 offset for zero. This is typically stored as a Java byte in a
 * byte array or ByteBuffer. Since the Java byte is <em>signed</em>,
 * be careful with math operations and conversions as the most significant bit is inverted.
 * </li>
 * <li> {@link #ENCODING_PCM_16BIT}: The audio sample is a 16 bit signed integer
 * typically stored as a Java short in a short array, but when the short
 * is stored in a ByteBuffer, it is native endian (as compared to the default Java big endian).
 * The short has full range from [-32768, 32767],
 * and is sometimes interpreted as fixed point Q.15 data.
 * </li>
 * <li> {@link #ENCODING_PCM_FLOAT}: Introduced in
 * API {@link android.os.Build.VERSION_CODES#LOLLIPOP}, this encoding specifies that
 * the audio sample is a 32 bit IEEE single precision float. The sample can be
 * manipulated as a Java float in a float array, though within a ByteBuffer
 * it is stored in native endian byte order.
 * The nominal range of <code>ENCODING_PCM_FLOAT</code> audio data is [-1.0, 1.0].
 * It is implementation dependent whether the positive maximum of 1.0 is included
 * in the interval. Values outside of the nominal range are clamped before
 * sending to the endpoint device. Beware that
 * the handling of NaN is undefined; subnormals may be treated as zero; and
 * infinities are generally clamped just like other values for <code>AudioTrack</code>
 * – try to avoid infinities because they can easily generate a NaN.
 * <br>
 * To achieve higher audio bit depth than a signed 16 bit integer short,
 * it is recommended to use <code>ENCODING_PCM_FLOAT</code> for audio capture, processing,
 * and playback (a conversion sketch follows this list).
 * Floats are efficiently manipulated by modern CPUs,
 * have greater precision than 24 bit signed integers,
 * and have greater dynamic range than 32 bit signed integers.
 * <code>AudioRecord</code> as of API {@link android.os.Build.VERSION_CODES#M} and
 * <code>AudioTrack</code> as of API {@link android.os.Build.VERSION_CODES#LOLLIPOP}
 * support <code>ENCODING_PCM_FLOAT</code>.
 * </li>
 * <li> {@link #ENCODING_PCM_24BIT_PACKED}: Introduced in
 * API {@link android.os.Build.VERSION_CODES#S},
 * this encoding specifies that the audio sample is an
 * extended precision 24 bit signed integer
 * stored as 3 Java bytes in a {@code ByteBuffer} or byte array in native endian
 * (see {@link java.nio.ByteOrder#nativeOrder()}).
 * Each sample has full range from [-8388608, 8388607],
 * and can be interpreted as fixed point Q.23 data.
 * </li>
 * <li> {@link #ENCODING_PCM_32BIT}: Introduced in
 * API {@link android.os.Build.VERSION_CODES#S},
 * this encoding specifies that the audio sample is an
 * extended precision 32 bit signed integer
 * stored as 4 Java bytes in a {@code ByteBuffer} or byte array in native endian
 * (see {@link java.nio.ByteOrder#nativeOrder()}).
 * Each sample has full range from [-2147483648, 2147483647],
 * and can be interpreted as fixed point Q.31 data.
 * </li>
 * </ul>
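 * <p>As a sketch of the scaling involved when widening 16 bit data to
 * <code>ENCODING_PCM_FLOAT</code> (the helper below is illustrative, not a platform API):
 * <pre>{@code
 * // Illustrative helper: widen Q.15 shorts to float samples in the nominal range [-1.0, 1.0).
 * static float[] toFloatPcm(short[] pcm16) {
 *     float[] out = new float[pcm16.length];
 *     for (int i = 0; i < pcm16.length; i++) {
 *         out[i] = pcm16[i] * (1.0f / 32768.0f);
 *     }
 *     return out;
 * }
 * }</pre>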
 * <p>For compressed audio, the encoding specifies the method of compression,
 * for example {@link #ENCODING_AC3} and {@link #ENCODING_DTS}. The compressed
 * audio data is typically stored as bytes in
 * a byte array or ByteBuffer. When a compressed audio encoding is specified
 * for an <code>AudioTrack</code>, it creates a direct (non-mixed) track
 * for output to an endpoint (such as HDMI) capable of decoding the compressed audio.
 * For (most) other endpoints, which are not capable of decoding such compressed audio,
 * you will need to decode the data first, typically by creating a {@link MediaCodec}.
 * Alternatively, one may use {@link MediaPlayer} for playback of compressed
 * audio files or streams.
 * <p>When compressed audio is sent out through a direct <code>AudioTrack</code>,
 * it need not be written in exact multiples of the audio access unit;
 * this differs from <code>MediaCodec</code> input buffers.
 *
 * <h4 id="channelMask">Channel mask</h4>
 * <p>Channel masks are used in <code>AudioTrack</code> and <code>AudioRecord</code> to describe
 * the samples and their arrangement in the audio frame. They are also used in the endpoint (e.g.
 * a USB audio interface, a DAC connected to headphones) to specify allowable configurations of a
 * particular device.
 * <br>As of API {@link android.os.Build.VERSION_CODES#M}, there are two types of channel masks:
 * channel position masks and channel index masks.
 *
 * <h5 id="channelPositionMask">Channel position masks</h5>
 * Channel position masks are the original Android channel masks, and have been used since API
 * {@link android.os.Build.VERSION_CODES#BASE}.
 * For input and output, they imply a positional nature: the location of a speaker or a microphone
 * for recording or playback.
 * <br>For a channel position mask, each allowed channel position corresponds to a bit in the
 * channel mask. If that channel position is present in the audio frame, that bit is set,
 * otherwise it is zero. The order of the bits (from lsb to msb) corresponds to the order of that
 * position's sample in the audio frame.
 * <br>The canonical channel position masks by channel count are as follows:
 * <br><table>
 * <tr><td>channel count</td><td>channel position mask</td></tr>
 * <tr><td>1</td><td>{@link #CHANNEL_OUT_MONO}</td></tr>
 * <tr><td>2</td><td>{@link #CHANNEL_OUT_STEREO}</td></tr>
 * <tr><td>3</td><td>{@link #CHANNEL_OUT_STEREO} | {@link #CHANNEL_OUT_FRONT_CENTER}</td></tr>
 * <tr><td>4</td><td>{@link #CHANNEL_OUT_QUAD}</td></tr>
 * <tr><td>5</td><td>{@link #CHANNEL_OUT_QUAD} | {@link #CHANNEL_OUT_FRONT_CENTER}</td></tr>
 * <tr><td>6</td><td>{@link #CHANNEL_OUT_5POINT1}</td></tr>
 * <tr><td>7</td><td>{@link #CHANNEL_OUT_5POINT1} | {@link #CHANNEL_OUT_BACK_CENTER}</td></tr>
 * <tr><td>8</td><td>{@link #CHANNEL_OUT_7POINT1_SURROUND}</td></tr>
 * </table>
 * <br>These masks are an ORed composite of individual channel masks. For example
 * {@link #CHANNEL_OUT_STEREO} is composed of {@link #CHANNEL_OUT_FRONT_LEFT} and
 * {@link #CHANNEL_OUT_FRONT_RIGHT}.
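 * <p>A short sketch of working with a channel position mask (the 5.1 mask is just an example):
 * <pre>{@code
 * int mask = AudioFormat.CHANNEL_OUT_5POINT1;     // FL | FR | FC | LFE | BL | BR
 * int channelCount = Integer.bitCount(mask);      // 6 samples in each audio frame
 * AudioFormat format = new AudioFormat.Builder()
 *         .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
 *         .setChannelMask(mask)
 *         .build();
 * }</pre>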
 * <p>
 * The following diagram represents the layout of the output channels, as seen from above
 * the listener (in the center at the "lis" position, facing the front-center channel).
 * <pre>
 *       TFL ----- TFC ----- TFR     T is Top
 *       |  \       |       /  |
 *       |   FL --- FC --- FR  |     F is Front
 *       |   |\     |     /|   |
 *       |   | BFL-BFC-BFR |   |     BF is Bottom Front
 *       |   |             |   |
 *       |   FWL   lis   FWR   |     W is Wide
 *       |   |             |   |
 *      TSL  SL    TC     SR  TSR    S is Side
 *       |   |             |   |
 *       |   BL --- BC -- BR   |     B is Back
 *       |  /       |       \  |
 *       TBL ----- TBC ----- TBR     C is Center, L/R is Left/Right
 * </pre>
 * All "T" (top) channels are above the listener, all "BF" (bottom-front) channels are below the
 * listener, and all others are in the listener's horizontal plane. When used in conjunction, LFE1
 * and LFE2 are below the listener; when used alone, the LFE plane is undefined.
 * See the channel definitions for the abbreviations.
 *
 * <h5 id="channelIndexMask">Channel index masks</h5>
 * Channel index masks were introduced in API {@link android.os.Build.VERSION_CODES#M}. They allow
 * the selection of a particular channel from the source or sink endpoint by number, i.e. the first
 * channel, the second channel, and so forth. This avoids problems with artificially assigning
 * positions to channels of an endpoint, or figuring out what the i<sup>th</sup> position bit is
 * within an endpoint's channel position mask, etc.
 * <br>Here's an example where channel index masks address this confusion: dealing with a 4 channel
 * USB device. Using a position mask, and based on the channel count, this would be a
 * {@link #CHANNEL_OUT_QUAD} device, but really one is only interested in channel 0
 * through channel 3. The USB device would then have the following individual bit channel masks:
 * {@link #CHANNEL_OUT_FRONT_LEFT},
 * {@link #CHANNEL_OUT_FRONT_RIGHT}, {@link #CHANNEL_OUT_BACK_LEFT}
 * and {@link #CHANNEL_OUT_BACK_RIGHT}. But which is channel 0 and which is
 * channel 3?
 * <br>For a channel index mask, each channel number is represented as a bit in the mask, from the
 * lsb (channel 0) upwards to the msb; numerically, this bit value is
 * <code>1 << channelNumber</code>.
 * A set bit indicates that channel is present in the audio frame, otherwise it is cleared.
 * The order of the bits also corresponds to that channel number's sample order in the audio frame.
 * <br>For the previous 4 channel USB device example, the device would have a channel index mask
 * <code>0xF</code>. Suppose we wanted to select only the first and the third channels; this would
 * correspond to a channel index mask <code>0x5</code> (the first and third bits set). If an
 * <code>AudioTrack</code> uses this channel index mask, the audio frame would consist of two
 * samples, the first sample of each frame routed to channel 0, and the second sample of each frame
 * routed to channel 2.
 * The canonical channel index masks by channel count are given by the formula
 * <code>(1 << channelCount) - 1</code>.
 *
 * <h5>Use cases</h5>
 * <ul>
 * <li><i>Channel position mask for an endpoint:</i> <code>CHANNEL_OUT_FRONT_LEFT</code>,
 *  <code>CHANNEL_OUT_FRONT_CENTER</code>, etc. for HDMI home theater purposes.
 * <li><i>Channel position mask for an audio stream:</i> Creating an <code>AudioTrack</code>
 *  to output movie content, where 5.1 multichannel output is to be written.
 * <li><i>Channel index mask for an endpoint:</i> USB devices for which input and output do not
 *  correspond to left or right speaker or microphone.
 * <li><i>Channel index mask for an audio stream:</i> An <code>AudioRecord</code> may only want the
 *  third and fourth audio channels of the endpoint (i.e. the second channel pair), and not care
 *  about the position it corresponds to, in which case the channel index mask is <code>0xC</code>.
 *  Multichannel <code>AudioRecord</code> sessions should use channel index masks; a sketch follows
 *  this list.
 * </ul>
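 * <p>A minimal sketch of that last use case (it assumes the input endpoint actually exposes at
 * least four channels and that the app holds the RECORD_AUDIO permission):
 * <pre>{@code
 * AudioFormat format = new AudioFormat.Builder()
 *         .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
 *         .setSampleRate(48000)
 *         .setChannelIndexMask(0xC)   // endpoint channels 2 and 3, the second channel pair
 *         .build();
 * AudioRecord record = new AudioRecord.Builder()
 *         .setAudioFormat(format)
 *         .build();                   // may throw if the device cannot satisfy the format
 * }</pre>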
 * <h4 id="audioFrame">Audio Frame</h4>
 * <p>For linear PCM, an audio frame consists of a set of samples captured at the same time,
 * whose count and
 * channel association are given by the <a href="#channelMask">channel mask</a>,
 * and whose sample contents are specified by the <a href="#encoding">encoding</a>.
 * For example, a stereo 16 bit PCM frame consists of
 * two 16 bit linear PCM samples, with a frame size of 4 bytes.
 * For compressed audio, an audio frame may alternately
 * refer to an access unit of compressed data bytes that is logically grouped together for
 * decoding and bitstream access (e.g. {@link MediaCodec}),
 * or a single byte of compressed data (e.g. {@link AudioTrack#getBufferSizeInFrames()
 * AudioTrack.getBufferSizeInFrames()}),
 * or the linear PCM frame resulting from decoding the compressed data
 * (e.g. {@link AudioTrack#getPlaybackHeadPosition()
 * AudioTrack.getPlaybackHeadPosition()}),
 * depending on the context where audio frame is used.
 * For the purposes of {@link AudioFormat#getFrameSizeInBytes()}, a compressed data format
 * returns a frame size of 1 byte.
 */
public final class AudioFormat implements Parcelable {

    //---------------------------------------------------------
    // Constants
    //--------------------
    /** Invalid audio data format */
    public static final int ENCODING_INVALID = 0;
    /** Default audio data format */
    public static final int ENCODING_DEFAULT = 1;

    // These values must be kept in sync with core/jni/android_media_AudioFormat.h
    // Also sync av/services/audiopolicy/managerdefault/ConfigParsingUtils.h
    /** Audio data format: PCM 16 bit per sample. Guaranteed to be supported by devices. */
    public static final int ENCODING_PCM_16BIT = 2;
    /** Audio data format: PCM 8 bit per sample. Not guaranteed to be supported by devices.
*/ 276 public static final int ENCODING_PCM_8BIT = 3; 277 /** Audio data format: single-precision floating-point per sample */ 278 public static final int ENCODING_PCM_FLOAT = 4; 279 /** Audio data format: AC-3 compressed, also known as Dolby Digital */ 280 public static final int ENCODING_AC3 = 5; 281 /** Audio data format: E-AC-3 compressed, also known as Dolby Digital Plus or DD+ */ 282 public static final int ENCODING_E_AC3 = 6; 283 /** Audio data format: DTS compressed */ 284 public static final int ENCODING_DTS = 7; 285 /** Audio data format: DTS HD compressed */ 286 public static final int ENCODING_DTS_HD = 8; 287 /** Audio data format: MP3 compressed */ 288 public static final int ENCODING_MP3 = 9; 289 /** Audio data format: AAC LC compressed */ 290 public static final int ENCODING_AAC_LC = 10; 291 /** Audio data format: AAC HE V1 compressed */ 292 public static final int ENCODING_AAC_HE_V1 = 11; 293 /** Audio data format: AAC HE V2 compressed */ 294 public static final int ENCODING_AAC_HE_V2 = 12; 295 296 /** Audio data format: compressed audio wrapped in PCM for HDMI 297 * or S/PDIF passthrough. 298 * For devices whose SDK version is less than {@link android.os.Build.VERSION_CODES#S}, the 299 * channel mask of IEC61937 track must be {@link #CHANNEL_OUT_STEREO}. 300 * Data should be written to the stream in a short[] array. 301 * If the data is written in a byte[] array then there may be endian problems 302 * on some platforms when converting to short internally. 303 */ 304 public static final int ENCODING_IEC61937 = 13; 305 /** Audio data format: DOLBY TRUEHD compressed 306 **/ 307 public static final int ENCODING_DOLBY_TRUEHD = 14; 308 /** Audio data format: AAC ELD compressed */ 309 public static final int ENCODING_AAC_ELD = 15; 310 /** Audio data format: AAC xHE compressed */ 311 public static final int ENCODING_AAC_XHE = 16; 312 /** Audio data format: AC-4 sync frame transport format */ 313 public static final int ENCODING_AC4 = 17; 314 /** Audio data format: E-AC-3-JOC compressed 315 * E-AC-3-JOC streams can be decoded by downstream devices supporting {@link #ENCODING_E_AC3}. 316 * Use {@link #ENCODING_E_AC3} as the AudioTrack encoding when the downstream device 317 * supports {@link #ENCODING_E_AC3} but not {@link #ENCODING_E_AC3_JOC}. 318 **/ 319 public static final int ENCODING_E_AC3_JOC = 18; 320 /** Audio data format: Dolby MAT (Metadata-enhanced Audio Transmission) 321 * Dolby MAT bitstreams are used to transmit Dolby TrueHD, channel-based PCM, or PCM with 322 * metadata (object audio) over HDMI (e.g. Dolby Atmos content). 323 **/ 324 public static final int ENCODING_DOLBY_MAT = 19; 325 /** Audio data format: OPUS compressed. */ 326 public static final int ENCODING_OPUS = 20; 327 328 /** @hide 329 * We do not permit legacy short array reads or writes for encodings 330 * introduced after this threshold. 331 */ 332 public static final int ENCODING_LEGACY_SHORT_ARRAY_THRESHOLD = ENCODING_OPUS; 333 334 /** Audio data format: PCM 24 bit per sample packed as 3 bytes. 335 * 336 * The bytes are in little-endian order, so the least significant byte 337 * comes first in the byte array. 338 * 339 * Not guaranteed to be supported by devices, may be emulated if not supported. */ 340 public static final int ENCODING_PCM_24BIT_PACKED = 21; 341 /** Audio data format: PCM 32 bit per sample. 342 * Not guaranteed to be supported by devices, may be emulated if not supported. 
*/ 343 public static final int ENCODING_PCM_32BIT = 22; 344 345 /** Audio data format: MPEG-H baseline profile, level 3 */ 346 public static final int ENCODING_MPEGH_BL_L3 = 23; 347 /** Audio data format: MPEG-H baseline profile, level 4 */ 348 public static final int ENCODING_MPEGH_BL_L4 = 24; 349 /** Audio data format: MPEG-H low complexity profile, level 3 */ 350 public static final int ENCODING_MPEGH_LC_L3 = 25; 351 /** Audio data format: MPEG-H low complexity profile, level 4 */ 352 public static final int ENCODING_MPEGH_LC_L4 = 26; 353 /** Audio data format: DTS UHD Profile-1 compressed (aka DTS:X Profile 1) 354 * Has the same meaning and value as ENCODING_DTS_UHD_P1. 355 * @deprecated Use {@link #ENCODING_DTS_UHD_P1} instead. */ 356 @Deprecated public static final int ENCODING_DTS_UHD = 27; 357 /** Audio data format: DRA compressed */ 358 public static final int ENCODING_DRA = 28; 359 /** Audio data format: DTS HD Master Audio compressed 360 * DTS HD Master Audio stream is variable bit rate and contains lossless audio. 361 * Use {@link #ENCODING_DTS_HD_MA} for lossless audio content (DTS-HD MA Lossless) 362 * and use {@link #ENCODING_DTS_HD} for other DTS bitstreams with extension substream 363 * (DTS 8Ch Discrete, DTS Hi Res, DTS Express). */ 364 public static final int ENCODING_DTS_HD_MA = 29; 365 /** Audio data format: DTS UHD Profile-1 compressed (aka DTS:X Profile 1) 366 * Has the same meaning and value as the deprecated {@link #ENCODING_DTS_UHD}.*/ 367 public static final int ENCODING_DTS_UHD_P1 = 27; 368 /** Audio data format: DTS UHD Profile-2 compressed 369 * DTS-UHD Profile-2 supports delivery of Channel-Based Audio, Object-Based Audio 370 * and High Order Ambisonic presentations up to the fourth order. 371 * Use {@link #ENCODING_DTS_UHD_P1} to transmit DTS UHD Profile 1 (aka DTS:X Profile 1) 372 * bitstream. 373 * Use {@link #ENCODING_DTS_UHD_P2} to transmit DTS UHD Profile 2 (aka DTS:X Profile 2) 374 * bitstream. 
*/ 375 public static final int ENCODING_DTS_UHD_P2 = 30; 376 /** Audio data format: Direct Stream Digital */ 377 public static final int ENCODING_DSD = 31; 378 379 /** @hide */ toLogFriendlyEncoding(int enc)380 public static String toLogFriendlyEncoding(int enc) { 381 switch(enc) { 382 case ENCODING_INVALID: 383 return "ENCODING_INVALID"; 384 case ENCODING_PCM_16BIT: 385 return "ENCODING_PCM_16BIT"; 386 case ENCODING_PCM_8BIT: 387 return "ENCODING_PCM_8BIT"; 388 case ENCODING_PCM_FLOAT: 389 return "ENCODING_PCM_FLOAT"; 390 case ENCODING_AC3: 391 return "ENCODING_AC3"; 392 case ENCODING_E_AC3: 393 return "ENCODING_E_AC3"; 394 case ENCODING_DTS: 395 return "ENCODING_DTS"; 396 case ENCODING_DTS_HD: 397 return "ENCODING_DTS_HD"; 398 case ENCODING_MP3: 399 return "ENCODING_MP3"; 400 case ENCODING_AAC_LC: 401 return "ENCODING_AAC_LC"; 402 case ENCODING_AAC_HE_V1: 403 return "ENCODING_AAC_HE_V1"; 404 case ENCODING_AAC_HE_V2: 405 return "ENCODING_AAC_HE_V2"; 406 case ENCODING_IEC61937: 407 return "ENCODING_IEC61937"; 408 case ENCODING_DOLBY_TRUEHD: 409 return "ENCODING_DOLBY_TRUEHD"; 410 case ENCODING_AAC_ELD: 411 return "ENCODING_AAC_ELD"; 412 case ENCODING_AAC_XHE: 413 return "ENCODING_AAC_XHE"; 414 case ENCODING_AC4: 415 return "ENCODING_AC4"; 416 case ENCODING_E_AC3_JOC: 417 return "ENCODING_E_AC3_JOC"; 418 case ENCODING_DOLBY_MAT: 419 return "ENCODING_DOLBY_MAT"; 420 case ENCODING_OPUS: 421 return "ENCODING_OPUS"; 422 case ENCODING_PCM_24BIT_PACKED: 423 return "ENCODING_PCM_24BIT_PACKED"; 424 case ENCODING_PCM_32BIT: 425 return "ENCODING_PCM_32BIT"; 426 case ENCODING_MPEGH_BL_L3: 427 return "ENCODING_MPEGH_BL_L3"; 428 case ENCODING_MPEGH_BL_L4: 429 return "ENCODING_MPEGH_BL_L4"; 430 case ENCODING_MPEGH_LC_L3: 431 return "ENCODING_MPEGH_LC_L3"; 432 case ENCODING_MPEGH_LC_L4: 433 return "ENCODING_MPEGH_LC_L4"; 434 case ENCODING_DTS_UHD_P1: 435 return "ENCODING_DTS_UHD_P1"; 436 case ENCODING_DRA: 437 return "ENCODING_DRA"; 438 case ENCODING_DTS_HD_MA: 439 return "ENCODING_DTS_HD_MA"; 440 case ENCODING_DTS_UHD_P2: 441 return "ENCODING_DTS_UHD_P2"; 442 case ENCODING_DSD: 443 return "ENCODING_DSD"; 444 default : 445 return "invalid encoding " + enc; 446 } 447 } 448 449 /** Invalid audio channel configuration */ 450 /** @deprecated Use {@link #CHANNEL_INVALID} instead. */ 451 @Deprecated public static final int CHANNEL_CONFIGURATION_INVALID = 0; 452 /** Default audio channel configuration */ 453 /** @deprecated Use {@link #CHANNEL_OUT_DEFAULT} or {@link #CHANNEL_IN_DEFAULT} instead. */ 454 @Deprecated public static final int CHANNEL_CONFIGURATION_DEFAULT = 1; 455 /** Mono audio configuration */ 456 /** @deprecated Use {@link #CHANNEL_OUT_MONO} or {@link #CHANNEL_IN_MONO} instead. */ 457 @Deprecated public static final int CHANNEL_CONFIGURATION_MONO = 2; 458 /** Stereo (2 channel) audio configuration */ 459 /** @deprecated Use {@link #CHANNEL_OUT_STEREO} or {@link #CHANNEL_IN_STEREO} instead. 
*/ 460 @Deprecated public static final int CHANNEL_CONFIGURATION_STEREO = 3; 461 462 /** Invalid audio channel mask */ 463 public static final int CHANNEL_INVALID = 0; 464 /** Default audio channel mask */ 465 public static final int CHANNEL_OUT_DEFAULT = 1; 466 467 // Output channel mask definitions below are translated to the native values defined in 468 // in /system/media/audio/include/system/audio.h in the JNI code of AudioTrack 469 /** Front left output channel (see FL in channel diagram) */ 470 public static final int CHANNEL_OUT_FRONT_LEFT = 0x4; 471 /** Front right output channel (see FR in channel diagram) */ 472 public static final int CHANNEL_OUT_FRONT_RIGHT = 0x8; 473 /** Front center output channel (see FC in channel diagram) */ 474 public static final int CHANNEL_OUT_FRONT_CENTER = 0x10; 475 /** LFE "low frequency effect" channel 476 * When used in conjunction with {@link #CHANNEL_OUT_LOW_FREQUENCY_2}, it is intended 477 * to contain the left low-frequency effect signal, also referred to as "LFE1" 478 * in ITU-R BS.2159-8 */ 479 public static final int CHANNEL_OUT_LOW_FREQUENCY = 0x20; 480 /** Back left output channel (see BL in channel diagram) */ 481 public static final int CHANNEL_OUT_BACK_LEFT = 0x40; 482 /** Back right output channel (see BR in channel diagram) */ 483 public static final int CHANNEL_OUT_BACK_RIGHT = 0x80; 484 public static final int CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x100; 485 public static final int CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x200; 486 /** Back center output channel (see BC in channel diagram) */ 487 public static final int CHANNEL_OUT_BACK_CENTER = 0x400; 488 /** Side left output channel (see SL in channel diagram) */ 489 public static final int CHANNEL_OUT_SIDE_LEFT = 0x800; 490 /** Side right output channel (see SR in channel diagram) */ 491 public static final int CHANNEL_OUT_SIDE_RIGHT = 0x1000; 492 /** Top center (above listener) output channel (see TC in channel diagram) */ 493 public static final int CHANNEL_OUT_TOP_CENTER = 0x2000; 494 /** Top front left output channel (see TFL in channel diagram above FL) */ 495 public static final int CHANNEL_OUT_TOP_FRONT_LEFT = 0x4000; 496 /** Top front center output channel (see TFC in channel diagram above FC) */ 497 public static final int CHANNEL_OUT_TOP_FRONT_CENTER = 0x8000; 498 /** Top front right output channel (see TFR in channel diagram above FR) */ 499 public static final int CHANNEL_OUT_TOP_FRONT_RIGHT = 0x10000; 500 /** Top back left output channel (see TBL in channel diagram above BL) */ 501 public static final int CHANNEL_OUT_TOP_BACK_LEFT = 0x20000; 502 /** Top back center output channel (see TBC in channel diagram above BC) */ 503 public static final int CHANNEL_OUT_TOP_BACK_CENTER = 0x40000; 504 /** Top back right output channel (see TBR in channel diagram above BR) */ 505 public static final int CHANNEL_OUT_TOP_BACK_RIGHT = 0x80000; 506 /** Top side left output channel (see TSL in channel diagram above SL) */ 507 public static final int CHANNEL_OUT_TOP_SIDE_LEFT = 0x100000; 508 /** Top side right output channel (see TSR in channel diagram above SR) */ 509 public static final int CHANNEL_OUT_TOP_SIDE_RIGHT = 0x200000; 510 /** Bottom front left output channel (see BFL in channel diagram below FL) */ 511 public static final int CHANNEL_OUT_BOTTOM_FRONT_LEFT = 0x400000; 512 /** Bottom front center output channel (see BFC in channel diagram below FC) */ 513 public static final int CHANNEL_OUT_BOTTOM_FRONT_CENTER = 0x800000; 514 /** Bottom front right output channel (see BFR in 
channel diagram below FR) */ 515 public static final int CHANNEL_OUT_BOTTOM_FRONT_RIGHT = 0x1000000; 516 /** The second LFE channel 517 * When used in conjunction with {@link #CHANNEL_OUT_LOW_FREQUENCY}, it is intended 518 * to contain the right low-frequency effect signal, also referred to as "LFE2" 519 * in ITU-R BS.2159-8 */ 520 public static final int CHANNEL_OUT_LOW_FREQUENCY_2 = 0x2000000; 521 /** Front wide left output channel (see FWL in channel diagram) */ 522 public static final int CHANNEL_OUT_FRONT_WIDE_LEFT = 0x4000000; 523 /** Front wide right output channel (see FWR in channel diagram) */ 524 public static final int CHANNEL_OUT_FRONT_WIDE_RIGHT = 0x8000000; 525 /** @hide 526 * Haptic channels can be used by internal framework code. Use the same values as in native. 527 */ 528 public static final int CHANNEL_OUT_HAPTIC_B = 0x10000000; 529 /** @hide */ 530 public static final int CHANNEL_OUT_HAPTIC_A = 0x20000000; 531 532 public static final int CHANNEL_OUT_MONO = CHANNEL_OUT_FRONT_LEFT; 533 public static final int CHANNEL_OUT_STEREO = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT); 534 // aka QUAD_BACK 535 public static final int CHANNEL_OUT_QUAD = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | 536 CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT); 537 /** @hide */ 538 public static final int CHANNEL_OUT_QUAD_SIDE = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | 539 CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT); 540 public static final int CHANNEL_OUT_SURROUND = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | 541 CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_CENTER); 542 // aka 5POINT1_BACK 543 /** Output channel mask for 5.1 */ 544 public static final int CHANNEL_OUT_5POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | 545 CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT); 546 /** Output channel mask for 6.1 547 * Same as 5.1 with the addition of the back center channel */ 548 public static final int CHANNEL_OUT_6POINT1 = (CHANNEL_OUT_5POINT1 | CHANNEL_OUT_BACK_CENTER); 549 /** @hide */ 550 public static final int CHANNEL_OUT_5POINT1_SIDE = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | 551 CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | 552 CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT); 553 // different from AUDIO_CHANNEL_OUT_7POINT1 used internally, and not accepted by AudioRecord. 554 /** @deprecated Not the typical 7.1 surround configuration. Use {@link #CHANNEL_OUT_7POINT1_SURROUND} instead. 
*/ 555 @Deprecated public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | 556 CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT | 557 CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER); 558 /** Output channel mask for 7.1 */ 559 // matches AUDIO_CHANNEL_OUT_7POINT1 560 public static final int CHANNEL_OUT_7POINT1_SURROUND = ( 561 CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_FRONT_RIGHT | 562 CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT | 563 CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT | 564 CHANNEL_OUT_LOW_FREQUENCY); 565 /** Output channel mask for 5.1.2 566 * Same as 5.1 with the addition of left and right top channels */ 567 public static final int CHANNEL_OUT_5POINT1POINT2 = (CHANNEL_OUT_5POINT1 | 568 CHANNEL_OUT_TOP_SIDE_LEFT | CHANNEL_OUT_TOP_SIDE_RIGHT); 569 /** Output channel mask for 5.1.4 570 * Same as 5.1 with the addition of four top channels */ 571 public static final int CHANNEL_OUT_5POINT1POINT4 = (CHANNEL_OUT_5POINT1 | 572 CHANNEL_OUT_TOP_FRONT_LEFT | CHANNEL_OUT_TOP_FRONT_RIGHT | 573 CHANNEL_OUT_TOP_BACK_LEFT | CHANNEL_OUT_TOP_BACK_RIGHT); 574 /** Output channel mask for 7.1.2 575 * Same as 7.1 with the addition of left and right top channels*/ 576 public static final int CHANNEL_OUT_7POINT1POINT2 = (CHANNEL_OUT_7POINT1_SURROUND | 577 CHANNEL_OUT_TOP_SIDE_LEFT | CHANNEL_OUT_TOP_SIDE_RIGHT); 578 /** Output channel mask for 7.1.4 579 * Same as 7.1 with the addition of four top channels */ 580 public static final int CHANNEL_OUT_7POINT1POINT4 = (CHANNEL_OUT_7POINT1_SURROUND | 581 CHANNEL_OUT_TOP_FRONT_LEFT | CHANNEL_OUT_TOP_FRONT_RIGHT | 582 CHANNEL_OUT_TOP_BACK_LEFT | CHANNEL_OUT_TOP_BACK_RIGHT); 583 /** Output channel mask for 9.1.4 584 * Same as 7.1.4 with the addition of left and right front wide channels */ 585 public static final int CHANNEL_OUT_9POINT1POINT4 = (CHANNEL_OUT_7POINT1POINT4 586 | CHANNEL_OUT_FRONT_WIDE_LEFT | CHANNEL_OUT_FRONT_WIDE_RIGHT); 587 /** Output channel mask for 9.1.6 588 * Same as 9.1.4 with the addition of left and right top side channels */ 589 public static final int CHANNEL_OUT_9POINT1POINT6 = (CHANNEL_OUT_9POINT1POINT4 590 | CHANNEL_OUT_TOP_SIDE_LEFT | CHANNEL_OUT_TOP_SIDE_RIGHT); 591 /** @hide */ 592 public static final int CHANNEL_OUT_13POINT_360RA = ( 593 CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_FRONT_RIGHT | 594 CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT | 595 CHANNEL_OUT_TOP_FRONT_LEFT | CHANNEL_OUT_TOP_FRONT_CENTER | 596 CHANNEL_OUT_TOP_FRONT_RIGHT | 597 CHANNEL_OUT_TOP_BACK_LEFT | CHANNEL_OUT_TOP_BACK_RIGHT | 598 CHANNEL_OUT_BOTTOM_FRONT_LEFT | CHANNEL_OUT_BOTTOM_FRONT_CENTER | 599 CHANNEL_OUT_BOTTOM_FRONT_RIGHT); 600 /** @hide */ 601 public static final int CHANNEL_OUT_22POINT2 = (CHANNEL_OUT_7POINT1POINT4 | 602 CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER | 603 CHANNEL_OUT_BACK_CENTER | CHANNEL_OUT_TOP_CENTER | 604 CHANNEL_OUT_TOP_FRONT_CENTER | CHANNEL_OUT_TOP_BACK_CENTER | 605 CHANNEL_OUT_TOP_SIDE_LEFT | CHANNEL_OUT_TOP_SIDE_RIGHT | 606 CHANNEL_OUT_BOTTOM_FRONT_LEFT | CHANNEL_OUT_BOTTOM_FRONT_RIGHT | 607 CHANNEL_OUT_BOTTOM_FRONT_CENTER | 608 CHANNEL_OUT_LOW_FREQUENCY_2); 609 // CHANNEL_OUT_ALL is not yet defined; if added then it should match AUDIO_CHANNEL_OUT_ALL 610 611 /** @hide */ 612 @IntDef(flag = true, prefix = "CHANNEL_OUT", value = { 613 CHANNEL_OUT_FRONT_LEFT, 614 CHANNEL_OUT_FRONT_RIGHT, 615 CHANNEL_OUT_FRONT_CENTER, 616 
CHANNEL_OUT_LOW_FREQUENCY, 617 CHANNEL_OUT_BACK_LEFT, 618 CHANNEL_OUT_BACK_RIGHT, 619 CHANNEL_OUT_FRONT_LEFT_OF_CENTER, 620 CHANNEL_OUT_FRONT_RIGHT_OF_CENTER, 621 CHANNEL_OUT_BACK_CENTER, 622 CHANNEL_OUT_SIDE_LEFT, 623 CHANNEL_OUT_SIDE_RIGHT, 624 CHANNEL_OUT_TOP_CENTER, 625 CHANNEL_OUT_TOP_FRONT_LEFT, 626 CHANNEL_OUT_TOP_FRONT_CENTER, 627 CHANNEL_OUT_TOP_FRONT_RIGHT, 628 CHANNEL_OUT_TOP_BACK_LEFT, 629 CHANNEL_OUT_TOP_BACK_CENTER, 630 CHANNEL_OUT_TOP_BACK_RIGHT, 631 CHANNEL_OUT_TOP_SIDE_LEFT, 632 CHANNEL_OUT_TOP_SIDE_RIGHT, 633 CHANNEL_OUT_BOTTOM_FRONT_LEFT, 634 CHANNEL_OUT_BOTTOM_FRONT_CENTER, 635 CHANNEL_OUT_BOTTOM_FRONT_RIGHT, 636 CHANNEL_OUT_LOW_FREQUENCY_2, 637 CHANNEL_OUT_FRONT_WIDE_LEFT, 638 CHANNEL_OUT_FRONT_WIDE_RIGHT, 639 CHANNEL_OUT_HAPTIC_B, 640 CHANNEL_OUT_HAPTIC_A 641 }) 642 @Retention(RetentionPolicy.SOURCE) 643 public @interface ChannelOut {} 644 645 /** Minimum value for sample rate, 646 * assuming AudioTrack and AudioRecord share the same limitations. 647 * @hide 648 */ 649 // never unhide 650 public static final int SAMPLE_RATE_HZ_MIN = AudioSystem.SAMPLE_RATE_HZ_MIN; 651 /** Maximum value for sample rate, 652 * assuming AudioTrack and AudioRecord share the same limitations. 653 * @hide 654 */ 655 // never unhide 656 public static final int SAMPLE_RATE_HZ_MAX = AudioSystem.SAMPLE_RATE_HZ_MAX; 657 /** Sample rate will be a route-dependent value. 658 * For AudioTrack, it is usually the sink sample rate, 659 * and for AudioRecord it is usually the source sample rate. 660 */ 661 public static final int SAMPLE_RATE_UNSPECIFIED = 0; 662 663 /** 664 * @hide 665 * Return the input channel mask corresponding to an output channel mask. 666 * This can be used for submix rerouting for the mask of the recorder to map to that of the mix. 667 * @param outMask a combination of the CHANNEL_OUT_* definitions, but not CHANNEL_OUT_DEFAULT 668 * @return a combination of CHANNEL_IN_* definitions matching an output channel mask 669 * @throws IllegalArgumentException 670 */ inChannelMaskFromOutChannelMask(int outMask)671 public static int inChannelMaskFromOutChannelMask(int outMask) throws IllegalArgumentException { 672 if (outMask == CHANNEL_OUT_DEFAULT) { 673 throw new IllegalArgumentException( 674 "Illegal CHANNEL_OUT_DEFAULT channel mask for input."); 675 } 676 switch (channelCountFromOutChannelMask(outMask)) { 677 case 1: 678 return CHANNEL_IN_MONO; 679 case 2: 680 return CHANNEL_IN_STEREO; 681 default: 682 throw new IllegalArgumentException("Unsupported channel configuration for input."); 683 } 684 } 685 686 /** 687 * @hide 688 * Return the number of channels from an input channel mask 689 * @param mask a combination of the CHANNEL_IN_* definitions, even CHANNEL_IN_DEFAULT 690 * @return number of channels for the mask 691 */ 692 @TestApi channelCountFromInChannelMask(int mask)693 public static int channelCountFromInChannelMask(int mask) { 694 return Integer.bitCount(mask); 695 } 696 /** 697 * @hide 698 * Return the number of channels from an output channel mask 699 * @param mask a combination of the CHANNEL_OUT_* definitions, but not CHANNEL_OUT_DEFAULT 700 * @return number of channels for the mask 701 */ 702 @TestApi channelCountFromOutChannelMask(int mask)703 public static int channelCountFromOutChannelMask(int mask) { 704 return Integer.bitCount(mask); 705 } 706 /** 707 * @hide 708 * Return a channel mask ready to be used by native code 709 * @param mask a combination of the CHANNEL_OUT_* definitions, but not CHANNEL_OUT_DEFAULT 710 * @return a native channel mask 711 */ 
convertChannelOutMaskToNativeMask(int javaMask)712 public static int convertChannelOutMaskToNativeMask(int javaMask) { 713 return (javaMask >> 2); 714 } 715 716 /** 717 * @hide 718 * Return a java output channel mask 719 * @param mask a native channel mask 720 * @return a combination of the CHANNEL_OUT_* definitions 721 */ convertNativeChannelMaskToOutMask(int nativeMask)722 public static int convertNativeChannelMaskToOutMask(int nativeMask) { 723 return (nativeMask << 2); 724 } 725 726 public static final int CHANNEL_IN_DEFAULT = 1; 727 // These directly match native 728 public static final int CHANNEL_IN_LEFT = 0x4; 729 public static final int CHANNEL_IN_RIGHT = 0x8; 730 public static final int CHANNEL_IN_FRONT = 0x10; 731 public static final int CHANNEL_IN_BACK = 0x20; 732 public static final int CHANNEL_IN_LEFT_PROCESSED = 0x40; 733 public static final int CHANNEL_IN_RIGHT_PROCESSED = 0x80; 734 public static final int CHANNEL_IN_FRONT_PROCESSED = 0x100; 735 public static final int CHANNEL_IN_BACK_PROCESSED = 0x200; 736 public static final int CHANNEL_IN_PRESSURE = 0x400; 737 public static final int CHANNEL_IN_X_AXIS = 0x800; 738 public static final int CHANNEL_IN_Y_AXIS = 0x1000; 739 public static final int CHANNEL_IN_Z_AXIS = 0x2000; 740 public static final int CHANNEL_IN_VOICE_UPLINK = 0x4000; 741 public static final int CHANNEL_IN_VOICE_DNLINK = 0x8000; 742 // CHANNEL_IN_BACK_LEFT to TOP_RIGHT are not microphone positions 743 // but surround channels which are used when dealing with multi-channel inputs, 744 // e.g. via HDMI input on TV. 745 /** @hide */ 746 public static final int CHANNEL_IN_BACK_LEFT = 0x10000; 747 /** @hide */ 748 public static final int CHANNEL_IN_BACK_RIGHT = 0x20000; 749 /** @hide */ 750 public static final int CHANNEL_IN_CENTER = 0x40000; 751 /** @hide */ 752 public static final int CHANNEL_IN_LOW_FREQUENCY = 0x100000; 753 /** @hide */ 754 public static final int CHANNEL_IN_TOP_LEFT = 0x200000; 755 /** @hide */ 756 public static final int CHANNEL_IN_TOP_RIGHT = 0x400000; 757 public static final int CHANNEL_IN_MONO = CHANNEL_IN_FRONT; 758 public static final int CHANNEL_IN_STEREO = (CHANNEL_IN_LEFT | CHANNEL_IN_RIGHT); 759 // Surround channel masks corresponding to output masks, used for 760 // surround sound inputs. 
761 /** @hide */ 762 public static final int CHANNEL_IN_2POINT0POINT2 = ( 763 CHANNEL_IN_LEFT | CHANNEL_IN_RIGHT | CHANNEL_IN_TOP_LEFT | CHANNEL_IN_TOP_RIGHT); 764 /** @hide */ 765 public static final int CHANNEL_IN_2POINT1POINT2 = ( 766 CHANNEL_IN_LEFT | CHANNEL_IN_RIGHT | CHANNEL_IN_TOP_LEFT | CHANNEL_IN_TOP_RIGHT 767 | CHANNEL_IN_LOW_FREQUENCY); 768 /** @hide */ 769 public static final int CHANNEL_IN_3POINT0POINT2 = ( 770 CHANNEL_IN_LEFT | CHANNEL_IN_CENTER | CHANNEL_IN_RIGHT | CHANNEL_IN_TOP_LEFT 771 | CHANNEL_IN_TOP_RIGHT); 772 /** @hide */ 773 public static final int CHANNEL_IN_3POINT1POINT2 = ( 774 CHANNEL_IN_LEFT | CHANNEL_IN_CENTER | CHANNEL_IN_RIGHT | CHANNEL_IN_TOP_LEFT 775 | CHANNEL_IN_TOP_RIGHT | CHANNEL_IN_LOW_FREQUENCY); 776 /** @hide */ 777 public static final int CHANNEL_IN_5POINT1 = ( 778 CHANNEL_IN_LEFT | CHANNEL_IN_CENTER | CHANNEL_IN_RIGHT | CHANNEL_IN_BACK_LEFT 779 | CHANNEL_IN_BACK_RIGHT | CHANNEL_IN_LOW_FREQUENCY); 780 /** @hide */ 781 public static final int CHANNEL_IN_FRONT_BACK = CHANNEL_IN_FRONT | CHANNEL_IN_BACK; 782 // CHANNEL_IN_ALL is not yet defined; if added then it should match AUDIO_CHANNEL_IN_ALL 783 784 /** @hide */ 785 @TestApi getBytesPerSample(int audioFormat)786 public static int getBytesPerSample(int audioFormat) 787 { 788 switch (audioFormat) { 789 case ENCODING_PCM_8BIT: 790 return 1; 791 case ENCODING_PCM_16BIT: 792 case ENCODING_IEC61937: 793 case ENCODING_DEFAULT: 794 return 2; 795 case ENCODING_PCM_24BIT_PACKED: 796 return 3; 797 case ENCODING_PCM_FLOAT: 798 case ENCODING_PCM_32BIT: 799 return 4; 800 case ENCODING_INVALID: 801 default: 802 throw new IllegalArgumentException("Bad audio format " + audioFormat); 803 } 804 } 805 806 /** @hide */ isValidEncoding(int audioFormat)807 public static boolean isValidEncoding(int audioFormat) 808 { 809 switch (audioFormat) { 810 case ENCODING_PCM_16BIT: 811 case ENCODING_PCM_8BIT: 812 case ENCODING_PCM_FLOAT: 813 case ENCODING_AC3: 814 case ENCODING_E_AC3: 815 case ENCODING_DTS: 816 case ENCODING_DTS_HD: 817 case ENCODING_MP3: 818 case ENCODING_AAC_LC: 819 case ENCODING_AAC_HE_V1: 820 case ENCODING_AAC_HE_V2: 821 case ENCODING_IEC61937: 822 case ENCODING_DOLBY_TRUEHD: 823 case ENCODING_AAC_ELD: 824 case ENCODING_AAC_XHE: 825 case ENCODING_AC4: 826 case ENCODING_E_AC3_JOC: 827 case ENCODING_DOLBY_MAT: 828 case ENCODING_OPUS: 829 case ENCODING_PCM_24BIT_PACKED: 830 case ENCODING_PCM_32BIT: 831 case ENCODING_MPEGH_BL_L3: 832 case ENCODING_MPEGH_BL_L4: 833 case ENCODING_MPEGH_LC_L3: 834 case ENCODING_MPEGH_LC_L4: 835 case ENCODING_DTS_UHD_P1: 836 case ENCODING_DRA: 837 case ENCODING_DTS_HD_MA: 838 case ENCODING_DTS_UHD_P2: 839 case ENCODING_DSD: 840 return true; 841 default: 842 return false; 843 } 844 } 845 846 /** @hide */ isPublicEncoding(int audioFormat)847 public static boolean isPublicEncoding(int audioFormat) 848 { 849 switch (audioFormat) { 850 case ENCODING_PCM_16BIT: 851 case ENCODING_PCM_8BIT: 852 case ENCODING_PCM_FLOAT: 853 case ENCODING_AC3: 854 case ENCODING_E_AC3: 855 case ENCODING_DTS: 856 case ENCODING_DTS_HD: 857 case ENCODING_MP3: 858 case ENCODING_AAC_LC: 859 case ENCODING_AAC_HE_V1: 860 case ENCODING_AAC_HE_V2: 861 case ENCODING_IEC61937: 862 case ENCODING_DOLBY_TRUEHD: 863 case ENCODING_AAC_ELD: 864 case ENCODING_AAC_XHE: 865 case ENCODING_AC4: 866 case ENCODING_E_AC3_JOC: 867 case ENCODING_DOLBY_MAT: 868 case ENCODING_OPUS: 869 case ENCODING_PCM_24BIT_PACKED: 870 case ENCODING_PCM_32BIT: 871 case ENCODING_MPEGH_BL_L3: 872 case ENCODING_MPEGH_BL_L4: 873 case ENCODING_MPEGH_LC_L3: 874 
case ENCODING_MPEGH_LC_L4: 875 case ENCODING_DTS_UHD_P1: 876 case ENCODING_DRA: 877 case ENCODING_DTS_HD_MA: 878 case ENCODING_DTS_UHD_P2: 879 case ENCODING_DSD: 880 return true; 881 default: 882 return false; 883 } 884 } 885 886 /** @hide */ 887 @TestApi isEncodingLinearPcm(int audioFormat)888 public static boolean isEncodingLinearPcm(int audioFormat) 889 { 890 switch (audioFormat) { 891 case ENCODING_PCM_16BIT: 892 case ENCODING_PCM_8BIT: 893 case ENCODING_PCM_FLOAT: 894 case ENCODING_PCM_24BIT_PACKED: 895 case ENCODING_PCM_32BIT: 896 case ENCODING_DEFAULT: 897 return true; 898 case ENCODING_AC3: 899 case ENCODING_E_AC3: 900 case ENCODING_DTS: 901 case ENCODING_DTS_HD: 902 case ENCODING_MP3: 903 case ENCODING_AAC_LC: 904 case ENCODING_AAC_HE_V1: 905 case ENCODING_AAC_HE_V2: 906 case ENCODING_IEC61937: // wrapped in PCM but compressed 907 case ENCODING_DOLBY_TRUEHD: 908 case ENCODING_AAC_ELD: 909 case ENCODING_AAC_XHE: 910 case ENCODING_AC4: 911 case ENCODING_E_AC3_JOC: 912 case ENCODING_DOLBY_MAT: 913 case ENCODING_OPUS: 914 case ENCODING_MPEGH_BL_L3: 915 case ENCODING_MPEGH_BL_L4: 916 case ENCODING_MPEGH_LC_L3: 917 case ENCODING_MPEGH_LC_L4: 918 case ENCODING_DTS_UHD_P1: 919 case ENCODING_DRA: 920 case ENCODING_DTS_HD_MA: 921 case ENCODING_DTS_UHD_P2: 922 return false; 923 case ENCODING_INVALID: 924 default: 925 throw new IllegalArgumentException("Bad audio format " + audioFormat); 926 } 927 } 928 929 /** @hide */ isEncodingLinearFrames(int audioFormat)930 public static boolean isEncodingLinearFrames(int audioFormat) 931 { 932 switch (audioFormat) { 933 case ENCODING_PCM_16BIT: 934 case ENCODING_PCM_8BIT: 935 case ENCODING_PCM_FLOAT: 936 case ENCODING_IEC61937: // same size as stereo PCM 937 case ENCODING_PCM_24BIT_PACKED: 938 case ENCODING_PCM_32BIT: 939 case ENCODING_DEFAULT: 940 return true; 941 case ENCODING_AC3: 942 case ENCODING_E_AC3: 943 case ENCODING_DTS: 944 case ENCODING_DTS_HD: 945 case ENCODING_MP3: 946 case ENCODING_AAC_LC: 947 case ENCODING_AAC_HE_V1: 948 case ENCODING_AAC_HE_V2: 949 case ENCODING_DOLBY_TRUEHD: 950 case ENCODING_AAC_ELD: 951 case ENCODING_AAC_XHE: 952 case ENCODING_AC4: 953 case ENCODING_E_AC3_JOC: 954 case ENCODING_DOLBY_MAT: 955 case ENCODING_OPUS: 956 case ENCODING_MPEGH_BL_L3: 957 case ENCODING_MPEGH_BL_L4: 958 case ENCODING_MPEGH_LC_L3: 959 case ENCODING_MPEGH_LC_L4: 960 case ENCODING_DTS_UHD_P1: 961 case ENCODING_DRA: 962 case ENCODING_DTS_HD_MA: 963 case ENCODING_DTS_UHD_P2: 964 return false; 965 case ENCODING_INVALID: 966 default: 967 throw new IllegalArgumentException("Bad audio format " + audioFormat); 968 } 969 } 970 /** 971 * Returns an array of public encoding values extracted from an array of 972 * encoding values. 973 * @hide 974 */ filterPublicFormats(int[] formats)975 public static int[] filterPublicFormats(int[] formats) { 976 if (formats == null) { 977 return null; 978 } 979 int[] myCopy = Arrays.copyOf(formats, formats.length); 980 int size = 0; 981 for (int i = 0; i < myCopy.length; i++) { 982 if (isPublicEncoding(myCopy[i])) { 983 if (size != i) { 984 myCopy[size] = myCopy[i]; 985 } 986 size++; 987 } 988 } 989 return Arrays.copyOf(myCopy, size); 990 } 991 992 /** @removed */ AudioFormat()993 public AudioFormat() 994 { 995 throw new UnsupportedOperationException("There is no valid usage of this constructor"); 996 } 997 998 /** 999 * Constructor used by the JNI. Parameters are not checked for validity. 
1000 */ 1001 // Update sound trigger JNI in core/jni/android_hardware_SoundTrigger.cpp when modifying this 1002 // constructor 1003 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) AudioFormat(int encoding, int sampleRate, int channelMask, int channelIndexMask)1004 private AudioFormat(int encoding, int sampleRate, int channelMask, int channelIndexMask) { 1005 this( 1006 AUDIO_FORMAT_HAS_PROPERTY_ENCODING 1007 | AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE 1008 | AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK 1009 | AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK, 1010 encoding, sampleRate, channelMask, channelIndexMask 1011 ); 1012 } 1013 AudioFormat(int propertySetMask, int encoding, int sampleRate, int channelMask, int channelIndexMask)1014 private AudioFormat(int propertySetMask, 1015 int encoding, int sampleRate, int channelMask, int channelIndexMask) { 1016 mPropertySetMask = propertySetMask; 1017 mEncoding = (propertySetMask & AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0 1018 ? encoding : ENCODING_INVALID; 1019 mSampleRate = (propertySetMask & AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE) != 0 1020 ? sampleRate : SAMPLE_RATE_UNSPECIFIED; 1021 mChannelMask = (propertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0 1022 ? channelMask : CHANNEL_INVALID; 1023 mChannelIndexMask = (propertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0 1024 ? channelIndexMask : CHANNEL_INVALID; 1025 1026 // Compute derived values. 1027 1028 final int channelIndexCount = Integer.bitCount(getChannelIndexMask()); 1029 int channelCount = channelCountFromOutChannelMask(getChannelMask()); 1030 if (channelCount == 0) { 1031 channelCount = channelIndexCount; 1032 } else if (channelCount != channelIndexCount && channelIndexCount != 0) { 1033 channelCount = 0; // position and index channel count mismatch 1034 } 1035 mChannelCount = channelCount; 1036 1037 int frameSizeInBytes = 1; 1038 try { 1039 frameSizeInBytes = getBytesPerSample(mEncoding) * channelCount; 1040 } catch (IllegalArgumentException iae) { 1041 // ignored 1042 } 1043 // it is possible that channel count is 0, so ensure we return 1 for 1044 // mFrameSizeInBytes for consistency. 1045 mFrameSizeInBytes = frameSizeInBytes != 0 ? frameSizeInBytes : 1; 1046 } 1047 1048 /** @hide */ 1049 public final static int AUDIO_FORMAT_HAS_PROPERTY_NONE = 0x0; 1050 /** @hide */ 1051 public final static int AUDIO_FORMAT_HAS_PROPERTY_ENCODING = 0x1 << 0; 1052 /** @hide */ 1053 public final static int AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE = 0x1 << 1; 1054 /** @hide */ 1055 public final static int AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK = 0x1 << 2; 1056 /** @hide */ 1057 public final static int AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK = 0x1 << 3; 1058 1059 // This is an immutable class, all member variables are final. 1060 1061 // Essential values. 1062 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 1063 private final int mEncoding; 1064 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 1065 private final int mSampleRate; 1066 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 1067 private final int mChannelMask; 1068 private final int mChannelIndexMask; 1069 private final int mPropertySetMask; 1070 1071 // Derived values computed in the constructor, cached here. 1072 private final int mChannelCount; 1073 private final int mFrameSizeInBytes; 1074 1075 /** 1076 * Return the encoding. 
1077 * See the section on <a href="#encoding">encodings</a> for more information about the different 1078 * types of supported audio encoding. 1079 * @return one of the values that can be set in {@link Builder#setEncoding(int)} or 1080 * {@link AudioFormat#ENCODING_INVALID} if not set. 1081 */ getEncoding()1082 public @EncodingCanBeInvalid int getEncoding() { 1083 return mEncoding; 1084 } 1085 1086 /** 1087 * Return the sample rate. 1088 * @return one of the values that can be set in {@link Builder#setSampleRate(int)} or 1089 * {@link #SAMPLE_RATE_UNSPECIFIED} if not set. 1090 */ getSampleRate()1091 public int getSampleRate() { 1092 return mSampleRate; 1093 } 1094 1095 /** 1096 * Return the channel mask. 1097 * See the section on <a href="#channelMask">channel masks</a> for more information about 1098 * the difference between index-based masks(as returned by {@link #getChannelIndexMask()}) and 1099 * the position-based mask returned by this function. 1100 * @return one of the values that can be set in {@link Builder#setChannelMask(int)} or 1101 * {@link AudioFormat#CHANNEL_INVALID} if not set. 1102 */ getChannelMask()1103 public int getChannelMask() { 1104 return mChannelMask; 1105 } 1106 1107 /** 1108 * Return the channel index mask. 1109 * See the section on <a href="#channelMask">channel masks</a> for more information about 1110 * the difference between index-based masks, and position-based masks (as returned 1111 * by {@link #getChannelMask()}). 1112 * @return one of the values that can be set in {@link Builder#setChannelIndexMask(int)} or 1113 * {@link AudioFormat#CHANNEL_INVALID} if not set or an invalid mask was used. 1114 */ getChannelIndexMask()1115 public int getChannelIndexMask() { 1116 return mChannelIndexMask; 1117 } 1118 1119 /** 1120 * Return the channel count. 1121 * @return the channel count derived from the channel position mask or the channel index mask. 1122 * Zero is returned if both the channel position mask and the channel index mask are not set. 1123 */ getChannelCount()1124 public int getChannelCount() { 1125 return mChannelCount; 1126 } 1127 1128 /** 1129 * Return the frame size in bytes. 1130 * 1131 * For PCM or PCM packed compressed data this is the size of a sample multiplied 1132 * by the channel count. For all other cases, including invalid/unset channel masks, 1133 * this will return 1 byte. 1134 * As an example, a stereo 16-bit PCM format would have a frame size of 4 bytes, 1135 * an 8 channel float PCM format would have a frame size of 32 bytes, 1136 * and a compressed data format (not packed in PCM) would have a frame size of 1 byte. 1137 * 1138 * Both {@link AudioRecord} or {@link AudioTrack} process data in multiples of 1139 * this frame size. 1140 * 1141 * @return The audio frame size in bytes corresponding to the encoding and the channel mask. 1142 */ getFrameSizeInBytes()1143 public @IntRange(from = 1) int getFrameSizeInBytes() { 1144 return mFrameSizeInBytes; 1145 } 1146 1147 /** @hide */ getPropertySetMask()1148 public int getPropertySetMask() { 1149 return mPropertySetMask; 1150 } 1151 1152 /** @hide */ toLogFriendlyString()1153 public String toLogFriendlyString() { 1154 return String.format("%dch %dHz %s", 1155 mChannelCount, mSampleRate, toLogFriendlyEncoding(mEncoding)); 1156 } 1157 1158 /** 1159 * Builder class for {@link AudioFormat} objects. 1160 * Use this class to configure and create an AudioFormat instance. 
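     * <p>For example, a minimal sketch of building a format for float stereo playback (the
     * sample rate here is an arbitrary choice, and could be omitted to keep the platform
     * default):
     * <pre>{@code
     * AudioFormat format = new AudioFormat.Builder()
     *         .setEncoding(AudioFormat.ENCODING_PCM_FLOAT)
     *         .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
     *         .setSampleRate(44100)
     *         .build();
     * int frameSize = format.getFrameSizeInBytes();  // 2 channels * 4 bytes per float sample = 8
     * }</pre>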
     * <p>By setting format
     * characteristics such as audio encoding, channel mask or sample rate, you indicate which
     * of those are to vary from the default behavior on this device wherever this audio format
     * is used. See {@link AudioFormat} for a complete description of the different parameters that
     * can be used to configure an <code>AudioFormat</code> instance.
     * <p>{@link AudioFormat} is for instance used in
     * {@link AudioTrack#AudioTrack(AudioAttributes, AudioFormat, int, int, int)}. In this
     * constructor, every format characteristic set on the <code>Builder</code> (e.g. with
     * {@link #setSampleRate(int)}) will alter the default values used by an
     * <code>AudioTrack</code>. In this case for audio playback with <code>AudioTrack</code>, the
     * sample rate set in the <code>Builder</code> would override the platform output sample rate
     * which would otherwise be selected by default.
     */
    public static class Builder {
        private int mEncoding = ENCODING_INVALID;
        private int mSampleRate = SAMPLE_RATE_UNSPECIFIED;
        private int mChannelMask = CHANNEL_INVALID;
        private int mChannelIndexMask = 0;
        private int mPropertySetMask = AUDIO_FORMAT_HAS_PROPERTY_NONE;

        /**
         * Constructs a new Builder with none of the format characteristics set.
         */
        public Builder() {
        }

        /**
         * Constructs a new Builder from a given {@link AudioFormat}.
         * @param af the {@link AudioFormat} object whose data will be reused in the new Builder.
         */
        public Builder(AudioFormat af) {
            mEncoding = af.mEncoding;
            mSampleRate = af.mSampleRate;
            mChannelMask = af.mChannelMask;
            mChannelIndexMask = af.mChannelIndexMask;
            mPropertySetMask = af.mPropertySetMask;
        }

        /**
         * Combines all of the format characteristics that have been set and returns a new
         * {@link AudioFormat} object.
         * @return a new {@link AudioFormat} object
         */
        public AudioFormat build() {
            AudioFormat af = new AudioFormat(
                    mPropertySetMask,
                    mEncoding,
                    mSampleRate,
                    mChannelMask,
                    mChannelIndexMask
                    );
            return af;
        }

        /**
         * Sets the data encoding format.
         * @param encoding the specified encoding or default.
         * @return the same Builder instance.
1218 * @throws java.lang.IllegalArgumentException 1219 */ setEncoding(@ncoding int encoding)1220 public Builder setEncoding(@Encoding int encoding) throws IllegalArgumentException { 1221 switch (encoding) { 1222 case ENCODING_DEFAULT: 1223 mEncoding = ENCODING_PCM_16BIT; 1224 break; 1225 case ENCODING_PCM_16BIT: 1226 case ENCODING_PCM_8BIT: 1227 case ENCODING_PCM_FLOAT: 1228 case ENCODING_AC3: 1229 case ENCODING_E_AC3: 1230 case ENCODING_DTS: 1231 case ENCODING_DTS_HD: 1232 case ENCODING_MP3: 1233 case ENCODING_AAC_LC: 1234 case ENCODING_AAC_HE_V1: 1235 case ENCODING_AAC_HE_V2: 1236 case ENCODING_IEC61937: 1237 case ENCODING_DOLBY_TRUEHD: 1238 case ENCODING_AAC_ELD: 1239 case ENCODING_AAC_XHE: 1240 case ENCODING_AC4: 1241 case ENCODING_E_AC3_JOC: 1242 case ENCODING_DOLBY_MAT: 1243 case ENCODING_OPUS: 1244 case ENCODING_PCM_24BIT_PACKED: 1245 case ENCODING_PCM_32BIT: 1246 case ENCODING_MPEGH_BL_L3: 1247 case ENCODING_MPEGH_BL_L4: 1248 case ENCODING_MPEGH_LC_L3: 1249 case ENCODING_MPEGH_LC_L4: 1250 case ENCODING_DTS_UHD_P1: 1251 case ENCODING_DRA: 1252 case ENCODING_DTS_HD_MA: 1253 case ENCODING_DTS_UHD_P2: 1254 case ENCODING_DSD: 1255 mEncoding = encoding; 1256 break; 1257 case ENCODING_INVALID: 1258 default: 1259 throw new IllegalArgumentException("Invalid encoding " + encoding); 1260 } 1261 mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_ENCODING; 1262 return this; 1263 } 1264 1265 /** 1266 * Sets the channel position mask. 1267 * The channel position mask specifies the association between audio samples in a frame 1268 * with named endpoint channels. The samples in the frame correspond to the 1269 * named set bits in the channel position mask, in ascending bit order. 1270 * See {@link #setChannelIndexMask(int)} to specify channels 1271 * based on endpoint numbered channels. This <a href="#channelPositionMask">description of 1272 * channel position masks</a> covers the concept in more details. 1273 * @param channelMask describes the configuration of the audio channels. 1274 * <p> For output, the channelMask can be an OR-ed combination of 1275 * channel position masks, e.g. 1276 * {@link AudioFormat#CHANNEL_OUT_FRONT_LEFT}, 1277 * {@link AudioFormat#CHANNEL_OUT_FRONT_RIGHT}, 1278 * {@link AudioFormat#CHANNEL_OUT_FRONT_CENTER}, 1279 * {@link AudioFormat#CHANNEL_OUT_LOW_FREQUENCY} 1280 * {@link AudioFormat#CHANNEL_OUT_BACK_LEFT}, 1281 * {@link AudioFormat#CHANNEL_OUT_BACK_RIGHT}, 1282 * {@link AudioFormat#CHANNEL_OUT_BACK_CENTER}, 1283 * {@link AudioFormat#CHANNEL_OUT_SIDE_LEFT}, 1284 * {@link AudioFormat#CHANNEL_OUT_SIDE_RIGHT}. 1285 * <p> For output or {@link AudioTrack}, channel position masks which do not contain 1286 * matched left/right pairs are invalid. 1287 * <p> For input or {@link AudioRecord}, the mask should be 1288 * {@link AudioFormat#CHANNEL_IN_MONO} or 1289 * {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is 1290 * guaranteed to work on all devices. 1291 * @return the same <code>Builder</code> instance. 1292 * @throws IllegalArgumentException if the channel mask is invalid or 1293 * if both channel index mask and channel position mask 1294 * are specified but do not have the same channel count. 
         * <p> For output or {@link AudioTrack}, channel position masks which do not contain
         * matched left/right pairs are invalid.
         * <p> For input or {@link AudioRecord}, the mask should be
         * {@link AudioFormat#CHANNEL_IN_MONO} or
         * {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is
         * guaranteed to work on all devices.
         * @return the same <code>Builder</code> instance.
         * @throws IllegalArgumentException if the channel mask is invalid or
         *    if both channel index mask and channel position mask
         *    are specified but do not have the same channel count.
         */
        public @NonNull Builder setChannelMask(int channelMask) {
            if (channelMask == CHANNEL_INVALID) {
                throw new IllegalArgumentException("Invalid zero channel mask");
            } else if (/* channelMask != 0 && */ mChannelIndexMask != 0 &&
                    Integer.bitCount(channelMask) != Integer.bitCount(mChannelIndexMask)) {
                throw new IllegalArgumentException("Mismatched channel count for mask " +
                        Integer.toHexString(channelMask).toUpperCase());
            }
            mChannelMask = channelMask;
            mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK;
            return this;
        }

        /**
         * Sets the channel index mask.
         * A channel index mask specifies the association of audio samples in the frame
         * with numbered endpoint channels. The i-th bit in the channel index
         * mask corresponds to the i-th endpoint channel.
         * For example, an endpoint with four channels is represented
         * as index mask bits 0 through 3. This <a href="#channelIndexMask">description of channel
         * index masks</a> covers the concept in more detail.
         * See {@link #setChannelMask(int)} for a positional mask interpretation.
         * <p> Both {@link AudioTrack} and {@link AudioRecord} support
         * a channel index mask.
         * If a channel index mask is specified it is used,
         * otherwise the channel position mask specified
         * by <code>setChannelMask</code> is used.
         * For <code>AudioTrack</code> and <code>AudioRecord</code>,
         * a channel position mask is not required if a channel index mask is specified.
         *
         * @param channelIndexMask describes the configuration of the audio channels.
         * <p> For output, the <code>channelIndexMask</code> is an OR-ed combination of
         * bits representing the mapping of <code>AudioTrack</code> write samples
         * to output sink channels.
         * For example, a mask of <code>0xa</code>, or binary <code>1010</code>,
         * means the <code>AudioTrack</code> write frame consists of two samples,
         * which are routed to the second and the fourth channels of the output sink.
         * Unmatched output sink channels are zero filled and unmatched
         * <code>AudioTrack</code> write samples are dropped.
         * <p> For input, the <code>channelIndexMask</code> is an OR-ed combination of
         * bits representing the mapping of input source channels to
         * <code>AudioRecord</code> read samples.
         * For example, a mask of <code>0x5</code>, or binary
         * <code>101</code>, will read from the first and third channel of the input
         * source device and store them in the first and second sample of the
         * <code>AudioRecord</code> read frame.
         * Unmatched input source channels are dropped and
         * unmatched <code>AudioRecord</code> read samples are zero filled.
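         * <p> As a sketch (not part of the original documentation), selecting the first two
         * channels of a multichannel endpoint by index, rather than by position, could be
         * expressed as:
         * <pre class="prettyprint">
         * AudioFormat format = new AudioFormat.Builder()
         *         .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
         *         .setSampleRate(48000)
         *         .setChannelIndexMask(0x3)    // binary 11: endpoint channels 0 and 1
         *         .build();
         * </pre>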
         * @return the same <code>Builder</code> instance.
         * @throws IllegalArgumentException if the channel index mask is invalid or
         *    if both channel index mask and channel position mask
         *    are specified but do not have the same channel count.
         */
        public @NonNull Builder setChannelIndexMask(int channelIndexMask) {
            if (channelIndexMask == 0) {
                throw new IllegalArgumentException("Invalid zero channel index mask");
            } else if (/* channelIndexMask != 0 && */ mChannelMask != 0 &&
                    Integer.bitCount(channelIndexMask) != Integer.bitCount(mChannelMask)) {
                throw new IllegalArgumentException("Mismatched channel count for index mask " +
                        Integer.toHexString(channelIndexMask).toUpperCase());
            }
            mChannelIndexMask = channelIndexMask;
            mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK;
            return this;
        }

        /**
         * Sets the sample rate.
         * @param sampleRate the sample rate expressed in Hz
         * @return the same Builder instance.
         * @throws java.lang.IllegalArgumentException if the sample rate is invalid.
         */
        public Builder setSampleRate(int sampleRate) throws IllegalArgumentException {
            // TODO Consider whether to keep the MIN and MAX range checks here.
            // It is not necessary and poses the problem of defining the limits independently from
            // native implementation or platform capabilities.
            if (((sampleRate < SAMPLE_RATE_HZ_MIN) || (sampleRate > SAMPLE_RATE_HZ_MAX)) &&
                    sampleRate != SAMPLE_RATE_UNSPECIFIED) {
                throw new IllegalArgumentException("Invalid sample rate " + sampleRate);
            }
            mSampleRate = sampleRate;
            mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE;
            return this;
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        AudioFormat that = (AudioFormat) o;

        if (mPropertySetMask != that.mPropertySetMask) return false;

        // return false if any of the properties is set and the values differ
        return !((((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0)
                    && (mEncoding != that.mEncoding))
                || (((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE) != 0)
                    && (mSampleRate != that.mSampleRate))
                || (((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0)
                    && (mChannelMask != that.mChannelMask))
                || (((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0)
                    && (mChannelIndexMask != that.mChannelIndexMask)));
    }

    @Override
    public int hashCode() {
        return Objects.hash(mPropertySetMask, mSampleRate, mEncoding, mChannelMask,
                mChannelIndexMask);
    }

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(mPropertySetMask);
        dest.writeInt(mEncoding);
        dest.writeInt(mSampleRate);
        dest.writeInt(mChannelMask);
        dest.writeInt(mChannelIndexMask);
    }

    private AudioFormat(Parcel in) {
        this(
            in.readInt(), // propertySetMask
            in.readInt(), // encoding
            in.readInt(), // sampleRate
            in.readInt(), // channelMask
            in.readInt()  // channelIndexMask
            );
    }

    public static final @android.annotation.NonNull Parcelable.Creator<AudioFormat> CREATOR =
            new Parcelable.Creator<AudioFormat>() {
        public AudioFormat createFromParcel(Parcel p) {
            return new AudioFormat(p);
        }
        public AudioFormat[] newArray(int size) {
            return new AudioFormat[size];
        }
    };
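
    // Illustrative only (not part of the original source): because AudioFormat implements
    // Parcelable, an instance can be round-tripped through a Parcel, e.g.
    //     Parcel p = Parcel.obtain();
    //     format.writeToParcel(p, 0);
    //     p.setDataPosition(0);
    //     AudioFormat copy = AudioFormat.CREATOR.createFromParcel(p);
    //     p.recycle();
    // The copy compares equal to the original for every property that was set on it.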

    @Override
    public String toString() {
        return "AudioFormat:"
                + " props=" + mPropertySetMask
                + " enc=" + mEncoding
                + " chan=0x" + Integer.toHexString(mChannelMask).toUpperCase()
                + " chan_index=0x" + Integer.toHexString(mChannelIndexMask).toUpperCase()
                + " rate=" + mSampleRate;
    }

    /** @hide */
    @IntDef(flag = false, prefix = "ENCODING", value = {
        ENCODING_DEFAULT,
        ENCODING_PCM_16BIT,
        ENCODING_PCM_8BIT,
        ENCODING_PCM_FLOAT,
        ENCODING_AC3,
        ENCODING_E_AC3,
        ENCODING_DTS,
        ENCODING_DTS_HD,
        ENCODING_MP3,
        ENCODING_AAC_LC,
        ENCODING_AAC_HE_V1,
        ENCODING_AAC_HE_V2,
        ENCODING_IEC61937,
        ENCODING_DOLBY_TRUEHD,
        ENCODING_AAC_ELD,
        ENCODING_AAC_XHE,
        ENCODING_AC4,
        ENCODING_E_AC3_JOC,
        ENCODING_DOLBY_MAT,
        ENCODING_OPUS,
        ENCODING_PCM_24BIT_PACKED,
        ENCODING_PCM_32BIT,
        ENCODING_MPEGH_BL_L3,
        ENCODING_MPEGH_BL_L4,
        ENCODING_MPEGH_LC_L3,
        ENCODING_MPEGH_LC_L4,
        ENCODING_DTS_UHD_P1,
        ENCODING_DRA,
        ENCODING_DTS_HD_MA,
        ENCODING_DTS_UHD_P2,
        ENCODING_DSD }
    )
    @Retention(RetentionPolicy.SOURCE)
    public @interface Encoding {}

    /** @hide same as @Encoding, but adding ENCODING_INVALID */
    @IntDef(flag = false, prefix = "ENCODING", value = {
        ENCODING_INVALID,
        ENCODING_DEFAULT,
        ENCODING_PCM_16BIT,
        ENCODING_PCM_8BIT,
        ENCODING_PCM_FLOAT,
        ENCODING_AC3,
        ENCODING_E_AC3,
        ENCODING_DTS,
        ENCODING_DTS_HD,
        ENCODING_MP3,
        ENCODING_AAC_LC,
        ENCODING_AAC_HE_V1,
        ENCODING_AAC_HE_V2,
        ENCODING_IEC61937,
        ENCODING_DOLBY_TRUEHD,
        ENCODING_AAC_ELD,
        ENCODING_AAC_XHE,
        ENCODING_AC4,
        ENCODING_E_AC3_JOC,
        ENCODING_DOLBY_MAT,
        ENCODING_OPUS,
        ENCODING_PCM_24BIT_PACKED,
        ENCODING_PCM_32BIT,
        ENCODING_MPEGH_BL_L3,
        ENCODING_MPEGH_BL_L4,
        ENCODING_MPEGH_LC_L3,
        ENCODING_MPEGH_LC_L4,
        ENCODING_DTS_UHD_P1,
        ENCODING_DRA,
        ENCODING_DTS_HD_MA,
        ENCODING_DTS_UHD_P2,
        ENCODING_DSD }
    )
    @Retention(RetentionPolicy.SOURCE)
    public @interface EncodingCanBeInvalid {}

    /** @hide */
    public static final int[] SURROUND_SOUND_ENCODING = {
            ENCODING_AC3,
            ENCODING_E_AC3,
            ENCODING_DTS,
            ENCODING_DTS_HD,
            ENCODING_AAC_LC,
            ENCODING_DOLBY_TRUEHD,
            ENCODING_AC4,
            ENCODING_E_AC3_JOC,
            ENCODING_DOLBY_MAT,
            ENCODING_MPEGH_BL_L3,
            ENCODING_MPEGH_BL_L4,
            ENCODING_MPEGH_LC_L3,
            ENCODING_MPEGH_LC_L4,
            ENCODING_DTS_UHD_P1,
            ENCODING_DRA,
            ENCODING_DTS_HD_MA,
            ENCODING_DTS_UHD_P2
    };

    /** @hide */
    @IntDef(flag = false, prefix = "ENCODING", value = {
        ENCODING_AC3,
        ENCODING_E_AC3,
        ENCODING_DTS,
        ENCODING_DTS_HD,
        ENCODING_AAC_LC,
        ENCODING_DOLBY_TRUEHD,
        ENCODING_AC4,
        ENCODING_E_AC3_JOC,
        ENCODING_DOLBY_MAT,
        ENCODING_MPEGH_BL_L3,
        ENCODING_MPEGH_BL_L4,
        ENCODING_MPEGH_LC_L3,
        ENCODING_MPEGH_LC_L4,
        ENCODING_DTS_UHD_P1,
        ENCODING_DRA,
        ENCODING_DTS_HD_MA,
        ENCODING_DTS_UHD_P2 }
    )
    @Retention(RetentionPolicy.SOURCE)
    public @interface SurroundSoundEncoding {}

    /**
     * @hide
     *
     * Return a default name for a surround format. This is not an internationalized name.
     * It is just a default to use if an internationalized name is not available.
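     * <p> For instance (illustrative), {@code toDisplayName(ENCODING_E_AC3_JOC)} returns
     * {@code "Dolby Atmos in Dolby Digital Plus"}, per the mapping below.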
     *
     * @param audioFormat a surround format
     * @return short default name for the format.
     */
    public static String toDisplayName(@SurroundSoundEncoding int audioFormat) {
        switch (audioFormat) {
            case ENCODING_AC3:
                return "Dolby Digital";
            case ENCODING_E_AC3:
                return "Dolby Digital Plus";
            case ENCODING_DTS:
                return "DTS";
            case ENCODING_DTS_HD:
                return "DTS HD";
            case ENCODING_AAC_LC:
                return "AAC";
            case ENCODING_DOLBY_TRUEHD:
                return "Dolby TrueHD";
            case ENCODING_AC4:
                return "Dolby AC-4";
            case ENCODING_E_AC3_JOC:
                return "Dolby Atmos in Dolby Digital Plus";
            case ENCODING_DOLBY_MAT:
                return "Dolby MAT";
            case ENCODING_MPEGH_BL_L3:
                return "MPEG-H 3D Audio baseline profile level 3";
            case ENCODING_MPEGH_BL_L4:
                return "MPEG-H 3D Audio baseline profile level 4";
            case ENCODING_MPEGH_LC_L3:
                return "MPEG-H 3D Audio low complexity profile level 3";
            case ENCODING_MPEGH_LC_L4:
                return "MPEG-H 3D Audio low complexity profile level 4";
            case ENCODING_DTS_UHD_P1:
                return "DTS UHD Profile 1";
            case ENCODING_DRA:
                return "DRA";
            case ENCODING_DTS_HD_MA:
                return "DTS HD Master Audio";
            case ENCODING_DTS_UHD_P2:
                return "DTS UHD Profile 2";
            default:
                return "Unknown surround sound format";
        }
    }

}