1/*
2 *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
3 *
4 *  Use of this source code is governed by a BSD-style license
5 *  that can be found in the LICENSE file in the root of the source
6 *  tree. An additional intellectual property rights grant can be found
7 *  in the file PATENTS.  All contributing project authors may
8 *  be found in the AUTHORS file in the root of the source tree.
9 */
10
11#import "RTCAudioSession+Private.h"
12
13#import <UIKit/UIKit.h>
14
15#include <vector>
16
17#include "rtc_base/atomic_ops.h"
18#include "rtc_base/checks.h"
19#include "rtc_base/deprecated/recursive_critical_section.h"
20
21#import "RTCAudioSessionConfiguration.h"
22#import "base/RTCLogging.h"
23
// Error domain/codes surfaced through NSError to API clients, plus the KVO
// key path observed on AVAudioSession for output-volume changes. These
// definitions must stay in sync with the extern declarations in the header.
NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
NSInteger const kRTCAudioSessionErrorConfiguration = -2;
NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
28
// Class extension redeclaring `delegates` so the implementation can expose a
// copied snapshot of the weak-delegate vector (see the -delegates getter).
@interface RTC_OBJC_TYPE (RTCAudioSession)
() @property(nonatomic,
             readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
@end
33
// This class needs to be thread-safe because it is accessed from many threads.
// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
// lock contention so coarse locks should be fine for now.
@implementation RTC_OBJC_TYPE (RTCAudioSession) {
  rtc::RecursiveCriticalSection _crit;  // Backs lockForConfiguration / unlockForConfiguration.
  AVAudioSession *_session;  // Underlying session; injectable for tests via initWithAudioSession:.
  volatile int _activationCount;  // Balanced by increment/decrementActivationCount.
  volatile int _lockRecursionCount;  // > 0 while a configuration lock is held (see isLocked).
  volatile int _webRTCSessionCount;  // Count of begin/endWebRTCSession pairs.
  // The flags below are read and written under @synchronized(self).
  BOOL _isActive;
  BOOL _useManualAudio;
  BOOL _isAudioEnabled;
  BOOL _canPlayOrRecord;
  BOOL _isInterrupted;
}

@synthesize session = _session;
@synthesize delegates = _delegates;
@synthesize ignoresPreferredAttributeConfigurationErrors =
    _ignoresPreferredAttributeConfigurationErrors;
54
/// Process-wide singleton accessor; the instance is created exactly once.
+ (instancetype)sharedInstance {
  static RTC_OBJC_TYPE(RTCAudioSession) *gSharedSession = nil;
  static dispatch_once_t initOnce;
  dispatch_once(&initOnce, ^{
    gSharedSession = [[self alloc] init];
  });
  return gSharedSession;
}
63
/// Designated convenience path: wraps the process-wide system audio session.
- (instancetype)init {
  AVAudioSession *systemSession = [AVAudioSession sharedInstance];
  return [self initWithAudioSession:systemSession];
}
67
/** This initializer provides a way for unit tests to inject a fake/mock audio session. */
- (instancetype)initWithAudioSession:(id)audioSession {
  if (self = [super init]) {
    _session = audioSession;

    // Subscribe to every AVAudioSession event this class reacts to. The
    // registrations below keep the same order as the handler implementations.
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self
                           selector:@selector(handleInterruptionNotification:)
                               name:AVAudioSessionInterruptionNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleRouteChangeNotification:)
                               name:AVAudioSessionRouteChangeNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleMediaServicesWereLost:)
                               name:AVAudioSessionMediaServicesWereLostNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleMediaServicesWereReset:)
                               name:AVAudioSessionMediaServicesWereResetNotification
                             object:nil];
    // Posted on the main thread when the primary audio from other applications
    // starts and stops. Foreground applications may use this notification as a
    // hint to enable or disable audio that is secondary.
    [notificationCenter addObserver:self
                           selector:@selector(handleSilenceSecondaryAudioHintNotification:)
                               name:AVAudioSessionSilenceSecondaryAudioHintNotification
                             object:nil];
    // Also track foreground event in order to deal with interruption ended situation.
    [notificationCenter addObserver:self
                           selector:@selector(handleApplicationDidBecomeActive:)
                               name:UIApplicationDidBecomeActiveNotification
                             object:nil];
    // Observe output volume via KVO; the class object doubles as the context
    // token so observeValueForKeyPath: can filter our own registration.
    [_session addObserver:self
               forKeyPath:kRTCAudioSessionOutputVolumeSelector
                  options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                  context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];

    RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self);
  }
  return self;
}
111
/// Unregisters the notification-center and KVO observations added in init.
- (void)dealloc {
  NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
  [notificationCenter removeObserver:self];
  [_session removeObserver:self
                forKeyPath:kRTCAudioSessionOutputVolumeSelector
                   context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class];
  RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self);
}
119
/// Debug description of the current session state.
/// Note: %ld requires a `long` argument. NSInteger is `int` on 32-bit targets,
/// so every NSInteger passed for %ld must be cast explicitly; the channel
/// counts were previously passed uncast, which is undefined varargs behavior
/// on 32-bit builds.
- (NSString *)description {
  NSString *format = @"RTC_OBJC_TYPE(RTCAudioSession): {\n"
                      "  category: %@\n"
                      "  categoryOptions: %ld\n"
                      "  mode: %@\n"
                      "  isActive: %d\n"
                      "  sampleRate: %.2f\n"
                      "  IOBufferDuration: %f\n"
                      "  outputNumberOfChannels: %ld\n"
                      "  inputNumberOfChannels: %ld\n"
                      "  outputLatency: %f\n"
                      "  inputLatency: %f\n"
                      "  outputVolume: %f\n"
                      "}";
  NSString *description = [NSString stringWithFormat:format,
      self.category, (long)self.categoryOptions, self.mode,
      self.isActive, self.sampleRate, self.IOBufferDuration,
      (long)self.outputNumberOfChannels, (long)self.inputNumberOfChannels,
      self.outputLatency, self.inputLatency, self.outputVolume];
  return description;
}
141
// Caches whether this class believes the AVAudioSession is active. Guarded by
// @synchronized(self) like the other BOOL state.
- (void)setIsActive:(BOOL)isActive {
  @synchronized(self) {
    _isActive = isActive;
  }
}

- (BOOL)isActive {
  @synchronized(self) {
    return _isActive;
  }
}

// YES while at least one lockForConfiguration is outstanding. Reads the
// volatile counter directly; no lock needed.
- (BOOL)isLocked {
  return _lockRecursionCount > 0;
}
157
// When manual audio is enabled, WebRTC defers audio unit control to the app
// (via isAudioEnabled). The canPlayOrRecord update is dispatched outside the
// @synchronized block so delegate callbacks never run under the lock.
- (void)setUseManualAudio:(BOOL)useManualAudio {
  @synchronized(self) {
    if (_useManualAudio == useManualAudio) {
      return;
    }
    _useManualAudio = useManualAudio;
  }
  [self updateCanPlayOrRecord];
}

- (BOOL)useManualAudio {
  @synchronized(self) {
    return _useManualAudio;
  }
}

// App-controlled switch used together with useManualAudio; changing it
// re-evaluates canPlayOrRecord (again outside the lock).
- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
  @synchronized(self) {
    if (_isAudioEnabled == isAudioEnabled) {
      return;
    }
    _isAudioEnabled = isAudioEnabled;
  }
  [self updateCanPlayOrRecord];
}

- (BOOL)isAudioEnabled {
  @synchronized(self) {
    return _isAudioEnabled;
  }
}

// When YES, failures to apply "preferred" attributes (sample rate, buffer
// duration, ...) are tolerated during configuration.
- (void)setIgnoresPreferredAttributeConfigurationErrors:
    (BOOL)ignoresPreferredAttributeConfigurationErrors {
  @synchronized(self) {
    if (_ignoresPreferredAttributeConfigurationErrors ==
        ignoresPreferredAttributeConfigurationErrors) {
      return;
    }
    _ignoresPreferredAttributeConfigurationErrors = ignoresPreferredAttributeConfigurationErrors;
  }
}

- (BOOL)ignoresPreferredAttributeConfigurationErrors {
  @synchronized(self) {
    return _ignoresPreferredAttributeConfigurationErrors;
  }
}
206
// TODO(tkchin): Check for duplicates.
// Appends a weak reference to `delegate`; nil is ignored. Zeroed (deallocated)
// delegates are compacted opportunistically on every mutation.
- (void)addDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
  RTCLog(@"Adding delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.push_back(delegate);
    [self removeZeroedDelegates];
  }
}

// Removes every occurrence of `delegate` using the erase-remove idiom, then
// compacts any weak references that have since gone nil.
- (void)removeDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
  RTCLog(@"Removing delegate: (%p)", delegate);
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.erase(std::remove(_delegates.begin(),
                                 _delegates.end(),
                                 delegate),
                     _delegates.end());
    [self removeZeroedDelegates];
  }
}
232
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wthread-safety-analysis"

// Acquires the (recursive) configuration lock and bumps the recursion count
// so isLocked/checkLock: can verify the lock is held without acquiring it.
- (void)lockForConfiguration {
  _crit.Enter();
  rtc::AtomicOps::Increment(&_lockRecursionCount);
}

- (void)unlockForConfiguration {
  // Don't let threads other than the one that called lockForConfiguration
  // unlock.
  // TryEnter on a recursive lock only succeeds if the lock is free or already
  // held by this thread, which is what makes the ownership check work.
  if (_crit.TryEnter()) {
    rtc::AtomicOps::Decrement(&_lockRecursionCount);
    // One unlock for the tryLock, and another one to actually unlock. If this
    // was called without anyone calling lock, we will hit an assertion.
    _crit.Leave();
    _crit.Leave();
  }
}

#pragma clang diagnostic pop
254
#pragma mark - AVAudioSession proxy methods

// The read-only getters below forward directly to the underlying
// AVAudioSession. None of them mutate state, so none require the
// configuration lock.

- (NSString *)category {
  return self.session.category;
}

- (AVAudioSessionCategoryOptions)categoryOptions {
  return self.session.categoryOptions;
}

- (NSString *)mode {
  return self.session.mode;
}

- (BOOL)secondaryAudioShouldBeSilencedHint {
  return self.session.secondaryAudioShouldBeSilencedHint;
}

- (AVAudioSessionRouteDescription *)currentRoute {
  return self.session.currentRoute;
}

- (NSInteger)maximumInputNumberOfChannels {
  return self.session.maximumInputNumberOfChannels;
}

- (NSInteger)maximumOutputNumberOfChannels {
  return self.session.maximumOutputNumberOfChannels;
}

- (float)inputGain {
  return self.session.inputGain;
}

- (BOOL)inputGainSettable {
  return self.session.inputGainSettable;
}

- (BOOL)inputAvailable {
  return self.session.inputAvailable;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
  return self.session.inputDataSources;
}

- (AVAudioSessionDataSourceDescription *)inputDataSource {
  return self.session.inputDataSource;
}

- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
  return self.session.outputDataSources;
}

- (AVAudioSessionDataSourceDescription *)outputDataSource {
  return self.session.outputDataSource;
}

- (double)sampleRate {
  return self.session.sampleRate;
}

- (double)preferredSampleRate {
  return self.session.preferredSampleRate;
}

- (NSInteger)inputNumberOfChannels {
  return self.session.inputNumberOfChannels;
}

- (NSInteger)outputNumberOfChannels {
  return self.session.outputNumberOfChannels;
}

- (float)outputVolume {
  return self.session.outputVolume;
}

- (NSTimeInterval)inputLatency {
  return self.session.inputLatency;
}

- (NSTimeInterval)outputLatency {
  return self.session.outputLatency;
}

- (NSTimeInterval)IOBufferDuration {
  return self.session.IOBufferDuration;
}

- (NSTimeInterval)preferredIOBufferDuration {
  return self.session.preferredIOBufferDuration;
}
348
// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
// can just do atomic increments / decrements.
/// Activates/deactivates the session with reference counting: the underlying
/// AVAudioSession is only touched on the first activation and the last
/// balanced deactivation. Requires lockForConfiguration.
/// Returns NO and fills *outError on failure.
- (BOOL)setActive:(BOOL)active
            error:(NSError **)outError {
  if (![self checkLock:outError]) {
    return NO;
  }
  int activationCount = _activationCount;
  if (!active && activationCount == 0) {
    RTCLogWarning(@"Attempting to deactivate without prior activation.");
  }
  [self notifyWillSetActive:active];
  BOOL success = YES;
  BOOL isActive = self.isActive;
  // Keep a local error so we can log it.
  NSError *error = nil;
  BOOL shouldSetActive =
      (active && !isActive) || (!active && isActive && activationCount == 1);
  // Attempt to activate if we're not active.
  // Attempt to deactivate if we're active and it's the last unbalanced call.
  if (shouldSetActive) {
    AVAudioSession *session = self.session;
    // AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to ensure
    // that other audio sessions that were interrupted by our session can return
    // to their active state. It is recommended for VoIP apps to use this
    // option.
    AVAudioSessionSetActiveOptions options =
        active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
    success = [session setActive:active
                     withOptions:options
                           error:&error];
    // Only propagate the error on failure. Previously *outError was assigned
    // unconditionally, clobbering the caller's storage with nil on success,
    // which violates the Cocoa NSError out-parameter convention (callers must
    // test the return value, and *outError is untouched on success).
    if (!success && outError) {
      *outError = error;
    }
  }
  if (success) {
    if (shouldSetActive) {
      self.isActive = active;
      if (active && self.isInterrupted) {
        self.isInterrupted = NO;
        [self notifyDidEndInterruptionWithShouldResumeSession:YES];
      }
    }
    if (active) {
      [self incrementActivationCount];
    }
    [self notifyDidSetActive:active];
  } else {
    RTCLogError(@"Failed to setActive:%d. Error: %@",
                active, error.localizedDescription);
    [self notifyFailedToSetActive:active error:error];
  }
  // Decrement activation count on deactivation whether or not it succeeded.
  if (!active) {
    [self decrementActivationCount];
  }
  RTCLog(@"Number of current activations: %d", _activationCount);
  return success;
}
408
// Every mutating proxy below requires the configuration lock. checkLock:
// fills in *outError when the lock is not held, and the short-circuiting &&
// skips the session call in that case — identical to the original
// early-return form.

- (BOOL)setCategory:(NSString *)category
        withOptions:(AVAudioSessionCategoryOptions)options
              error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session setCategory:category withOptions:options error:outError];
}

- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
  return [self checkLock:outError] && [self.session setMode:mode error:outError];
}

- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
  return [self checkLock:outError] && [self.session setInputGain:gain error:outError];
}

- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session setPreferredSampleRate:sampleRate error:outError];
}

- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
                               error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session setPreferredIOBufferDuration:duration error:outError];
}

- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
                                    error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session setPreferredInputNumberOfChannels:count error:outError];
}

- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
                                     error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session setPreferredOutputNumberOfChannels:count error:outError];
}

- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
                          error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session overrideOutputAudioPort:portOverride error:outError];
}

- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
                    error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session setPreferredInput:inPort error:outError];
}

- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                     error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session setInputDataSource:dataSource error:outError];
}

- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
                      error:(NSError **)outError {
  return [self checkLock:outError] &&
         [self.session setOutputDataSource:dataSource error:outError];
}
493
#pragma mark - Notifications

// Handles AVAudioSessionInterruptionNotification. On begin, the session is
// considered inactive and interrupted; on end, the cached state is refreshed
// and delegates are told whether the system suggests resuming.
- (void)handleInterruptionNotification:(NSNotification *)notification {
  NSNumber* typeNumber =
      notification.userInfo[AVAudioSessionInterruptionTypeKey];
  AVAudioSessionInterruptionType type =
      (AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionInterruptionTypeBegan:
      RTCLog(@"Audio session interruption began.");
      self.isActive = NO;
      self.isInterrupted = YES;
      [self notifyDidBeginInterruption];
      break;
    case AVAudioSessionInterruptionTypeEnded: {
      RTCLog(@"Audio session interruption ended.");
      self.isInterrupted = NO;
      [self updateAudioSessionAfterEvent];
      NSNumber *optionsNumber =
          notification.userInfo[AVAudioSessionInterruptionOptionKey];
      AVAudioSessionInterruptionOptions options =
          optionsNumber.unsignedIntegerValue;
      // ShouldResume is a bitmask option, hence the bitwise test.
      BOOL shouldResume =
          options & AVAudioSessionInterruptionOptionShouldResume;
      [self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
      break;
    }
  }
}
523
// Handles AVAudioSessionRouteChangeNotification: logs the change reason and
// forwards reason + previous route to delegates. No session state is mutated
// here.
- (void)handleRouteChangeNotification:(NSNotification *)notification {
  // Get reason for current route change.
  NSNumber* reasonNumber =
      notification.userInfo[AVAudioSessionRouteChangeReasonKey];
  AVAudioSessionRouteChangeReason reason =
      (AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
  RTCLog(@"Audio route changed:");
  switch (reason) {
    case AVAudioSessionRouteChangeReasonUnknown:
      RTCLog(@"Audio route changed: ReasonUnknown");
      break;
    case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
      RTCLog(@"Audio route changed: NewDeviceAvailable");
      break;
    case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
      RTCLog(@"Audio route changed: OldDeviceUnavailable");
      break;
    case AVAudioSessionRouteChangeReasonCategoryChange:
      RTCLog(@"Audio route changed: CategoryChange to :%@",
             self.session.category);
      break;
    case AVAudioSessionRouteChangeReasonOverride:
      RTCLog(@"Audio route changed: Override");
      break;
    case AVAudioSessionRouteChangeReasonWakeFromSleep:
      RTCLog(@"Audio route changed: WakeFromSleep");
      break;
    case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
      RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
      break;
    case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
      RTCLog(@"Audio route changed: RouteConfigurationChange");
      break;
  }
  AVAudioSessionRouteDescription* previousRoute =
      notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
  // Log previous route configuration.
  RTCLog(@"Previous route: %@\nCurrent route:%@",
         previousRoute, self.session.currentRoute);
  [self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
}
565
// The media server died; refresh activation state and inform delegates so
// audio units can be torn down.
- (void)handleMediaServicesWereLost:(NSNotification *)notification {
  RTCLog(@"Media services were lost.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereLost];
}

// The media server restarted; delegates should recreate their audio objects.
- (void)handleMediaServicesWereReset:(NSNotification *)notification {
  RTCLog(@"Media services were reset.");
  [self updateAudioSessionAfterEvent];
  [self notifyMediaServicesWereReset];
}
577
// Log-only handler for the secondary-audio-silence hint; no state changes or
// delegate notifications are performed.
- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
  // TODO(henrika): just adding logs here for now until we know if we are ever
  // see this notification and might be affected by it or if further actions
  // are required.
  NSNumber *typeNumber =
      notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
  AVAudioSessionSilenceSecondaryAudioHintType type =
      (AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
  switch (type) {
    case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
      RTCLog(@"Another application's primary audio has started.");
      break;
    case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
      RTCLog(@"Another application's primary audio has stopped.");
      break;
  }
}
595
// Foregrounding is treated as an implicit interruption end because iOS does
// not always deliver AVAudioSessionInterruptionTypeEnded.
// NOTE(review): the log claims "isInterrupted changed from %d to 0" even when
// it was already 0 — slightly misleading but harmless; the delegate
// notification below intentionally fires unconditionally.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  BOOL isInterrupted = self.isInterrupted;
  RTCLog(@"Application became active after an interruption. Treating as interruption "
          "end. isInterrupted changed from %d to 0.",
         isInterrupted);
  if (isInterrupted) {
    self.isInterrupted = NO;
    [self updateAudioSessionAfterEvent];
  }
  // Always treat application becoming active as an interruption end event.
  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
}
608
609#pragma mark - Private
610
/// Error returned by every mutating call made without holding the
/// configuration lock (see checkLock:).
+ (NSError *)lockError {
  return [[NSError alloc]
      initWithDomain:kRTCAudioSessionErrorDomain
                code:kRTCAudioSessionErrorLockRequired
            userInfo:@{
              NSLocalizedDescriptionKey :
                  @"Must call lockForConfiguration before calling this method."
            }];
}
622
// Snapshot of the delegate list taken under the lock; callers iterate the
// copy so delegates may be added/removed during notification.
- (std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> >)delegates {
  @synchronized(self) {
    // Note: this returns a copy.
    return _delegates;
  }
}

// TODO(tkchin): check for duplicates.
// Like addDelegate:, but gives `delegate` priority by inserting it first.
- (void)pushDelegate:(id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)>)delegate {
  @synchronized(self) {
    _delegates.insert(_delegates.begin(), delegate);
  }
}

// Compacts weak references that have gone nil (erase-remove_if idiom).
- (void)removeZeroedDelegates {
  @synchronized(self) {
    _delegates.erase(
        std::remove_if(_delegates.begin(),
                       _delegates.end(),
                       [](id delegate) -> bool { return delegate == nil; }),
        _delegates.end());
  }
}
646
// Current number of unbalanced activations (direct volatile read).
- (int)activationCount {
  return _activationCount;
}

// Atomically bumps the activation count; returns the new value.
- (int)incrementActivationCount {
  RTCLog(@"Incrementing activation count.");
  return rtc::AtomicOps::Increment(&_activationCount);
}

// Atomically lowers the activation count; returns the new value.
- (NSInteger)decrementActivationCount {
  RTCLog(@"Decrementing activation count.");
  return rtc::AtomicOps::Decrement(&_activationCount);
}

// Number of begin/endWebRTCSession pairs currently outstanding.
- (int)webRTCSessionCount {
  return _webRTCSessionCount;
}

// Audio may flow unless manual audio is on and the app has not enabled it.
- (BOOL)canPlayOrRecord {
  return !self.useManualAudio || self.isAudioEnabled;
}
668
// Interruption flag set/cleared by the interruption, foreground, and
// CallKit-activation paths. Guarded by @synchronized(self).
- (BOOL)isInterrupted {
  @synchronized(self) {
    return _isInterrupted;
  }
}

- (void)setIsInterrupted:(BOOL)isInterrupted {
  @synchronized(self) {
    if (_isInterrupted == isInterrupted) {
      return;
    }
    _isInterrupted = isInterrupted;
  }
}
683
// Verifies the configuration lock is held. Tests the recursion count rather
// than attempting to acquire the lock so that a caller which never locked
// cannot accidentally take it here. Fills *outError when the check fails.
- (BOOL)checkLock:(NSError **)outError {
  if (self.isLocked) {
    return YES;
  }
  if (outError) {
    *outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError];
  }
  return NO;
}
695
// Registers a WebRTC session (requires the configuration lock) and notifies
// delegates that play/record started. *outError is pre-cleared by design.
- (BOOL)beginWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  rtc::AtomicOps::Increment(&_webRTCSessionCount);
  [self notifyDidStartPlayOrRecord];
  return YES;
}

// Balances beginWebRTCSession: and notifies delegates play/record stopped.
- (BOOL)endWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  rtc::AtomicOps::Decrement(&_webRTCSessionCount);
  [self notifyDidStopPlayOrRecord];
  return YES;
}
719
// Applies the WebRTC audio configuration and activates the session. Requires
// the configuration lock. Returns NO (with *outError set) when configuration
// or activation fails, or when no audio input is available.
- (BOOL)configureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  RTCLog(@"Configuring audio session for WebRTC.");

  // Configure the AVAudioSession and activate it.
  // Provide an error even if there isn't one so we can log it.
  NSError *error = nil;
  RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
      [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
  if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
    RTCLogError(@"Failed to set WebRTC audio configuration: %@",
                error.localizedDescription);
    // Do not call setActive:NO if setActive:YES failed.
    if (outError) {
      *outError = error;
    }
    return NO;
  }

  // Ensure that the device currently supports audio input.
  // TODO(tkchin): Figure out if this is really necessary.
  if (!self.inputAvailable) {
    RTCLogError(@"No audio input path is available!");
    [self unconfigureWebRTCSession:nil];
    if (outError) {
      *outError = [self configurationErrorWithDescription:@"No input path."];
    }
    return NO;
  }

  // It can happen (e.g. in combination with BT devices) that the attempt to set
  // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
  // configuration attempt using the sample rate that worked using the active
  // audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
  // combination with BT headsets. Using this "trick" seems to avoid a state
  // where Core Audio asks for a different number of audio frames than what the
  // session's I/O buffer duration corresponds to.
  // TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
  // tested on a limited set of iOS devices and BT devices.
  double sessionSampleRate = self.sampleRate;
  double preferredSampleRate = webRTCConfig.sampleRate;
  if (sessionSampleRate != preferredSampleRate) {
    RTCLogWarning(
        @"Current sample rate (%.2f) is not the preferred rate (%.2f)",
        sessionSampleRate, preferredSampleRate);
    if (![self setPreferredSampleRate:sessionSampleRate
                                error:&error]) {
      RTCLogError(@"Failed to set preferred sample rate: %@",
                  error.localizedDescription);
      // NOTE(review): this path writes *outError yet still falls through to
      // return YES — a caller that checks the return value will see success
      // while *outError is non-nil. Presumably intentional best-effort
      // behavior; confirm before relying on *outError here.
      if (outError) {
        *outError = error;
      }
    }
  }

  return YES;
}
782
// Deactivates the session. Requires the configuration lock.
// NOTE(review): the result of setActive:NO is ignored and YES is always
// returned (best-effort teardown); *outError may still be populated by the
// setActive: call on failure.
- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
  if (outError) {
    *outError = nil;
  }
  if (![self checkLock:outError]) {
    return NO;
  }
  RTCLog(@"Unconfiguring audio session for WebRTC.");
  [self setActive:NO error:outError];

  return YES;
}
795
/// Builds a configuration-domain NSError carrying `description` as the
/// localized description.
- (NSError *)configurationErrorWithDescription:(NSString *)description {
  return [[NSError alloc]
      initWithDomain:kRTCAudioSessionErrorDomain
                code:kRTCAudioSessionErrorConfiguration
            userInfo:@{NSLocalizedDescriptionKey : description}];
}
804
// Re-applies the desired activation state after a system event: re-activate
// if anyone still holds an activation, otherwise deactivate while letting
// other apps' interrupted sessions resume.
- (void)updateAudioSessionAfterEvent {
  BOOL shouldActivate = self.activationCount > 0;
  AVAudioSessionSetActiveOptions options = 0;
  if (!shouldActivate) {
    options = AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
  }
  NSError *error = nil;
  BOOL succeeded = [self.session setActive:shouldActivate
                               withOptions:options
                                     error:&error];
  if (succeeded) {
    self.isActive = shouldActivate;
  } else {
    RTCLogError(@"Failed to set session active to %d. Error:%@",
                shouldActivate, error.localizedDescription);
  }
}
819
// Recomputes canPlayOrRecord under the lock; notifies delegates only when the
// value actually changed, and does so outside the @synchronized block so
// delegate code never runs while the lock is held.
- (void)updateCanPlayOrRecord {
  BOOL canPlayOrRecord = NO;
  @synchronized(self) {
    canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
    if (_canPlayOrRecord == canPlayOrRecord) {
      return;
    }
    _canPlayOrRecord = canPlayOrRecord;
  }
  [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
}
835
// Called by the app (e.g. from CallKit's provider callbacks) when the system
// activated the session on our behalf. Mirrors the state a normal
// setActive:YES would have produced.
- (void)audioSessionDidActivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
  }
  RTCLog(@"Audio session was externally activated.");
  [self incrementActivationCount];
  self.isActive = YES;
  // When a CallKit call begins, it's possible that we receive an interruption
  // begin without a corresponding end. Since we know that we have an activated
  // audio session at this point, just clear any saved interruption flag since
  // the app may never be foregrounded during the duration of the call.
  if (self.isInterrupted) {
    RTCLog(@"Clearing interrupted state due to external activation.");
    self.isInterrupted = NO;
  }
  // Treat external audio session activation as an end interruption event.
  [self notifyDidEndInterruptionWithShouldResumeSession:YES];
}

// Counterpart to audioSessionDidActivate:; mirrors an external setActive:NO.
- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
  if (_session != session) {
    RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
  }
  RTCLog(@"Audio session was externally deactivated.");
  self.isActive = NO;
  [self decrementActivationCount];
}
863
// KVO callback. The class object is used as the registration context (see
// init), so anything else is forwarded to super as required by KVO.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
  if (context == (__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class) {
    if (object == _session) {
      // Only the new value is consumed, even though the observation was
      // registered with both New and Old options.
      NSNumber *newVolume = change[NSKeyValueChangeNewKey];
      RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
      [self notifyDidChangeOutputVolume:newVolume.floatValue];
    }
  } else {
    [super observeValueForKeyPath:keyPath
                         ofObject:object
                           change:change
                          context:context];
  }
}
881
// The notify* helpers below all follow the same pattern: iterate a copied
// snapshot of the delegate list (self.delegates returns a copy, so delegates
// may add/remove themselves during dispatch) and invoke the corresponding
// optional protocol method only on delegates that implement it.

- (void)notifyDidBeginInterruption {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidBeginInterruption:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidBeginInterruption:self];
    }
  }
}

- (void)notifyDidEndInterruptionWithShouldResumeSession:
    (BOOL)shouldResumeSession {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidEndInterruption:self
                           shouldResumeSession:shouldResumeSession];
    }
  }
}

- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
    previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidChangeRoute:self
                                    reason:reason
                             previousRoute:previousRoute];
    }
  }
}

- (void)notifyMediaServicesWereLost {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionMediaServerTerminated:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerTerminated:self];
    }
  }
}

- (void)notifyMediaServicesWereReset {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionMediaServerReset:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionMediaServerReset:self];
    }
  }
}

- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
    }
  }
}

- (void)notifyDidStartPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStartPlayOrRecord:self];
    }
  }
}

- (void)notifyDidStopPlayOrRecord {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSessionDidStopPlayOrRecord:self];
    }
  }
}

- (void)notifyDidChangeOutputVolume:(float)volume {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didChangeOutputVolume:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didChangeOutputVolume:volume];
    }
  }
}

- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didDetectPlayoutGlitch:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didDetectPlayoutGlitch:totalNumberOfGlitches];
    }
  }
}
976
// Consistency fix: these three methods used `for (id delegate : ...)` while
// every other notify* method uses `for (auto delegate : ...)`. `auto` deduces
// the vector's __weak-qualified element type directly, matching the siblings.

// Informs delegates that setActive: is about to touch activation state.
- (void)notifyWillSetActive:(BOOL)active {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:willSetActive:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self willSetActive:active];
    }
  }
}

// Informs delegates that activation state changed successfully.
- (void)notifyDidSetActive:(BOOL)active {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:didSetActive:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self didSetActive:active];
    }
  }
}

// Informs delegates that changing activation state failed, with the error.
- (void)notifyFailedToSetActive:(BOOL)active error:(NSError *)error {
  for (auto delegate : self.delegates) {
    SEL sel = @selector(audioSession:failedToSetActive:error:);
    if ([delegate respondsToSelector:sel]) {
      [delegate audioSession:self failedToSetActive:active error:error];
    }
  }
}
1003
1004@end
1005