1/*
2 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 *  Use of this source code is governed by a BSD-style license
5 *  that can be found in the LICENSE file in the root of the source
6 *  tree. An additional intellectual property rights grant can be found
7 *  in the file PATENTS.  All contributing project authors may
8 *  be found in the AUTHORS file in the root of the source tree.
9 */
10
11#import <Foundation/Foundation.h>
12
13#import "RTCCameraVideoCapturer.h"
14#import "base/RTCLogging.h"
15#import "base/RTCVideoFrameBuffer.h"
16#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
17
18#if TARGET_OS_IPHONE
19#import "helpers/UIDevice+RTCDevice.h"
20#endif
21
22#import "helpers/AVCaptureSession+DevicePosition.h"
23#import "helpers/RTCDispatcher+Private.h"
24#include "rtc_base/system/gcd_helpers.h"
25
// Conversion factor from CMTime seconds to the nanosecond timestamps carried
// by RTCVideoFrame. Declared `static` for internal linkage: a file-scope
// `const` in C otherwise has external linkage, and this unprefixed name could
// collide with an identical symbol in another translation unit.
static const int64_t kNanosecondsPerSecond = 1000000000;
27
@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) () <AVCaptureVideoDataOutputSampleBufferDelegate>

// Serial queue on which sample buffers are delivered and forwarded.
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
// Device currently backing the capture session's input.
@property(nonatomic, strong) AVCaptureDevice *currentDevice;
// Latch preventing more than one automatic recovery from a fatal error.
@property(nonatomic, assign) BOOL hasRetriedOnFatalError;
@property(nonatomic, assign) BOOL isRunning;
// Will the session be running once all asynchronous operations have been completed?
@property(nonatomic, assign) BOOL willBeRunning;
@end
37
@implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) {
  AVCaptureVideoDataOutput *_videoDataOutput;
  AVCaptureSession *_captureSession;
  // Most efficient pixel format we support, chosen once in -setupVideoDataOutput.
  FourCharCode _preferredOutputPixelFormat;
  // Pixel format currently configured on the video data output.
  FourCharCode _outputPixelFormat;
  // Rotation attached to outgoing frames; derived from device orientation on iOS.
  RTCVideoRotation _rotation;
#if TARGET_OS_IPHONE
  UIDeviceOrientation _orientation;
  // Tracks whether we called beginGeneratingDeviceOrientationNotifications,
  // so begin/end calls stay balanced across start/stop.
  BOOL _generatingOrientationNotifications;
#endif
}
49
// Explicit backing ivars for the properties declared in the class extension.
@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;
@synthesize currentDevice = _currentDevice;
@synthesize hasRetriedOnFatalError = _hasRetriedOnFatalError;
@synthesize isRunning = _isRunning;
@synthesize willBeRunning = _willBeRunning;
56
// Convenience initializer: no delegate, freshly created AVCaptureSession.
- (instancetype)init {
  AVCaptureSession *session = [[AVCaptureSession alloc] init];
  return [self initWithDelegate:nil captureSession:session];
}
60
// Public initializer: uses a freshly created AVCaptureSession.
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
  AVCaptureSession *session = [[AVCaptureSession alloc] init];
  return [self initWithDelegate:delegate captureSession:session];
}
64
// Designated initializer; also used directly by tests, which inject a mock
// capture session. Builds the session and its output up front (see comment
// below) and registers for every notification the capturer reacts to.
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
                  captureSession:(AVCaptureSession *)captureSession {
  if (self = [super initWithDelegate:delegate]) {
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession:captureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    // Default to portrait until the first orientation notification arrives.
    _orientation = UIDeviceOrientationPortrait;
    _rotation = RTCVideoRotation_90;
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    // Interruption / foregrounding notifications (iOS only).
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    // Session lifecycle notifications (all platforms).
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}
112
// Non-memory cleanup only: unregister from all notifications. The owner must
// have stopped capture before releasing the last reference.
- (void)dealloc {
  NSAssert(!_willBeRunning,
           @"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
           @"call stopCapture?");
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center removeObserver:self];
}
119
// Enumerates the built-in wide-angle video devices. Uses the discovery-session
// API where available, falling back to the legacy enumeration otherwise.
+ (NSArray<AVCaptureDevice *> *)captureDevices {
#if defined(WEBRTC_IOS) && defined(__IPHONE_10_0) && \
    __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0
  AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
      discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
                            mediaType:AVMediaTypeVideo
                             position:AVCaptureDevicePositionUnspecified];
  return discoverySession.devices;
#else
  return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
#endif
}
132
// Every native format of `device` is accepted; pixel-format conversion to a
// format we can consume happens later in the output configuration.
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
  return [device formats];
}
138
// The most efficient pixel format the video data output supports, chosen once
// during setup. Clients can use this to pre-allocate matching buffers.
- (FourCharCode)preferredOutputPixelFormat {
  return _preferredOutputPixelFormat;
}
142
// Convenience entry point: starts capture without a completion callback.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps {
  [self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
}
148
// Convenience entry point: stops capture without a completion callback.
- (void)stopCapture {
  [self stopCaptureWithCompletionHandler:nil];
}
152
// Starts (or reconfigures) capture on `device` with `format` at `fps`. All
// session work happens asynchronously on the capture-session queue; the
// completion handler, if any, is invoked from that queue with nil on success
// or the lockForConfiguration error on failure.
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps
             completionHandler:(nullable void (^)(NSError *))completionHandler {
  // Set synchronously so a stopCapture issued right after this call observes
  // the pending start.
  _willBeRunning = YES;
  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);

#if TARGET_OS_IPHONE
                      // Orientation notifications must be toggled on the main
                      // queue; the flag keeps begin/end calls balanced.
                      dispatch_async(dispatch_get_main_queue(), ^{
                        if (!self->_generatingOrientationNotifications) {
                          [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
                          self->_generatingOrientationNotifications = YES;
                        }
                      });
#endif

                      self.currentDevice = device;

                      // The device must stay locked while we change its active
                      // format; failure aborts the start.
                      NSError *error = nil;
                      if (![self.currentDevice lockForConfiguration:&error]) {
                        RTCLogError(@"Failed to lock device %@. Error: %@",
                                    self.currentDevice,
                                    error.userInfo);
                        if (completionHandler) {
                          completionHandler(error);
                        }
                        self.willBeRunning = NO;
                        return;
                      }
                      [self reconfigureCaptureSessionInput];
                      [self updateOrientation];
                      [self updateDeviceCaptureFormat:format fps:fps];
                      [self updateVideoDataOutputPixelFormat:format];
                      [self.captureSession startRunning];
                      [self.currentDevice unlockForConfiguration];
                      self.isRunning = YES;
                      if (completionHandler) {
                        completionHandler(nil);
                      }
                    }];
}
197
// Stops capture asynchronously on the capture-session queue: removes all
// inputs, stops the session, and (on iOS) ends orientation notifications.
// The completion handler, if any, is invoked from the capture-session queue.
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
  // Set synchronously so an immediately following startCapture is ordered
  // after this pending stop.
  _willBeRunning = NO;
  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("Stop");
                      self.currentDevice = nil;
                      // Copy: removing inputs mutates the array being iterated.
                      for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) {
                        [self.captureSession removeInput:oldInput];
                      }
                      [self.captureSession stopRunning];

#if TARGET_OS_IPHONE
                      // Balance the begin call made in startCapture, on main.
                      dispatch_async(dispatch_get_main_queue(), ^{
                        if (self->_generatingOrientationNotifications) {
                          [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
                          self->_generatingOrientationNotifications = NO;
                        }
                      });
#endif
                      self.isRunning = NO;
                      if (completionHandler) {
                        completionHandler();
                      }
                    }];
}
224
225#pragma mark iOS notifications
226
227#if TARGET_OS_IPHONE
// UIDeviceOrientationDidChangeNotification handler: re-reads the device
// orientation on the capture-session queue so `_orientation` stays current.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                              block:^{
                                                [self updateOrientation];
                                              }];
}
234#endif
235
236#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
237
// Sample-buffer delegate callback (runs on `frameQueue`): wraps each valid
// pixel buffer in an RTCVideoFrame, tagging it with a rotation derived from
// the current device orientation (iOS) and a nanosecond timestamp, then hands
// it to the capturer delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);

  // Drop malformed or not-yet-ready buffers; exactly one sample is expected.
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (pixelBuffer == nil) {
    return;
  }

#if TARGET_OS_IPHONE
  // Default to portrait orientation on iPhone.
  BOOL usingFrontCamera = NO;
  // Check the image's EXIF for the camera the image came from as the image could have been
  // delayed as we set alwaysDiscardsLateVideoFrames to NO.
  AVCaptureDevicePosition cameraPosition =
      [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
  if (cameraPosition != AVCaptureDevicePositionUnspecified) {
    usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
  } else {
    // No EXIF position: fall back to the connection's input device.
    AVCaptureDeviceInput *deviceInput =
        (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
    usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
  }
  // Map device orientation to frame rotation. Landscape is mirrored for the
  // front camera; face up/down/unknown keep the previous rotation.
  switch (_orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = RTCVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = RTCVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#else
  // No rotation on Mac.
  _rotation = RTCVideoRotation_0;
#endif

  RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
  // Presentation time converted via double seconds to nanoseconds.
  int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
      kNanosecondsPerSecond;
  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
                                                  rotation:_rotation
                                               timeStampNs:timeStampNs];
  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
301
// Logs every dropped sample buffer, including the drop-reason attachment on
// platforms that provide one.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
#if TARGET_OS_IPHONE
  CFStringRef reason =
      CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
#else
  // DroppedFrameReason unavailable on macOS.
  CFStringRef reason = nil;
#endif
  RTCLogError(@"Dropped sample buffer. Reason: %@", (__bridge NSString *)reason);
}
314
315#pragma mark - AVCaptureSession notifications
316
// Logs capture-session interruptions, translating the iOS interruption-reason
// code into a human-readable string where one is provided.
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if TARGET_OS_IPHONE
  NSNumber *reasonValue = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reasonValue != nil) {
    switch (reasonValue.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
}
340
// Purely informational: the session resumes on its own after an interruption.
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
}
344
// Runtime-error notification handler. A media-services reset (iOS) is treated
// as recoverable; everything else goes through the one-shot fatal path.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                              block:^{
#if TARGET_OS_IPHONE
                                                if (error.code == AVErrorMediaServicesWereReset) {
                                                  [self handleNonFatalError];
                                                } else {
                                                  [self handleFatalError];
                                                }
#else
        [self handleFatalError];
#endif
                                              }];
}
362
// A successful (re)start clears the fatal-error retry latch so a future fatal
// error is again allowed one recovery attempt.
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      self.hasRetriedOnFatalError = NO;
                    }];
}
373
// Purely informational; state flags are managed by start/stop themselves.
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
}
377
// Attempts exactly one recovery from a fatal capture error. Once a recovery
// has been attempted, further fatal errors are only logged.
- (void)handleFatalError {
  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (self.hasRetriedOnFatalError) {
                        RTCLogError(@"Previous fatal error recovery failed.");
                        return;
                      }
                      RTCLogWarning(@"Attempting to recover from fatal capture error.");
                      [self handleNonFatalError];
                      self.hasRetriedOnFatalError = YES;
                    }];
}
391
// Restarts the session on the capture queue, but only while the capturer is
// still logically running — a stopped session is never resurrected.
- (void)handleNonFatalError {
  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLog(@"Restarting capture session after error.");
                      if (!self.isRunning) {
                        return;
                      }
                      [self.captureSession startRunning];
                    }];
}
401
402#if TARGET_OS_IPHONE
403
404#pragma mark - UIApplication notifications
405
// Foregrounding can leave a session that we consider running in a stopped
// state (e.g. after an interruption); restart it in that case.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      BOOL needsRestart = self.isRunning && !self.captureSession.isRunning;
                      if (needsRestart) {
                        RTCLog(@"Restarting capture session on active.");
                        [self.captureSession startRunning];
                      }
                    }];
}
416
417#endif  // TARGET_OS_IPHONE
418
419#pragma mark - Private
420
// Lazily creates the serial queue used for sample-buffer delivery, targeted
// at the high-priority global queue.
- (dispatch_queue_t)frameQueue {
  if (_frameQueue == nil) {
    dispatch_queue_t target = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
    _frameQueue = RTCDispatchQueueCreateWithTarget(
        "org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL, target);
  }
  return _frameQueue;
}
430
// One-time session configuration: stores the session, applies iOS-specific
// settings, and attaches the video data output. Returns NO if the output
// cannot be added, in which case initialization fails.
- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
  _captureSession = captureSession;
#if defined(WEBRTC_IOS)
  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
  _captureSession.usesApplicationAudioSession = NO;
#endif
  [self setupVideoDataOutput];
  if ([_captureSession canAddOutput:_videoDataOutput]) {
    [_captureSession addOutput:_videoDataOutput];
    return YES;
  }
  RTCLogError(@"Video data output unsupported.");
  return NO;
}
448
// Creates the AVCaptureVideoDataOutput and picks, from the formats the device
// can deliver, the most efficient pixel format RTCCVPixelBuffer can consume.
- (void)setupVideoDataOutput {
  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
  AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];

  // `availableVideoCVPixelFormatTypes` is ordered most-efficient-first; keep
  // only the entries we support and take the first remaining one.
  NSSet<NSNumber *> *supported = [RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats];
  NSMutableOrderedSet *candidates =
      [NSMutableOrderedSet orderedSetWithArray:output.availableVideoCVPixelFormatTypes];
  [candidates intersectSet:supported];
  NSNumber *bestFormat = candidates.firstObject;
  NSAssert(bestFormat, @"Output device has no supported formats.");

  _preferredOutputPixelFormat = [bestFormat unsignedIntValue];
  _outputPixelFormat = _preferredOutputPixelFormat;
  output.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : bestFormat};
  // Late frames are delivered rather than dropped.
  output.alwaysDiscardsLateVideoFrames = NO;
  [output setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = output;
}
470
// Aligns the data output's pixel format with the active device format when we
// support it directly; otherwise falls back to the preferred output format.
- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
  FourCharCode fourcc = CMFormatDescriptionGetMediaSubType(format.formatDescription);
  NSSet<NSNumber *> *supported = [RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats];
  if (![supported containsObject:@(fourcc)]) {
    fourcc = _preferredOutputPixelFormat;
  }

  if (fourcc == _outputPixelFormat) {
    return;
  }
  _outputPixelFormat = fourcc;
  _videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(fourcc)};
}
483
484#pragma mark - Private, called inside capture queue
485
// Applies `format` and caps the frame rate on the current device. Must run on
// the capture queue; the caller already holds lockForConfiguration.
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateDeviceCaptureFormat must be called on the capture queue.");
  @try {
    _currentDevice.activeFormat = format;
    // CMTimeMake takes an int32_t timescale; cast explicitly so the (64-bit)
    // NSInteger fps is not implicitly narrowed (avoids -Wshorten-64-to-32).
    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)fps);
  } @catch (NSException *exception) {
    // AVFoundation throws when the format is not supported by the device;
    // log and bail out rather than crash.
    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
    return;
  }
}
497
// Replaces the session's inputs with one backed by `_currentDevice`. Must run
// on the capture queue. Failures are logged and leave the session input-less.
- (void)reconfigureCaptureSessionInput {
  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"reconfigureCaptureSessionInput must be called on the capture queue.");
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
  if (!input) {
    // Fixed message: the device may be any camera, not just the front one.
    RTCLogError(@"Failed to create camera input: %@", error.localizedDescription);
    return;
  }
  [_captureSession beginConfiguration];
  // Copy: removing inputs mutates the array being iterated.
  for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
    [_captureSession removeInput:oldInput];
  }
  if ([_captureSession canAddInput:input]) {
    [_captureSession addInput:input];
  } else {
    RTCLogError(@"Cannot add camera as an input to the session.");
  }
  [_captureSession commitConfiguration];
}
519
// Caches the current UIDevice orientation into `_orientation`; the cached
// value is consumed on the next frame in the sample-buffer callback. No-op on
// macOS, where frames are never rotated.
- (void)updateOrientation {
  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
  _orientation = [UIDevice currentDevice].orientation;
#endif
}
527
528@end
529