diff --git a/Signal.xcodeproj/project.pbxproj b/Signal.xcodeproj/project.pbxproj
index c6f2749e1..d52c9cd19 100644
--- a/Signal.xcodeproj/project.pbxproj
+++ b/Signal.xcodeproj/project.pbxproj
@@ -307,9 +307,10 @@
 		45DF5DF21DDB843F00C936C7 /* CompareSafetyNumbersActivity.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45DF5DF11DDB843F00C936C7 /* CompareSafetyNumbersActivity.swift */; };
 		45E5A6991F61E6DE001E4A8A /* MarqueeLabel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45E5A6981F61E6DD001E4A8A /* MarqueeLabel.swift */; };
 		45E7A6A81E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45E7A6A61E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift */; };
-		45F170AC1E2F0351003FC1F2 /* CallAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170AB1E2F0351003FC1F2 /* CallAudioSession.swift */; };
 		45F170BB1E2FC5D3003FC1F2 /* CallAudioService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */; };
 		45F170D61E315310003FC1F2 /* Weak.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170D51E315310003FC1F2 /* Weak.swift */; };
+		45F59A082028E4FB00E8D2B0 /* OWSAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170AB1E2F0351003FC1F2 /* OWSAudioSession.swift */; };
+		45F59A0A2029140500E8D2B0 /* OWSVideoPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F59A092029140500E8D2B0 /* OWSVideoPlayer.swift */; };
 		45F659731E1BD99C00444429 /* CallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F659721E1BD99C00444429 /* CallKitCallUIAdaptee.swift */; };
 		45F659821E1BE77000444429 /* NonCallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F659811E1BE77000444429 /* NonCallKitCallUIAdaptee.swift */; };
 		45FBC5C81DF8575700E9B410 /* CallKitCallManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45FBC59A1DF8575700E9B410 /* CallKitCallManager.swift */; };
@@ -855,11 +856,12 @@
 		45E2E91E1E13EE3500457AA0 /* OWSCallNotificationsAdaptee.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; lineEnding = 0; name = OWSCallNotificationsAdaptee.h; path = UserInterface/OWSCallNotificationsAdaptee.h; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.objcpp; };
 		45E5A6981F61E6DD001E4A8A /* MarqueeLabel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MarqueeLabel.swift; sourceTree = "<group>"; };
 		45E7A6A61E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DisplayableTextFilterTest.swift; sourceTree = "<group>"; };
-		45F170AB1E2F0351003FC1F2 /* CallAudioSession.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallAudioSession.swift; sourceTree = "<group>"; };
+		45F170AB1E2F0351003FC1F2 /* OWSAudioSession.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OWSAudioSession.swift; sourceTree = "<group>"; };
 		45F170B31E2F0A6A003FC1F2 /* RTCAudioSession.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCAudioSession.h; sourceTree = "<group>"; };
 		45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallAudioService.swift; sourceTree = "<group>"; };
 		45F170D51E315310003FC1F2 /* Weak.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Weak.swift; sourceTree = "<group>"; };
 		45F3AEB51DFDE7900080CE33 /* AvatarImageView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AvatarImageView.swift; sourceTree = "<group>"; };
+		45F59A092029140500E8D2B0 /* OWSVideoPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OWSVideoPlayer.swift; sourceTree = "<group>"; };
 		45F659721E1BD99C00444429 /* CallKitCallUIAdaptee.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallKitCallUIAdaptee.swift; sourceTree = "<group>"; };
 		45F659811E1BE77000444429 /* NonCallKitCallUIAdaptee.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NonCallKitCallUIAdaptee.swift; sourceTree = "<group>"; };
 		45FBC59A1DF8575700E9B410 /* CallKitCallManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallKitCallManager.swift; sourceTree = "<group>"; };
@@ -1200,6 +1202,7 @@
 			346129921FD1E30000532771 /* migrations */,
 			347850671FD9B78A007B8332 /* NoopCallMessageHandler.swift */,
 			347850681FD9B78A007B8332 /* NoopNotificationsManager.swift */,
+			45F170AB1E2F0351003FC1F2 /* OWSAudioSession.swift */,
 			346129561FD1D74B00532771 /* Release.h */,
 			346129571FD1D74B00532771 /* Release.m */,
 			346129581FD1D74B00532771 /* SignalKeyingStorage.h */,
@@ -1587,6 +1590,7 @@
 			34CA1C281F7164F700E51C51 /* MediaMessageView.swift */,
 			45BC829C1FD9C4B400011CF3 /* ShareViewDelegate.swift */,
 			453034AA200289F50018945D /* VideoPlayerView.swift */,
+			45F59A092029140500E8D2B0 /* OWSVideoPlayer.swift */,
 		);
 		path = attachments;
 		sourceTree = "<group>";
@@ -1699,7 +1703,6 @@
 			45FBC5D01DF8592E00E9B410 /* SignalCall.swift */,
 			458DE9D51DEE3FD00071BB03 /* PeerConnectionClient.swift */,
 			4574A5D51DD6704700C6B692 /* CallService.swift */,
-			45F170AB1E2F0351003FC1F2 /* CallAudioSession.swift */,
 			45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */,
 			452C468E1E427E200087B011 /* OutboundCallInitiator.swift */,
 		);
@@ -2768,6 +2771,7 @@
 			isa = PBXSourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				45F59A0A2029140500E8D2B0 /* OWSVideoPlayer.swift in Sources */,
 				344F249B200FD03300CFB4F4 /* SharingThreadPickerViewController.m in Sources */,
 				45194F951FD7216600333B2C /* TSUnreadIndicatorInteraction.m in Sources */,
 				45BE4EA22012AD2000935E59 /* DisappearingTimerConfigurationView.swift in Sources */,
@@ -2831,6 +2835,7 @@
 				451F8A481FD715BA005CB9DA /* OWSContactAvatarBuilder.m in Sources */,
 				346129A61FD1F09100532771 /* OWSContactsManager.m in Sources */,
 				346129D21FD2085A00532771 /* CommonStrings.swift in Sources */,
+				45F59A082028E4FB00E8D2B0 /* OWSAudioSession.swift in Sources */,
 				34612A071FD7238600532771 /* OWSContactsSyncing.m in Sources */,
 				346129DF1FD5C02A00532771 /* LockInteractionController.m in Sources */,
 				451F8A471FD715BA005CB9DA /* OWSAvatarBuilder.m in Sources */,
@@ -2926,7 +2931,6 @@
 				34330AA31E79686200DF2FB9 /* OWSProgressView.m in Sources */,
 				34CA1C271F7156F300E51C51 /* MessageDetailViewController.swift in Sources */,
 				34D5CCA91EAE3D30005515DB /* AvatarViewHelper.m in Sources */,
-				45F170AC1E2F0351003FC1F2 /* CallAudioSession.swift in Sources */,
 				34D1F0B71F87F8850066283D /* OWSGenericAttachmentView.m in Sources */,
 				34B3F8801E8DF1700035BE1A /* InviteFlow.swift in Sources */,
 				34B3F8871E8DF1700035BE1A /* NotificationSettingsViewController.m in Sources */,
diff --git a/Signal/src/ViewControllers/CallViewController.swift b/Signal/src/ViewControllers/CallViewController.swift
index bfca7e7a8..9b92ba729 100644
--- a/Signal/src/ViewControllers/CallViewController.swift
+++ b/Signal/src/ViewControllers/CallViewController.swift
@@ -10,7 +10,7 @@ import SignalMessaging
 
 // TODO: Add category so that button handlers can be defined where button is created.
 // TODO: Ensure buttons enabled & disabled as necessary.
-class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
+class CallViewController: OWSViewController, CallObserver, CallServiceObserver, CallAudioServiceDelegate {
 
     let TAG = "[CallViewController]"
 
@@ -140,6 +140,9 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
         self.call = call
         self.thread = TSContactThread.getOrCreateThread(contactId: call.remotePhoneNumber)
         super.init(nibName: nil, bundle: nil)
+
+        assert(callUIAdapter.audioService.delegate == nil)
+        callUIAdapter.audioService.delegate = self
 
         observeNotifications()
     }
@@ -148,10 +151,6 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
                                                selector:#selector(didBecomeActive),
                                                name:NSNotification.Name.OWSApplicationDidBecomeActive,
                                                object:nil)
-
-        NotificationCenter.default.addObserver(forName: CallAudioServiceSessionChanged, object: nil, queue: nil) { [weak self] _ in
-            self?.didChangeAudioSession()
-        }
     }
 
     deinit {
@@ -379,21 +378,6 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
         ])
     }
 
-    func didChangeAudioSession() {
-        AssertIsOnMainThread()
-
-        // Which sources are available depends on the state of your Session.
-        // When the audio session is not yet in PlayAndRecord none are available
-        // Then if we're in speakerphone, bluetooth isn't available.
-        // So we acrew all possible audio sources in a set, and that list lives as longs as the CallViewController
-        // The downside of this is that if you e.g. unpair your bluetooth mid call, it will still appear as an option
-        // until your next call.
-        // FIXME: There's got to be a better way, but this is where I landed after a bit of work, and seems to work
-        // pretty well in practrice.
-        let availableInputs = callUIAdapter.audioService.availableInputs
-        self.allAudioSources.formUnion(availableInputs)
-    }
-
     func presentAudioSourcePicker() {
         AssertIsOnMainThread()
 
@@ -724,7 +708,7 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
             return
         }
 
-        // Marquee scrolling is distractingn during a video call, disable it.
+        // Marquee scrolling is distracting during a video call, disable it.
         contactNameLabel.labelize = call.hasLocalVideo
 
         audioModeMuteButton.isSelected = call.isMuted
@@ -779,8 +763,6 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
             audioSourceButton.isHidden = false
         } else {
             // No bluetooth audio detected
-
-            audioSourceButton.isSelected = call.isSpeakerphoneEnabled
             audioSourceButton.setImage(#imageLiteral(resourceName: "audio-call-speaker-inactive"), for: .normal)
             audioSourceButton.setImage(#imageLiteral(resourceName: "audio-call-speaker-active"), for: .selected)
@@ -820,6 +802,29 @@
         updateCallStatusLabel(callState: call.state)
     }
 
+    // We update the audioSourceButton outside of the main `updateCallUI`
+    // because `updateCallUI` is intended to be idempotent, which isn't possible
+    // with external speaker state because:
+    // - the system API which enables the external speaker is a (somewhat slow) asynchronous
+    //   operation
+    // - we want to give immediate UI feedback by marking the pressed button as selected
+    //   before the operation completes.
+    func updateAudioSourceButtonIsSelected() {
+        guard callUIAdapter.audioService.isSpeakerphoneEnabled else {
+            self.audioSourceButton.isSelected = false
+            return
+        }
+
+        // VideoChat mode enables the output speaker, but we don't
+        // want to highlight the speaker button in that case.
+        guard !call.hasLocalVideo else {
+            self.audioSourceButton.isSelected = false
+            return
+        }
+
+        self.audioSourceButton.isSelected = true
+    }
+
     // MARK: - Actions
 
     /**
@@ -852,13 +857,9 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
     func didPressSpeakerphone(sender button: UIButton) {
         Logger.info("\(TAG) called \(#function)")
 
+        button.isSelected = !button.isSelected
-        if button.isSelected {
-            callUIAdapter.setAudioSource(call: call, audioSource: AudioSource.builtInSpeaker)
-        } else {
-            // use default audio source
-            callUIAdapter.setAudioSource(call: call, audioSource: nil)
-        }
+        callUIAdapter.audioService.requestSpeakerphone(isEnabled: button.isSelected)
     }
 
     func didPressTextMessage(sender button: UIButton) {
@@ -961,6 +962,29 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
         self.updateCallUI(callState: call.state)
     }
 
+    // MARK: CallAudioServiceDelegate
+
+    func callAudioService(_ callAudioService: CallAudioService, didUpdateIsSpeakerphoneEnabled isSpeakerphoneEnabled: Bool) {
+        AssertIsOnMainThread()
+
+        updateAudioSourceButtonIsSelected()
+    }
+
+    func callAudioServiceDidChangeAudioSession(_ callAudioService: CallAudioService) {
+        AssertIsOnMainThread()
+
+        // Which sources are available depends on the state of your Session.
+        // When the audio session is not yet in PlayAndRecord none are available.
+        // Then if we're in speakerphone, bluetooth isn't available.
+        // So we accrue all possible audio sources in a set, and that list lives as long as the CallViewController.
+        // The downside of this is that if you e.g. unpair your bluetooth mid call, it will still appear as an option
+        // until your next call.
+        // FIXME: There's got to be a better way, but this is where I landed after a bit of work, and seems to work
+        // pretty well in practice.
+        let availableInputs = callAudioService.availableInputs
+        self.allAudioSources.formUnion(availableInputs)
+    }
+
     // MARK: - Video
 
     internal func updateLocalVideoTrack(localVideoTrack: RTCVideoTrack?) {
@@ -979,6 +1003,7 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
         localVideoView.isHidden = isHidden
 
         updateLocalVideoLayout()
+        updateAudioSourceButtonIsSelected()
     }
 
     var hasRemoteVideoTrack: Bool {
@@ -1002,6 +1027,8 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
     }
 
     internal func dismissIfPossible(shouldDelay: Bool, ignoreNag: Bool = false, completion: (() -> Swift.Void)? = nil) {
+        callUIAdapter.audioService.delegate = nil
+
         if hasDismissed {
             // Don't dismiss twice.
             return
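The comment block introduced above is the crux of this refactor: `updateCallUI` stays idempotent, while the speaker toggle gives instant feedback and later reconciles against the ground truth reported through `CallAudioServiceDelegate`. A minimal, self-contained sketch of that two-phase pattern; the `SpeakerToggle` type and its closure wiring are hypothetical stand-ins, not part of this diff:

```swift
import UIKit

// Phase 1: optimistic. Flip the control the instant it is tapped.
// Phase 2: reconcile. When the (slow, asynchronous) route change lands,
// the service reports the real state and the control snaps to it.
final class SpeakerToggle {
    private let button: UIButton
    private let requestSpeakerphone: (Bool) -> Void // e.g. audioService.requestSpeakerphone(isEnabled:)

    init(button: UIButton, requestSpeakerphone: @escaping (Bool) -> Void) {
        self.button = button
        self.requestSpeakerphone = requestSpeakerphone
    }

    func didTap() {
        button.isSelected = !button.isSelected    // immediate UI feedback
        requestSpeakerphone(button.isSelected)    // kicks off the async route change
    }

    // Call this from callAudioService(_:didUpdateIsSpeakerphoneEnabled:).
    func reconcile(isSpeakerphoneEnabled: Bool) {
        button.isSelected = isSpeakerphoneEnabled // ground truth wins
    }
}
```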
diff --git a/Signal/src/ViewControllers/ConversationView/ConversationViewController.m b/Signal/src/ViewControllers/ConversationView/ConversationViewController.m
index 868142685..2f586cca2 100644
--- a/Signal/src/ViewControllers/ConversationView/ConversationViewController.m
+++ b/Signal/src/ViewControllers/ConversationView/ConversationViewController.m
@@ -3174,17 +3174,14 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
     NSURL *fileURL = [NSURL fileURLWithPath:filepath];
 
     // Setup audio session
-    AVAudioSession *session = [AVAudioSession sharedInstance];
-    OWSAssert(session.recordPermission == AVAudioSessionRecordPermissionGranted);
-
-    NSError *error;
-    [session setCategory:AVAudioSessionCategoryRecord error:&error];
-    if (error) {
-        OWSFail(@"%@ Couldn't configure audio session: %@", self.logTag, error);
+    BOOL configuredAudio = [OWSAudioSession.shared setRecordCategory];
+    if (!configuredAudio) {
+        OWSFail(@"%@ Couldn't configure audio session", self.logTag);
         [self cancelVoiceMemo];
         return;
     }
 
+    NSError *error;
     // Initiate and prepare the recorder
     self.audioRecorder = [[AVAudioRecorder alloc] initWithURL:fileURL
                                                      settings:@{
@@ -3232,7 +3229,7 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
 
     NSTimeInterval durationSeconds = self.audioRecorder.currentTime;
 
-    [self.audioRecorder stop];
+    [self stopRecording];
 
     const NSTimeInterval kMinimumRecordingTimeSeconds = 1.f;
     if (durationSeconds < kMinimumRecordingTimeSeconds) {
@@ -3279,20 +3276,18 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
     }
 }
 
-- (void)cancelRecordingVoiceMemo
+- (void)stopRecording
 {
-    OWSAssertIsOnMainThread();
-
-    DDLogDebug(@"cancelRecordingVoiceMemo");
-
-    [self resetRecordingVoiceMemo];
+    [self.audioRecorder stop];
+    [OWSAudioSession.shared endAudioActivity];
 }
 
-- (void)resetRecordingVoiceMemo
+- (void)cancelRecordingVoiceMemo
 {
     OWSAssertIsOnMainThread();
+    DDLogDebug(@"cancelRecordingVoiceMemo");
 
-    [self.audioRecorder stop];
+    [self stopRecording];
     self.audioRecorder = nil;
     self.voiceMessageUUID = nil;
}
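For readers less fluent in Objective-C, the same voice-memo lifecycle in Swift terms: claim the Record category up front, bail out if that fails, and always end the audio activity when recording stops, whether it finished or was cancelled. `VoiceMemoRecorder` is a hypothetical wrapper for illustration; only `OWSAudioSession` comes from this diff:

```swift
import AVFoundation
import SignalMessaging

final class VoiceMemoRecorder {
    private var recorder: AVAudioRecorder?

    func start(fileURL: URL) throws {
        // Mirrors the early return above: no recorder unless the session
        // could actually be put into the Record category.
        guard OWSAudioSession.shared.setRecordCategory() else { return }

        let recorder = try AVAudioRecorder(url: fileURL, settings: [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVSampleRateKey: 44100,
            AVNumberOfChannelsKey: 2
        ])
        recorder.record()
        self.recorder = recorder
    }

    // Single chokepoint, like -stopRecording above: both the "send" and
    // "cancel" paths funnel through here, so the session is always released.
    func stop() {
        recorder?.stop()
        recorder = nil
        OWSAudioSession.shared.endAudioActivity()
    }
}
```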
diff --git a/Signal/src/ViewControllers/MediaDetailViewController.m b/Signal/src/ViewControllers/MediaDetailViewController.m
index af8370a62..c7603bb19 100644
--- a/Signal/src/ViewControllers/MediaDetailViewController.m
+++ b/Signal/src/ViewControllers/MediaDetailViewController.m
@@ -16,6 +16,7 @@
 #import
 #import
 #import
+#import
 #import
 #import
 
@@ -46,7 +47,10 @@ NS_ASSUME_NONNULL_BEGIN
 
 #pragma mark -
 
-@interface MediaDetailViewController () <UIScrollViewDelegate, UIGestureRecognizerDelegate, PlayerProgressBarDelegate>
+@interface MediaDetailViewController () <UIScrollViewDelegate,
+    UIGestureRecognizerDelegate,
+    PlayerProgressBarDelegate,
+    OWSVideoPlayerDelegate>
 
 @property (nonatomic) UIScrollView *scrollView;
 @property (nonatomic) UIView *mediaView;
@@ -64,7 +68,7 @@ NS_ASSUME_NONNULL_BEGIN
 @property (nonatomic) UIToolbar *footerBar;
 @property (nonatomic) BOOL areToolbarsHidden;
 
-@property (nonatomic, nullable) AVPlayer *videoPlayer;
+@property (nonatomic, nullable) OWSVideoPlayer *videoPlayer;
 @property (nonatomic, nullable) UIButton *playVideoButton;
 @property (nonatomic, nullable) PlayerProgressBar *videoProgressBar;
 @property (nonatomic, nullable) UIBarButtonItem *videoPlayBarButton;
@@ -306,7 +310,7 @@ NS_ASSUME_NONNULL_BEGIN
     if (@available(iOS 9, *)) {
         PlayerProgressBar *videoProgressBar = [PlayerProgressBar new];
         videoProgressBar.delegate = self;
-        videoProgressBar.player = self.videoPlayer;
+        videoProgressBar.player = self.videoPlayer.avPlayer;
         self.videoProgressBar = videoProgressBar;
 
         [self.view addSubview:videoProgressBar];
@@ -435,17 +439,13 @@ NS_ASSUME_NONNULL_BEGIN
     }
 
     if (@available(iOS 9.0, *)) {
-        AVPlayer *player = [[AVPlayer alloc] initWithURL:self.attachmentUrl];
+        OWSVideoPlayer *player = [[OWSVideoPlayer alloc] initWithUrl:self.attachmentUrl];
         [player seekToTime:kCMTimeZero];
+        player.delegate = self;
         self.videoPlayer = player;
 
-        [[NSNotificationCenter defaultCenter] addObserver:self
-                                                 selector:@selector(playerItemDidPlayToCompletion:)
-                                                     name:AVPlayerItemDidPlayToEndTimeNotification
-                                                   object:player.currentItem];
-
         VideoPlayerView *playerView = [VideoPlayerView new];
-        playerView.player = player;
+        playerView.player = player.avPlayer;
 
         [NSLayoutConstraint autoSetPriority:UILayoutPriorityDefaultLow
                              forConstraints:^{
@@ -892,20 +892,12 @@ NS_ASSUME_NONNULL_BEGIN
 {
     if (@available(iOS 9, *)) {
         OWSAssert(self.videoPlayer);
-        AVPlayer *player = self.videoPlayer;
 
         [self updateFooterBarButtonItemsWithIsPlayingVideo:YES];
         self.playVideoButton.hidden = YES;
         self.areToolbarsHidden = YES;
 
-        OWSAssert(player.currentItem);
-        AVPlayerItem *item = player.currentItem;
-        if (CMTIME_COMPARE_INLINE(item.currentTime, ==, item.duration)) {
-            // Rewind for repeated plays
-            [player seekToTime:kCMTimeZero];
-        }
-
-        [player play];
+        [self.videoPlayer play];
     } else {
         [self legacyPlayVideo];
         return;
@@ -921,7 +913,9 @@ NS_ASSUME_NONNULL_BEGIN
     [self.videoPlayer pause];
 }
 
-- (void)playerItemDidPlayToCompletion:(NSNotification *)notification
+#pragma mark - OWSVideoPlayer
+
+- (void)videoPlayerDidPlayToCompletion:(OWSVideoPlayer *)videoPlayer
 {
     OWSAssert(self.isVideo);
     OWSAssert(self.videoPlayer);
@@ -933,6 +927,8 @@ NS_ASSUME_NONNULL_BEGIN
     [self updateFooterBarButtonItemsWithIsPlayingVideo:NO];
 }
 
+#pragma mark - PlayerProgressBarDelegate
+
 - (void)playerProgressBarDidStartScrubbing:(PlayerProgressBar *)playerProgressBar
 {
     OWSAssert(self.videoPlayer);
diff --git a/Signal/src/ViewControllers/MessageDetailViewController.swift b/Signal/src/ViewControllers/MessageDetailViewController.swift
index 68cced685..2b959393b 100644
--- a/Signal/src/ViewControllers/MessageDetailViewController.swift
+++ b/Signal/src/ViewControllers/MessageDetailViewController.swift
@@ -88,8 +88,6 @@ class MessageDetailViewController: OWSViewController, UIScrollViewDelegate, Medi
     override func viewWillAppear(_ animated: Bool) {
         super.viewWillAppear(animated)
 
-        mediaMessageView?.viewWillAppear(animated)
-
         updateTextLayout()
 
         if mode == .focusOnMetadata {
@@ -118,12 +116,6 @@ class MessageDetailViewController: OWSViewController, UIScrollViewDelegate, Medi
         }
     }
 
-    override func viewWillDisappear(_ animated: Bool) {
-        super.viewWillDisappear(animated)
-
-        mediaMessageView?.viewWillDisappear(animated)
-    }
-
     // MARK: - Create Views
 
     private func createViews() {
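The Swift-side adoption looks the same as the Objective-C above: the controller owns an `OWSVideoPlayer`, view layers get the underlying `avPlayer`, and completion arrives via the delegate instead of a NotificationCenter observer. A condensed sketch; `VideoPreviewController` is hypothetical, `VideoPlayerView` is the existing SignalMessaging view, and the code is assumed to live inside that module since the player's initializer is internal:

```swift
import UIKit
import AVFoundation

class VideoPreviewController: UIViewController, OWSVideoPlayerDelegate {
    private var videoPlayer: OWSVideoPlayer?

    @available(iOS 9.0, *)
    func loadVideo(url: URL) {
        let player = OWSVideoPlayer(url: url)
        player.delegate = self
        self.videoPlayer = player           // retain it; the delegate back-reference is weak

        let playerView = VideoPlayerView()
        playerView.player = player.avPlayer // views render the raw AVPlayer...
        view.addSubview(playerView)

        player.play()                       // ...but transport control goes through the wrapper
    }

    func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer) {
        // Toolbar/play-button updates go here. Rewind-on-replay now lives in
        // OWSVideoPlayer.play(), so no AVPlayerItem bookkeeping is needed.
    }
}
```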
diff --git a/Signal/src/call/CallAudioService.swift b/Signal/src/call/CallAudioService.swift
index 574a910f8..6617bbe24 100644
--- a/Signal/src/call/CallAudioService.swift
+++ b/Signal/src/call/CallAudioService.swift
@@ -1,5 +1,5 @@
 //
-// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
+// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
 //
 
 import Foundation
@@ -7,8 +7,6 @@ import AVFoundation
 import SignalServiceKit
 import SignalMessaging
 
-public let CallAudioServiceSessionChanged = Notification.Name("CallAudioServiceSessionChanged")
-
 struct AudioSource: Hashable {
 
     let image: UIImage
@@ -88,11 +86,21 @@ struct AudioSource: Hashable {
     }
 }
 
+protocol CallAudioServiceDelegate: class {
+    func callAudioService(_ callAudioService: CallAudioService, didUpdateIsSpeakerphoneEnabled isEnabled: Bool)
+    func callAudioServiceDidChangeAudioSession(_ callAudioService: CallAudioService)
+}
+
 @objc class CallAudioService: NSObject, CallObserver {
 
     private var vibrateTimer: Timer?
     private let audioPlayer = AVAudioPlayer()
     private let handleRinging: Bool
+    weak var delegate: CallAudioServiceDelegate? {
+        willSet {
+            assert(newValue == nil || delegate == nil)
+        }
+    }
 
     class Sound: NSObject {
@@ -137,8 +145,11 @@ struct AudioSource: Hashable {
     // `pulseDuration` is the small pause between the two vibrations in the pair.
     private let pulseDuration = 0.2
 
-    var audioSession: CallAudioSession {
-        return CallAudioSession.shared
+    var audioSession: OWSAudioSession {
+        return OWSAudioSession.shared
+    }
+    var avAudioSession: AVAudioSession {
+        return AVAudioSession.sharedInstance()
     }
 
     // MARK: - Initializers
@@ -151,7 +162,12 @@ struct AudioSource: Hashable {
         SwiftSingletons.register(self)
 
         // Configure audio session so we don't prompt user with Record permission until call is connected.
-        audioSession.configure()
+
+        audioSession.configureRTCAudio()
+        NotificationCenter.default.addObserver(forName: .AVAudioSessionRouteChange, object: avAudioSession, queue: nil) { _ in
+            assert(!Thread.isMainThread)
+            self.updateIsSpeakerphoneEnabled()
+        }
     }
 
     // MARK: - CallObserver
@@ -177,6 +193,12 @@ struct AudioSource: Hashable {
         AssertIsOnMainThread()
 
         ensureProperAudioSession(call: call)
+
+        if let audioSource = audioSource, audioSource.isBuiltInSpeaker {
+            self.isSpeakerphoneEnabled = true
+        } else {
+            self.isSpeakerphoneEnabled = false
+        }
     }
 
     internal func hasLocalVideoDidChange(call: SignalCall, hasLocalVideo: Bool) {
@@ -185,11 +207,43 @@ struct AudioSource: Hashable {
         ensureProperAudioSession(call: call)
     }
 
+    // Speakerphone can be manipulated by the in-app callscreen or via the system callscreen (CallKit).
+    // Unlike other CallKit CallScreen buttons, enabling doesn't trigger a CXAction, so it's not as simple
+    // to track state changes. Instead we never store the state and directly access the ground-truth in the
+    // AVAudioSession.
+    private(set) var isSpeakerphoneEnabled: Bool = false {
+        didSet {
+            self.delegate?.callAudioService(self, didUpdateIsSpeakerphoneEnabled: isSpeakerphoneEnabled)
+        }
+    }
+
+    public func requestSpeakerphone(isEnabled: Bool) {
+        // This is a little too slow to execute on the main thread and the results are not immediately available after execution
+        // anyway, so we dispatch async. If you need to know the new value, you'll need to check isSpeakerphoneEnabled and take
+        // advantage of the CallAudioServiceDelegate.callAudioService(_:didUpdateIsSpeakerphoneEnabled:)
+        DispatchQueue.global().async {
+            do {
+                try self.avAudioSession.overrideOutputAudioPort( isEnabled ? .speaker : .none )
+            } catch {
+                owsFail("\(self.logTag) failed to set \(#function) = \(isEnabled) with error: \(error)")
+            }
+        }
+    }
+
+    private func updateIsSpeakerphoneEnabled() {
+        let value = avAudioSession.currentRoute.outputs.contains { (portDescription: AVAudioSessionPortDescription) -> Bool in
+            return portDescription.portName == AVAudioSessionPortBuiltInSpeaker
+        }
+        DispatchQueue.main.async {
+            self.isSpeakerphoneEnabled = value
+        }
+    }
+
     private func ensureProperAudioSession(call: SignalCall?) {
         AssertIsOnMainThread()
 
         guard let call = call else {
-            setAudioSession(category: AVAudioSessionCategoryPlayback,
+            setAudioSession(category: AVAudioSessionCategorySoloAmbient,
                             mode: AVAudioSessionModeDefault)
             return
         }
@@ -206,7 +260,7 @@ struct AudioSource: Hashable {
             // SoloAmbient plays through speaker, but respects silent switch
             setAudioSession(category: AVAudioSessionCategorySoloAmbient,
                             mode: AVAudioSessionModeDefault)
-        } else if call.state == .connected, call.hasLocalVideo {
+        } else if call.hasLocalVideo {
             // Because ModeVideoChat affects gain, we don't want to apply it until the call is connected.
             // otherwise sounds like ringing will be extra loud for video vs. speakerphone
@@ -227,26 +281,15 @@ struct AudioSource: Hashable {
                             options: options)
         }
 
-        let session = AVAudioSession.sharedInstance()
         do {
             // It's important to set preferred input *after* ensuring properAudioSession
             // because some sources are only valid for certain category/option combinations.
-            let existingPreferredInput = session.preferredInput
+            let existingPreferredInput = avAudioSession.preferredInput
             if existingPreferredInput != call.audioSource?.portDescription {
                 Logger.info("\(self.logTag) changing preferred input: \(String(describing: existingPreferredInput)) -> \(String(describing: call.audioSource?.portDescription))")
-                try session.setPreferredInput(call.audioSource?.portDescription)
+                try avAudioSession.setPreferredInput(call.audioSource?.portDescription)
             }
 
-            if call.isSpeakerphoneEnabled || (call.hasLocalVideo && call.state != .connected) {
-                // We want consistent ringer-volume between speaker-phone and video chat.
-                // But because using VideoChat mode has noticeably higher output gain, we treat
-                // video chat like speakerphone mode until the call is connected.
-                Logger.verbose("\(self.logTag) enabling speakerphone overrideOutputAudioPort(.speaker)")
-                try session.overrideOutputAudioPort(.speaker)
-            } else {
-                Logger.verbose("\(self.logTag) disabling spearkerphone overrideOutputAudioPort(.none) ")
-                try session.overrideOutputAudioPort(.none)
-            }
         } catch {
             owsFail("\(self.logTag) failed setting audio source with error: \(error) isSpeakerPhoneEnabled: \(call.isSpeakerphoneEnabled)")
         }
@@ -328,6 +371,7 @@ struct AudioSource: Hashable {
         AssertIsOnMainThread()
 
         play(sound: Sound.failure)
+        handleCallEnded(call: call)
     }
 
     private func handleLocalHangup(call: SignalCall) {
@@ -363,7 +407,8 @@ struct AudioSource: Hashable {
         AssertIsOnMainThread()
 
         // Stop solo audio, revert to default.
-        setAudioSession(category: AVAudioSessionCategoryAmbient)
+        isSpeakerphoneEnabled = false
+        setAudioSession(category: AVAudioSessionCategorySoloAmbient)
     }
 
     // MARK: Playing Sounds
@@ -439,9 +484,7 @@ struct AudioSource: Hashable {
     // Specifically if you call it while speakerphone is enabled you won't see
     // any connected bluetooth routes.
     var availableInputs: [AudioSource] {
-        let session = AVAudioSession.sharedInstance()
-
-        guard let availableInputs = session.availableInputs else {
+        guard let availableInputs = avAudioSession.availableInputs else {
            // I'm not sure why this would happen, but it may indicate an error.
             // In practice, I haven't seen it on iOS9+.
             //
@@ -468,8 +511,7 @@ struct AudioSource: Hashable {
         // system state to determine the current audio source.
         // If a bluetooth is connected, this will be bluetooth, otherwise
         // this will be the receiver.
-        let session = AVAudioSession.sharedInstance()
-        guard let portDescription = session.currentRoute.inputs.first else {
+        guard let portDescription = avAudioSession.currentRoute.inputs.first else {
             return nil
         }
@@ -482,13 +524,12 @@ struct AudioSource: Hashable {
 
         AssertIsOnMainThread()
 
-        let session = AVAudioSession.sharedInstance()
         var audioSessionChanged = false
         do {
             if #available(iOS 10.0, *), let mode = mode {
-                let oldCategory = session.category
-                let oldMode = session.mode
-                let oldOptions = session.categoryOptions
+                let oldCategory = avAudioSession.category
+                let oldMode = avAudioSession.mode
+                let oldOptions = avAudioSession.categoryOptions
 
                 guard oldCategory != category || oldMode != mode || oldOptions != options else {
                     return
@@ -505,13 +546,13 @@ struct AudioSource: Hashable {
                 if oldOptions != options {
                     Logger.debug("\(self.logTag) audio session changed options: \(oldOptions) -> \(options) ")
                 }
-                try session.setCategory(category, mode: mode, options: options)
+                try avAudioSession.setCategory(category, mode: mode, options: options)
 
             } else {
-                let oldCategory = session.category
-                let oldOptions = session.categoryOptions
+                let oldCategory = avAudioSession.category
+                let oldOptions = avAudioSession.categoryOptions
 
-                guard session.category != category || session.categoryOptions != options else {
+                guard avAudioSession.category != category || avAudioSession.categoryOptions != options else {
                     return
                 }
 
@@ -523,7 +564,7 @@ struct AudioSource: Hashable {
                 if oldOptions != options {
                     Logger.debug("\(self.logTag) audio session changed options: \(oldOptions) -> \(options) ")
                 }
-                try session.setCategory(category, with: options)
+                try avAudioSession.setCategory(category, with: options)
             }
 
         } catch {
@@ -533,8 +574,7 @@ struct AudioSource: Hashable {
 
         if audioSessionChanged {
             Logger.info("\(self.logTag) in \(#function)")
-            // Update call view synchronously; already on main thread.
-            NotificationCenter.default.post(name:CallAudioServiceSessionChanged, object: nil)
+            self.delegate?.callAudioServiceDidChangeAudioSession(self)
         }
     }
 }
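Two details in the new speakerphone plumbing are easy to miss: `AVAudioSessionRouteChange` is delivered on a background thread (hence the `assert(!Thread.isMainThread)`), and the current output route, not any stored flag, is the ground truth. A standalone sketch of that observation pattern; `SpeakerRouteMonitor` is hypothetical, and where the diff compares `portName`, this sketch uses the documented `portType` field, which carries the same `AVAudioSessionPortBuiltInSpeaker` constant:

```swift
import AVFoundation

final class SpeakerRouteMonitor {
    private let session = AVAudioSession.sharedInstance()

    /// Invoked on the main queue whenever the built-in speaker becomes (in)active.
    var onChange: ((Bool) -> Void)?

    init() {
        NotificationCenter.default.addObserver(forName: .AVAudioSessionRouteChange,
                                               object: session,
                                               queue: nil) { [weak self] _ in
            guard let strongSelf = self else { return }
            // Delivered off the main thread: read the route here...
            let isSpeaker = strongSelf.session.currentRoute.outputs.contains {
                $0.portType == AVAudioSessionPortBuiltInSpeaker
            }
            // ...but publish on main, where UI observers (delegates) live.
            DispatchQueue.main.async { strongSelf.onChange?(isSpeaker) }
        }
    }
}
```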
diff --git a/Signal/src/call/CallAudioSession.swift b/Signal/src/call/CallAudioSession.swift
deleted file mode 100644
index 4d5d9c9bb..000000000
--- a/Signal/src/call/CallAudioSession.swift
+++ /dev/null
@@ -1,51 +0,0 @@
-//
-// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
-//
-
-import Foundation
-import WebRTC
-
-/**
- * By default WebRTC starts the audio session (PlayAndRecord) immediately upon creating the peer connection
- * but we want to create the peer connection and set up all the signaling channels before we prompt the user
- * for an incoming call. Without manually handling the session, this would result in the user seeing a recording
- * permission requested (and recording banner) before they even know they have an incoming call.
- *
- * By using the `useManualAudio` and `isAudioEnabled` attributes of the RTCAudioSession we can delay recording until
- * it makes sense.
- */
-class CallAudioSession {
-
-    let TAG = "[CallAudioSession]"
-
-    // Force singleton access
-    static let shared = CallAudioSession()
-    private init() {}
-
-    /**
-     * The private class that manages AVAudioSession for WebRTC
-     */
-    private let rtcAudioSession = RTCAudioSession.sharedInstance()
-
-    /**
-     * This must be called before any audio tracks are added to the peerConnection, else we'll start recording before all
-     * our signaling is set up.
-     */
-    func configure() {
-        Logger.info("\(TAG) in \(#function)")
-        rtcAudioSession.useManualAudio = true
-    }
-
-    /**
-     * Because we useManualAudio with our RTCAudioSession, we have to start/stop the recording audio session ourselves.
-     * See header for details on manual audio.
-     */
-    var isRTCAudioEnabled: Bool {
-        get {
-            return rtcAudioSession.isAudioEnabled
-        }
-        set {
-            rtcAudioSession.isAudioEnabled = newValue
-        }
-    }
-}
diff --git a/Signal/src/call/CallService.swift b/Signal/src/call/CallService.swift
index 14295a942..fb11017cc 100644
--- a/Signal/src/call/CallService.swift
+++ b/Signal/src/call/CallService.swift
@@ -1024,6 +1024,9 @@ protocol CallServiceObserver: class {
         //   this.messageSender.cancelInFlightRequests();
 
         if let peerConnectionClient = self.peerConnectionClient {
+            // Stop audio capture ASAP
+            ensureAudioState(call: call, peerConnectionClient: peerConnectionClient)
+
             // If the call is connected, we can send the hangup via the data channel for faster hangup.
             let message = DataChannelMessage.forHangup(callId: call.signalingId)
             peerConnectionClient.sendDataChannelMessage(data: message.asData(), description: "hangup", isCritical: true)
diff --git a/Signal/src/call/NonCallKitCallUIAdaptee.swift b/Signal/src/call/NonCallKitCallUIAdaptee.swift
index 18b995a4f..b7848d611 100644
--- a/Signal/src/call/NonCallKitCallUIAdaptee.swift
+++ b/Signal/src/call/NonCallKitCallUIAdaptee.swift
@@ -1,5 +1,5 @@
 //
-// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
+// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
 //
 
 import Foundation
@@ -89,7 +89,7 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
             return
         }
 
-        CallAudioSession.shared.isRTCAudioEnabled = true
+        OWSAudioSession.shared.isRTCAudioEnabled = true
         self.callService.handleAnswerCall(call)
     }
 
@@ -123,7 +123,7 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
     func recipientAcceptedCall(_ call: SignalCall) {
         AssertIsOnMainThread()
 
-        CallAudioSession.shared.isRTCAudioEnabled = true
+        OWSAudioSession.shared.isRTCAudioEnabled = true
     }
 
     func localHangupCall(_ call: SignalCall) {
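The `isRTCAudioEnabled` call sites above (and in the CallKit adaptee just below) are the two halves of WebRTC's manual-audio handshake: `configureRTCAudio()` sets `useManualAudio = true` at startup, so audio only flows while this flag is raised. With CallKit, the natural place to raise and lower it is the `CXProviderDelegate` session callbacks, exactly as the next file does; a minimal sketch (the `CallAudioGate` type is hypothetical):

```swift
import CallKit
import AVFoundation
import SignalMessaging

final class CallAudioGate: NSObject, CXProviderDelegate {
    func providerDidReset(_ provider: CXProvider) {
        // Required by CXProviderDelegate; real code tears down calls here.
    }

    func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
        // iOS has granted us the audio session: let WebRTC start capture/playback.
        OWSAudioSession.shared.isRTCAudioEnabled = true
    }

    func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
        // Session revoked (call ended, interruption): stop WebRTC audio.
        OWSAudioSession.shared.isRTCAudioEnabled = false
    }
}
```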
diff --git a/Signal/src/call/Speakerbox/CallKitCallUIAdaptee.swift b/Signal/src/call/Speakerbox/CallKitCallUIAdaptee.swift
index f44d0af58..9226070ef 100644
--- a/Signal/src/call/Speakerbox/CallKitCallUIAdaptee.swift
+++ b/Signal/src/call/Speakerbox/CallKitCallUIAdaptee.swift
@@ -1,5 +1,5 @@
 //
-// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
+// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
 //
 
 import Foundation
@@ -345,14 +345,14 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
 
         Logger.debug("\(TAG) Received \(#function)")
 
-        CallAudioSession.shared.isRTCAudioEnabled = true
+        OWSAudioSession.shared.isRTCAudioEnabled = true
     }
 
     func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
         AssertIsOnMainThread()
 
         Logger.debug("\(TAG) Received \(#function)")
 
-        CallAudioSession.shared.isRTCAudioEnabled = false
+        OWSAudioSession.shared.isRTCAudioEnabled = false
     }
 
     // MARK: - Util
diff --git a/SignalMessaging/attachments/AttachmentApprovalViewController.swift b/SignalMessaging/attachments/AttachmentApprovalViewController.swift
index aeed76185..32f4b515d 100644
--- a/SignalMessaging/attachments/AttachmentApprovalViewController.swift
+++ b/SignalMessaging/attachments/AttachmentApprovalViewController.swift
@@ -13,7 +13,7 @@ public protocol AttachmentApprovalViewControllerDelegate: class {
 }
 
 @objc
-public class AttachmentApprovalViewController: OWSViewController, CaptioningToolbarDelegate, PlayerProgressBarDelegate {
+public class AttachmentApprovalViewController: OWSViewController, CaptioningToolbarDelegate, PlayerProgressBarDelegate, OWSVideoPlayerDelegate {
 
     let TAG = "[AttachmentApprovalViewController]"
     weak var delegate: AttachmentApprovalViewControllerDelegate?
@@ -27,7 +27,7 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
     // MARK: Properties
 
     let attachment: SignalAttachment
-    private var videoPlayer: AVPlayer?
+    private var videoPlayer: OWSVideoPlayer?
 
     private(set) var bottomToolbar: UIView!
     private(set) var mediaMessageView: MediaMessageView!
@@ -79,8 +79,6 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
         super.viewWillAppear(animated)
 
         CurrentAppContext().setStatusBarHidden(true, animated: animated)
-
-        mediaMessageView.viewWillAppear(animated)
     }
 
     override public func viewDidAppear(_ animated: Bool) {
@@ -92,8 +90,6 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
         Logger.debug("\(logTag) in \(#function)")
         super.viewWillDisappear(animated)
 
-        mediaMessageView.viewWillDisappear(animated)
-
         // Since this VC is being dismissed, the "show status bar" animation would feel like
         // it's occuring on the presenting view controller - it's better not to animate at all.
         CurrentAppContext().setStatusBarHidden(false, animated: false)
@@ -182,16 +178,12 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
             return
         }
 
-        let player = AVPlayer(url: videoURL)
+        let player = OWSVideoPlayer(url: videoURL)
         self.videoPlayer = player
-
-        NotificationCenter.default.addObserver(self,
-                                               selector: #selector(playerItemDidPlayToCompletion(_:)),
-                                               name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
-                                               object: player.currentItem)
+        player.delegate = self
 
         let playerView = VideoPlayerView()
-        playerView.player = player
+        playerView.player = player.avPlayer
         self.mediaMessageView.addSubview(playerView)
         playerView.autoPinEdgesToSuperviewEdges()
 
@@ -199,7 +191,7 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
         playerView.addGestureRecognizer(pauseGesture)
 
         let progressBar = PlayerProgressBar()
-        progressBar.player = player
+        progressBar.player = player.avPlayer
         progressBar.delegate = self
 
         // we don't want the progress bar to zoom during "pinch-to-zoom"
@@ -300,17 +292,6 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
             UIView.animate(withDuration: 0.1) {
                 playVideoButton.alpha = 0.0
             }
-
-            guard let item = videoPlayer.currentItem else {
-                owsFail("\(TAG) video player item was unexpectedly nil")
-                return
-            }
-
-            if item.currentTime() == item.duration {
-                // Rewind for repeated plays, but only if it previously played to end.
-                videoPlayer.seek(to: kCMTimeZero)
-            }
-
             videoPlayer.play()
         } else {
             self.playLegacyVideo()
@@ -353,11 +334,12 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
     }
 
     @objc
-    private func playerItemDidPlayToCompletion(_ notification: Notification) {
+    public func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer) {
         guard let playVideoButton = self.playVideoButton else {
             owsFail("\(TAG) playVideoButton was unexpectedly nil")
             return
         }
+
         UIView.animate(withDuration: 0.1) {
             playVideoButton.alpha = 1.0
         }
diff --git a/SignalMessaging/attachments/MediaMessageView.swift b/SignalMessaging/attachments/MediaMessageView.swift
index c7750252e..5362fb34f 100644
--- a/SignalMessaging/attachments/MediaMessageView.swift
+++ b/SignalMessaging/attachments/MediaMessageView.swift
@@ -87,18 +87,6 @@ public class MediaMessageView: UIView, OWSAudioAttachmentPlayerDelegate {
         NotificationCenter.default.removeObserver(self)
     }
 
-    // MARK: View Lifecycle
-
-    @objc
-    public func viewWillAppear(_ animated: Bool) {
-        OWSAudioAttachmentPlayer.setAudioIgnoresHardwareMuteSwitch(true)
-    }
-
-    @objc
-    public func viewWillDisappear(_ animated: Bool) {
-        OWSAudioAttachmentPlayer.setAudioIgnoresHardwareMuteSwitch(false)
-    }
-
     // MARK: - Create Views
 
     private func createViews() {
diff --git a/SignalMessaging/attachments/OWSAudioAttachmentPlayer.h b/SignalMessaging/attachments/OWSAudioAttachmentPlayer.h
index ab5d7ed17..7d8a4a6c0 100644
--- a/SignalMessaging/attachments/OWSAudioAttachmentPlayer.h
+++ b/SignalMessaging/attachments/OWSAudioAttachmentPlayer.h
@@ -1,5 +1,5 @@
 //
-// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
+// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
 //
 
 NS_ASSUME_NONNULL_BEGIN
@@ -25,8 +25,6 @@ typedef NS_ENUM(NSInteger, AudioPlaybackState) {
 
 @interface OWSAudioAttachmentPlayer : NSObject
 
-+ (void)setAudioIgnoresHardwareMuteSwitch:(BOOL)shouldIgnore;
-
 @property (nonatomic, readonly, weak) id<OWSAudioAttachmentPlayerDelegate> delegate;
 
 // This property can be used to associate instances of the player with view
diff --git a/SignalMessaging/attachments/OWSAudioAttachmentPlayer.m b/SignalMessaging/attachments/OWSAudioAttachmentPlayer.m
index fc8990ca0..1e440fa08 100644
--- a/SignalMessaging/attachments/OWSAudioAttachmentPlayer.m
+++ b/SignalMessaging/attachments/OWSAudioAttachmentPlayer.m
@@ -22,17 +22,6 @@ NS_ASSUME_NONNULL_BEGIN
 
 @implementation OWSAudioAttachmentPlayer
 
-+ (void)setAudioIgnoresHardwareMuteSwitch:(BOOL)shouldIgnore
-{
-    NSError *error = nil;
-    BOOL success = [[AVAudioSession sharedInstance]
-        setCategory:(shouldIgnore ? AVAudioSessionCategoryPlayback : AVAudioSessionCategoryPlayAndRecord)error:&error];
-    OWSAssert(!error);
-    if (!success || error) {
-        DDLogError(@"%@ Error in setAudioIgnoresHardwareMuteSwitch: %d", self.logTag, shouldIgnore);
-    }
-}
-
 - (instancetype)initWithMediaUrl:(NSURL *)mediaUrl delegate:(id<OWSAudioAttachmentPlayerDelegate>)delegate
 {
     self = [super init];
@@ -76,7 +65,7 @@ NS_ASSUME_NONNULL_BEGIN
     OWSAssert(self.mediaUrl);
     OWSAssert([self.delegate audioPlaybackState] != AudioPlaybackState_Playing);
 
-    [[self class] setAudioIgnoresHardwareMuteSwitch:YES];
+    [OWSAudioSession.shared setPlaybackCategory];
 
     [self.audioPlayerPoller invalidate];
 
@@ -101,7 +90,6 @@ NS_ASSUME_NONNULL_BEGIN
         self.audioPlayer.delegate = self;
     }
 
-    [self.audioPlayer prepareToPlay];
     [self.audioPlayer play];
     [self.audioPlayerPoller invalidate];
     self.audioPlayerPoller = [NSTimer weakScheduledTimerWithTimeInterval:.05f
@@ -123,6 +111,7 @@ NS_ASSUME_NONNULL_BEGIN
     [self.audioPlayerPoller invalidate];
     [self.delegate setAudioProgress:[self.audioPlayer currentTime] duration:[self.audioPlayer duration]];
 
+    [OWSAudioSession.shared endAudioActivity];
     [DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self];
 }
 
@@ -135,6 +124,7 @@ NS_ASSUME_NONNULL_BEGIN
     [self.audioPlayerPoller invalidate];
     [self.delegate setAudioProgress:0 duration:0];
 
+    [OWSAudioSession.shared endAudioActivity];
     [DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self];
 }
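The deleted class method toggled a process-global category from view lifecycle hooks, so an unbalanced pair (a skipped `viewWillDisappear`, say) left the whole app in the wrong category. The replacement scopes the change to the player's own lifetime. A sketch of that discipline, with a hypothetical `MiniPlayer` standing in for `OWSAudioAttachmentPlayer`:

```swift
import AVFoundation
import SignalMessaging

final class MiniPlayer: NSObject, AVAudioPlayerDelegate {
    private var audioPlayer: AVAudioPlayer?

    func play(url: URL) throws {
        OWSAudioSession.shared.setPlaybackCategory() // claim the session when sound starts...
        let player = try AVAudioPlayer(contentsOf: url)
        player.delegate = self
        player.play()
        audioPlayer = player
    }

    func stop() {
        audioPlayer?.stop()
        audioPlayer = nil
        OWSAudioSession.shared.endAudioActivity()    // ...and release it when sound stops
    }

    // Natural end-of-file counts as "stopped" too.
    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        stop()
    }
}
```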
diff --git a/SignalMessaging/attachments/OWSVideoPlayer.swift b/SignalMessaging/attachments/OWSVideoPlayer.swift
new file mode 100644
index 000000000..4134fb5df
--- /dev/null
+++ b/SignalMessaging/attachments/OWSVideoPlayer.swift
@@ -0,0 +1,72 @@
+//
+// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
+//
+
+import Foundation
+import AVFoundation
+
+@objc
+protocol OWSVideoPlayerDelegate: class {
+    @available(iOSApplicationExtension 9.0, *)
+    func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer)
+}
+
+@objc
+public class OWSVideoPlayer: NSObject {
+
+    let avPlayer: AVPlayer
+
+    weak var delegate: OWSVideoPlayerDelegate?
+
+    @available(iOS 9.0, *)
+    init(url: URL) {
+        self.avPlayer = AVPlayer(url: url)
+
+        super.init()
+
+        NotificationCenter.default.addObserver(self,
+                                               selector: #selector(playerItemDidPlayToCompletion(_:)),
+                                               name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
+                                               object: avPlayer.currentItem)
+    }
+
+    // MARK: Playback Controls
+
+    @available(iOS 9.0, *)
+    public func pause() {
+        avPlayer.pause()
+        OWSAudioSession.shared.endAudioActivity()
+    }
+
+    @available(iOS 9.0, *)
+    public func play() {
+        OWSAudioSession.shared.setPlaybackCategory()
+
+        guard let item = avPlayer.currentItem else {
+            owsFail("\(logTag) video player item was unexpectedly nil")
+            return
+        }
+
+        if item.currentTime() == item.duration {
+            // Rewind for repeated plays, but only if it previously played to end.
+            avPlayer.seek(to: kCMTimeZero)
+        }
+
+        avPlayer.play()
+    }
+
+    @available(iOS 9.0, *)
+    @objc(seekToTime:)
+    public func seek(to time: CMTime) {
+        avPlayer.seek(to: time)
+    }
+
+    // MARK: private
+
+    @objc
+    @available(iOS 9.0, *)
+    private func playerItemDidPlayToCompletion(_ notification: Notification) {
+        self.delegate?.videoPlayerDidPlayToCompletion(self)
+        OWSAudioSession.shared.endAudioActivity()
+    }
+}
diff --git a/SignalMessaging/contacts/ViewControllerUtils.h b/SignalMessaging/contacts/ViewControllerUtils.h
index 1be8a4dd6..630b4041d 100644
--- a/SignalMessaging/contacts/ViewControllerUtils.h
+++ b/SignalMessaging/contacts/ViewControllerUtils.h
@@ -18,8 +18,6 @@ NS_ASSUME_NONNULL_BEGIN
            replacementString:(NSString *)insertionText
                  countryCode:(NSString *)countryCode;
 
-+ (void)setAudioIgnoresHardwareMuteSwitch:(BOOL)shouldIgnore;
-
 + (NSString *)examplePhoneNumberForCountryCode:(NSString *)countryCode callingCode:(NSString *)callingCode;
 
 @end
diff --git a/SignalMessaging/contacts/ViewControllerUtils.m b/SignalMessaging/contacts/ViewControllerUtils.m
index 5d4d81927..482b593e3 100644
--- a/SignalMessaging/contacts/ViewControllerUtils.m
+++ b/SignalMessaging/contacts/ViewControllerUtils.m
@@ -74,17 +74,6 @@ NS_ASSUME_NONNULL_BEGIN
     [textField setSelectedTextRange:[textField textRangeFromPosition:pos toPosition:pos]];
 }
 
-+ (void)setAudioIgnoresHardwareMuteSwitch:(BOOL)shouldIgnore
-{
-    NSError *error = nil;
-    BOOL success = [[AVAudioSession sharedInstance]
-        setCategory:(shouldIgnore ? AVAudioSessionCategoryPlayback : AVAudioSessionCategoryPlayAndRecord)error:&error];
-    OWSAssert(!error);
-    if (!success || error) {
-        DDLogError(@"%@ Error in setAudioIgnoresHardwareMuteSwitch: %d", self.logTag, shouldIgnore);
-    }
-}
-
 + (NSString *)examplePhoneNumberForCountryCode:(NSString *)countryCode callingCode:(NSString *)callingCode
 {
     OWSAssert(countryCode.length > 0);
diff --git a/SignalMessaging/environment/OWSAudioSession.swift b/SignalMessaging/environment/OWSAudioSession.swift
new file mode 100644
index 000000000..685b9243c
--- /dev/null
+++ b/SignalMessaging/environment/OWSAudioSession.swift
@@ -0,0 +1,101 @@
+//
+// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
+//
+
+import Foundation
+import WebRTC
+
+@objc
+public class OWSAudioSession: NSObject {
+
+    // Force singleton access
+    public static let shared = OWSAudioSession()
+    private override init() {}
+    private let avAudioSession = AVAudioSession.sharedInstance()
+
+    // Ignores hardware mute switch, plays through external speaker
+    public func setPlaybackCategory() {
+        Logger.debug("\(logTag) in \(#function)")
+
+        // In general, we should have put the audio session back to its default
+        // category when we were done with whatever activity required it to be modified
+        assert(avAudioSession.category == AVAudioSessionCategorySoloAmbient)
+
+        do {
+            try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
+        } catch {
+            owsFail("\(logTag) in \(#function) failed with error: \(error)")
+        }
+    }
+
+    public func setRecordCategory() -> Bool {
+        Logger.debug("\(logTag) in \(#function)")
+
+        // In general, we should have put the audio session back to its default
+        // category when we were done with whatever activity required it to be modified
+        assert(avAudioSession.category == AVAudioSessionCategorySoloAmbient)
+
+        assert(avAudioSession.recordPermission() == .granted)
+
+        do {
+            try avAudioSession.setCategory(AVAudioSessionCategoryRecord)
+            return true
+        } catch {
+            owsFail("\(logTag) in \(#function) failed with error: \(error)")
+            return false
+        }
+    }
+
+    public func endAudioActivity() {
+        Logger.debug("\(logTag) in \(#function)")
+
+        do {
+            try avAudioSession.setCategory(AVAudioSessionCategorySoloAmbient)
+
+            // When playing audio in Signal, other apps' audio (e.g. Music) is paused.
+            // By notifying when we deactivate, the other app can resume playback.
+            try avAudioSession.setActive(false, with: [.notifyOthersOnDeactivation])
+        } catch {
+            owsFail("\(logTag) in \(#function) failed with error: \(error)")
+        }
+    }
+
+    // MARK: - WebRTC Audio
+
+    /**
+     * By default WebRTC starts the audio session (PlayAndRecord) immediately upon creating the peer connection
+     * but we want to create the peer connection and set up all the signaling channels before we prompt the user
+     * for an incoming call. Without manually handling the session, this would result in the user seeing a recording
+     * permission requested (and recording banner) before they even know they have an incoming call.
+     *
+     * By using the `useManualAudio` and `isAudioEnabled` attributes of the RTCAudioSession we can delay recording until
+     * it makes sense.
+     */
+
+    /**
+     * The private class that manages AVAudioSession for WebRTC
+     */
+    private let rtcAudioSession = RTCAudioSession.sharedInstance()
+
+    /**
+     * This must be called before any audio tracks are added to the peerConnection, else we'll start recording before all
+     * our signaling is set up.
+     */
+    public func configureRTCAudio() {
+        Logger.info("\(logTag) in \(#function)")
+        rtcAudioSession.useManualAudio = true
+    }
+
+    /**
+     * Because we useManualAudio with our RTCAudioSession, we have to start/stop the recording audio session ourselves.
+     * See header for details on manual audio.
+     */
+    public var isRTCAudioEnabled: Bool {
+        get {
+            return rtcAudioSession.isAudioEnabled
+        }
+        set {
+            rtcAudioSession.isAudioEnabled = newValue
+        }
+    }
+}
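The asserts in `setPlaybackCategory`/`setRecordCategory` encode the file's central invariant: between activities, the session rests at SoloAmbient. In debug builds an unbalanced caller trips the very next activity's assert, which points at the leak rather than at a mysterious audio symptom later. A contrived sketch of that failure mode, not part of the diff:

```swift
import SignalMessaging

// DEBUG-only illustration of the balance invariant.
func demonstrateBalanceInvariant() {
    OWSAudioSession.shared.setPlaybackCategory()  // activity A begins (category -> Playback)

    // BUG: activity A never calls endAudioActivity(), so the category is
    // still Playback when activity B starts...
    _ = OWSAudioSession.shared.setRecordCategory()
    // ...and B's assert(category == AVAudioSessionCategorySoloAmbient)
    // fires here in debug builds, identifying the unbalanced caller.

    OWSAudioSession.shared.endAudioActivity()     // the missing call, for completeness
}
```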