Fix volume burst when call connects

By centralizing AudioSession management onto the AudioService, we can
avoid enabling the RTCAudioSession while we're mid-ring.

Also allows us to centralize and remove redundant audio session logic.

// FREEBIE
pull/1/head
Michael Kirk 8 years ago
parent 402d4157c8
commit c7642cc628

@ -936,6 +936,11 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
self.updateCallUI(callState: call.state)
}
// CallObserver callback: the call's hold state changed — refresh the call UI.
func holdDidChange(call: SignalCall, isOnHold: Bool) {
AssertIsOnMainThread()
self.updateCallUI(callState: call.state)
}
internal func audioSourceDidChange(call: SignalCall, audioSource: AudioSource?) {
AssertIsOnMainThread()
self.updateCallUI(callState: call.state)

@ -137,10 +137,20 @@ struct AudioSource: Hashable {
// `pulseDuration` is the small pause between the two vibrations in the pair.
private let pulseDuration = 0.2
static private let sharedAudioSession = CallAudioSession()
// Convenience accessor for the type-level shared CallAudioSession instance.
var audioSession: CallAudioSession {
return type(of: self).sharedAudioSession
}
// MARK: - Initializers
// NOTE(review): `handleRinging` is stored here; presumably it gates whether this
// service plays ring sounds itself — confirm against call sites.
init(handleRinging: Bool) {
self.handleRinging = handleRinging
super.init()
// Configure audio session so we don't prompt user with Record permission until call is connected.
audioSession.configure()
}
// MARK: - CallObserver
@ -152,7 +162,14 @@ struct AudioSource: Hashable {
// CallObserver callback: mute state changed — re-evaluate the audio session configuration.
internal func muteDidChange(call: SignalCall, isMuted: Bool) {
    AssertIsOnMainThread()
    // Fixed stale log: the old message claimed this method "is no-op", but it
    // now reconfigures the audio session via ensureProperAudioSession(call:).
    Logger.verbose("\(TAG) in \(#function)")
    ensureProperAudioSession(call: call)
}
// CallObserver callback: hold state changed — re-evaluate the audio session configuration.
internal func holdDidChange(call: SignalCall, isOnHold: Bool) {
AssertIsOnMainThread()
ensureProperAudioSession(call: call)
}
internal func audioSourceDidChange(call: SignalCall, audioSource: AudioSource?) {
@ -219,7 +236,7 @@ struct AudioSource: Hashable {
try session.setPreferredInput(call.audioSource?.portDescription)
}
if call.isSpeakerphoneEnabled || (call.hasLocalVideo && call.state != .connected) {
if call.isSpeakerphoneEnabled || (call.hasLocalVideo && call.state != .connected) {
// We want consistent ringer-volume between speaker-phone and video chat.
// But because using VideoChat mode has noticeably higher output gain, we treat
// video chat like speakerphone mode until the call is connected.
@ -232,6 +249,12 @@ struct AudioSource: Hashable {
} catch {
owsFail("\(TAG) failed setting audio source with error: \(error) isSpeakerPhoneEnabled: \(call.isSpeakerphoneEnabled)")
}
if call.state == .connected, !call.isOnHold {
audioSession.isRTCAudioEnabled = true
} else {
audioSession.isRTCAudioEnabled = false
}
}
// MARK: - Service action handlers
@ -290,7 +313,6 @@ struct AudioSource: Hashable {
Logger.debug("\(TAG) \(#function)")
AssertIsOnMainThread()
// FIXME if you toggled speakerphone before this point, the outgoing ring does not play through speaker. Why?
self.play(sound: Sound.outgoingRing)
}

@ -1,4 +1,5 @@
// Copyright © 2017 Open Whisper Systems. All rights reserved.
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
import Foundation
@ -21,7 +22,6 @@ class CallAudioSession {
*/
private let rtcAudioSession = RTCAudioSession.sharedInstance()
/**
* This must be called before any audio tracks are added to the peerConnection, else we'll start recording before all
* our signaling is set up.
@ -32,19 +32,15 @@ class CallAudioSession {
}
/**
* Because we useManualAudio with our RTCAudioSession, we have to start the recording audio session ourselves.
*/
func start() {
Logger.info("\(TAG) in \(#function)")
// Manual-audio mode (useManualAudio) means we must enable the RTC audio session ourselves.
rtcAudioSession.isAudioEnabled = true
}
/**
* Because we useManualAudio with our RTCAudioSession, we have to stop the recording audio session ourselves.
* Because we useManualAudio with our RTCAudioSession, we have to start/stop the recording audio session ourselves.
* Else, we start recording before the next call is ringing.
*/
func stop() {
Logger.info("\(TAG) in \(#function)")
rtcAudioSession.isAudioEnabled = false
// Thin proxy over the shared RTCAudioSession's `isAudioEnabled` flag, so callers
// can toggle RTC audio without touching the session object directly.
var isRTCAudioEnabled: Bool {
    get { return rtcAudioSession.isAudioEnabled }
    set { rtcAudioSession.isAudioEnabled = newValue }
}
}

@ -1441,7 +1441,6 @@ protocol CallServiceObserver: class {
self.remoteVideoTrack = nil
self.isRemoteVideoEnabled = false
PeerConnectionClient.stopAudioSession()
self.peerConnectionClient?.terminate()
Logger.debug("\(TAG) setting peerConnectionClient in \(#function)")
self.peerConnectionClient = nil
@ -1495,6 +1494,11 @@ protocol CallServiceObserver: class {
// Do nothing
}
// CallObserver conformance; CallService itself does not react to hold changes.
internal func holdDidChange(call: SignalCall, isOnHold: Bool) {
AssertIsOnMainThread()
// Do nothing
}
internal func audioSourceDidChange(call: SignalCall, audioSource: AudioSource?) {
AssertIsOnMainThread()
// Do nothing

@ -81,7 +81,6 @@ class NonCallKitCallUIAdaptee: CallUIAdaptee {
return
}
PeerConnectionClient.startAudioSession()
self.callService.handleAnswerCall(call)
}
@ -114,8 +113,7 @@ class NonCallKitCallUIAdaptee: CallUIAdaptee {
// NOTE(review): this fragment shows both the old explicit audio-session start and
// the replacement "no-op" — audio session management appears to have moved to
// CallAudioService; confirm against the final file state.
func recipientAcceptedCall(_ call: SignalCall) {
AssertIsOnMainThread()
PeerConnectionClient.startAudioSession()
// no-op
}
func localHangupCall(_ call: SignalCall) {

@ -104,7 +104,6 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
private var audioSender: RTCRtpSender?
private var audioTrack: RTCAudioTrack?
private var audioConstraints: RTCMediaConstraints
static private let sharedAudioSession = CallAudioSession()
// Video
@ -146,8 +145,6 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
super.init()
// Configure audio session so we don't prompt user with Record permission until call is connected.
type(of: self).configureAudioSession()
peerConnection = factory.peerConnection(with: configuration,
constraints: connectionConstraints,
delegate: self)
@ -703,20 +700,6 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
}
}
// MARK: - Audio Session
// Forwards to the shared CallAudioSession's one-time configuration.
class func configureAudioSession() {
sharedAudioSession.configure()
}
// Enables audio on the shared CallAudioSession.
class func startAudioSession() {
sharedAudioSession.start()
}
// Disables audio on the shared CallAudioSession.
class func stopAudioSession() {
sharedAudioSession.stop()
}
// MARK: Helpers
/**

@ -26,6 +26,7 @@ protocol CallObserver: class {
func stateDidChange(call: SignalCall, state: CallState)
func hasLocalVideoDidChange(call: SignalCall, hasLocalVideo: Bool)
func muteDidChange(call: SignalCall, isMuted: Bool)
func holdDidChange(call: SignalCall, isOnHold: Bool)
func audioSourceDidChange(call: SignalCall, audioSource: AudioSource?)
}
@ -123,7 +124,16 @@ protocol CallObserver: class {
return audioSource.isBuiltInSpeaker
}
var isOnHold = false
// Hold state for this call. Mutations must happen on the main thread; every
// change is broadcast to registered CallObservers via holdDidChange(call:isOnHold:).
var isOnHold = false {
    didSet {
        AssertIsOnMainThread()
        Logger.debug("\(TAG) isOnHold changed: \(oldValue) -> \(self.isOnHold)")
        observers.forEach { $0.value?.holdDidChange(call: self, isOnHold: isOnHold) }
    }
}
var connectedDate: NSDate?

@ -217,9 +217,6 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
AssertIsOnMainThread()
Logger.info("\(self.TAG) \(#function)")
// Stop any in-progress WebRTC related audio.
PeerConnectionClient.stopAudioSession()
// End any ongoing calls if the provider resets, and remove them from the app's list of calls,
// since they are no longer valid.
callService.handleFailedCurrentCall(error: .providerReset)
@ -302,16 +299,6 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
// Update the SignalCall's underlying hold state.
call.isOnHold = action.isOnHold
// Stop or start audio in response to holding or unholding the call.
if call.isOnHold {
// stopAudio() <-- SpeakerBox
PeerConnectionClient.stopAudioSession()
} else {
// startAudio() <-- SpeakerBox
// This is redundant with what happens in `provider(_:didActivate:)`
//PeerConnectionClient.startAudioSession()
}
// Signal to the system that the action has been successfully performed.
action.fulfill()
}
@ -355,8 +342,8 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
Logger.debug("\(TAG) Received \(#function)")
// Start recording
PeerConnectionClient.startAudioSession()
// Audio Session is managed by CallAudioService, which observes changes on the
// SignalCall directly.
}
func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
@ -364,10 +351,8 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
Logger.debug("\(TAG) Received \(#function)")
/*
Restart any non-call related audio now that the app's audio session has been
de-activated after having its priority restored to normal.
*/
// Audio Session is managed by CallAudioService, which observes changes on the
// SignalCall directly.
}
// MARK: - Util

Loading…
Cancel
Save