Activate audio at the proper time

// FREEBIE
Michael Kirk 8 years ago
parent 81f097c1f5
commit 30b50e1489

@@ -137,9 +137,8 @@ struct AudioSource: Hashable {
     // `pulseDuration` is the small pause between the two vibrations in the pair.
     private let pulseDuration = 0.2
 
-    static private let sharedAudioSession = CallAudioSession()
     var audioSession: CallAudioSession {
-        return type(of: self).sharedAudioSession
+        return CallAudioSession.shared
     }
 
     // MARK: - Initializers
@@ -249,12 +248,6 @@ struct AudioSource: Hashable {
         } catch {
             owsFail("\(TAG) failed setting audio source with error: \(error) isSpeakerPhoneEnabled: \(call.isSpeakerphoneEnabled)")
         }
-
-        if call.state == .connected, !call.isOnHold {
-            audioSession.isRTCAudioEnabled = true
-        } else {
-            audioSession.isRTCAudioEnabled = false
-        }
     }
 
     // MARK: - Service action handlers
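
Taken together, the two hunks above move audio activation out of CallAudioService: instead of recomputing `isRTCAudioEnabled` from the observed call state on every audio update, the flag is now flipped once at each call-lifecycle edge. A minimal sketch of the resulting pattern; the `onCallConnected`/`onCallEnded` hook names are hypothetical, for illustration only:

// Hypothetical lifecycle hooks; the real code sets the flag inside the
// CallUIAdaptee implementations shown in the hunks below.
func onCallConnected() {
    // Start WebRTC audio exactly when the call goes live.
    CallAudioSession.shared.isRTCAudioEnabled = true
}

func onCallEnded() {
    // Stop WebRTC audio when the call tears down.
    CallAudioSession.shared.isRTCAudioEnabled = false
}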

@@ -17,6 +17,11 @@ import WebRTC
 class CallAudioSession {
 
     let TAG = "[CallAudioSession]"
 
+    // Force singleton access
+    static let shared = CallAudioSession()
+    private init() {}
+
     /**
      * The private class that manages AVAudioSession for WebRTC
      */
@@ -33,7 +38,7 @@ class CallAudioSession {
     /**
      * Because we useManualAudio with our RTCAudioSession, we have to start/stop the recording audio session ourselves.
-     * Else, we start recording before the next call is ringing.
+     * See header for details on manual audio.
      */
     var isRTCAudioEnabled: Bool {
         get {
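
The hunk above is cut off before the body of `isRTCAudioEnabled`. For orientation, a property like this typically wraps WebRTC's shared `RTCAudioSession` in manual-audio mode, roughly as in this sketch (an assumption based on the `useManualAudio` comment above, not the verbatim implementation):

import WebRTC

class ManualAudioSketch {
    private let rtcAudioSession = RTCAudioSession.sharedInstance()

    init() {
        // With useManualAudio set, WebRTC will not start or stop the
        // AVAudioSession on its own; audio only flows while
        // isAudioEnabled is true.
        rtcAudioSession.useManualAudio = true
    }

    var isRTCAudioEnabled: Bool {
        get { return rtcAudioSession.isAudioEnabled }
        set { rtcAudioSession.isAudioEnabled = newValue }
    }
}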

@@ -81,6 +81,7 @@ class NonCallKitCallUIAdaptee: CallUIAdaptee {
             return
         }
 
+        CallAudioSession.shared.isRTCAudioEnabled = true
         self.callService.handleAnswerCall(call)
     }
@@ -113,7 +114,8 @@ class NonCallKitCallUIAdaptee: CallUIAdaptee {
     func recipientAcceptedCall(_ call: SignalCall) {
         AssertIsOnMainThread()
 
-        // no-op
+        CallAudioSession.shared.isRTCAudioEnabled = true
     }
 
     func localHangupCall(_ call: SignalCall) {
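
Both NonCallKitCallUIAdaptee changes exist because, without CallKit, there is no system callback telling the app when its audio session becomes active; the adaptee must enable RTC audio itself on the incoming path (local user answers) and the outgoing path (recipient accepts). A condensed sketch of that flow, with a hypothetical stand-in for the real call service:

// StubCallService is a stand-in for the real CallService, for illustration only.
class StubCallService {
    func handleAnswerCall(_ call: SignalCall) { /* connect the call */ }
}

class NonCallKitSketch {
    let callService = StubCallService()

    func answerCall(_ call: SignalCall) {
        // Incoming call: enable audio before the service connects it,
        // mirroring what CallKit's didActivate callback provides elsewhere.
        CallAudioSession.shared.isRTCAudioEnabled = true
        callService.handleAnswerCall(call)
    }

    func recipientAcceptedCall(_ call: SignalCall) {
        // Outgoing call: the remote side picked up, so audio can start now.
        CallAudioSession.shared.isRTCAudioEnabled = true
    }
}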

@@ -341,17 +341,14 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
         Logger.debug("\(TAG) Received \(#function)")
 
-        // Audio Session is managed by CallAudioService, which observes changes on the
-        // SignalCall directly.
+        CallAudioSession.shared.isRTCAudioEnabled = true
     }
 
     func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
         AssertIsOnMainThread()
 
         Logger.debug("\(TAG) Received \(#function)")
 
-        // Audio Session is managed by CallAudioService, which observes changes on the
-        // SignalCall directly.
+        CallAudioSession.shared.isRTCAudioEnabled = false
     }
 
     // MARK: - Util
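
On the CallKit path, by contrast, the operating system owns the AVAudioSession, and CXProviderDelegate reports when it is handed to the app; WebRTC audio should run only between those two callbacks. A self-contained sketch of that pairing, assuming the `CallAudioSession.shared` API introduced above:

import AVFoundation
import CallKit

class ProviderDelegateSketch: NSObject, CXProviderDelegate {
    func providerDidReset(_ provider: CXProvider) {
        // Required by CXProviderDelegate; a real app tears down calls here.
    }

    func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
        // The system has activated our audio session: start WebRTC audio.
        CallAudioSession.shared.isRTCAudioEnabled = true
    }

    func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
        // The system reclaimed the audio session: stop WebRTC audio.
        CallAudioSession.shared.isRTCAudioEnabled = false
    }
}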
