hoist audio session singleton to Environment

pull/1/head
Michael Kirk 6 years ago
parent 3d022adf4e
commit 3b4188f34b
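This commit replaces direct use of the OWSAudioSession.shared singleton with an audioSession accessor that reads the instance off Environment.shared, which now constructs and owns it. Below is a minimal, self-contained Swift sketch of that pattern; AudioSession, AppEnvironment, and VoiceMemoRecorder are illustrative stand-ins, not the real Signal types.

import Foundation

// Stand-in for OWSAudioSession: a plain class with no global state of its own.
class AudioSession {
    func startAudioActivity(_ name: String) -> Bool {
        print("start audio activity: \(name)")
        return true
    }

    func endAudioActivity(_ name: String) {
        print("end audio activity: \(name)")
    }
}

// Stand-in for Environment: the single place that owns shared services,
// so tests can install a different instance before exercising a consumer.
class AppEnvironment {
    static var shared = AppEnvironment(audioSession: AudioSession())

    let audioSession: AudioSession

    init(audioSession: AudioSession) {
        self.audioSession = audioSession
    }
}

// Consumers declare a "Dependencies" accessor instead of touching a singleton,
// mirroring the `var audioSession: OWSAudioSession` accessors added in this diff.
class VoiceMemoRecorder {
    // MARK: Dependencies
    var audioSession: AudioSession {
        return AppEnvironment.shared.audioSession
    }

    func record() {
        let success = audioSession.startAudioActivity("record voice memo")
        assert(success)
    }

    func stop() {
        audioSession.endAudioActivity("record voice memo")
    }
}

Routing access through the environment keeps construction in one place (the initWithAudioSession: change below) and lets tests swap in a stub without patching a global singleton.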

@@ -1107,6 +1107,7 @@ static NSTimeInterval launchStartedAt;
 // If there were any messages in our local queue which we hadn't yet processed.
 [SSKEnvironment.shared.messageReceiver handleAnyUnprocessedEnvelopesAsync];
 [SSKEnvironment.shared.batchMessageProcessor handleAnyUnprocessedEnvelopesAsync];
+[Environment.shared.audioSession setup];
 if (!Environment.shared.preferences.hasGeneratedThumbnails) {
 [self.primaryStorage.newDatabaseConnection

@@ -289,6 +289,15 @@ typedef enum : NSUInteger {
 _recordVoiceNoteAudioActivity = [AudioActivity recordActivityWithAudioDescription:audioActivityDescription];
 }
+#pragma mark - Dependencies
+- (OWSAudioSession *)audioSession
+{
+return Environment.shared.audioSession;
+}
+#pragma mark
 - (void)addNotificationListeners
 {
 [[NSNotificationCenter defaultCenter] addObserver:self
@@ -3614,7 +3623,7 @@ typedef enum : NSUInteger {
 NSURL *fileURL = [NSURL fileURLWithPath:filepath];
 // Setup audio session
-BOOL configuredAudio = [OWSAudioSession.shared startAudioActivity:self.recordVoiceNoteAudioActivity];
+BOOL configuredAudio = [self.audioSession startAudioActivity:self.recordVoiceNoteAudioActivity];
 if (!configuredAudio) {
 OWSFailDebug(@"Couldn't configure audio session");
 [self cancelVoiceMemo];
@@ -3715,7 +3724,7 @@ typedef enum : NSUInteger {
 - (void)stopRecording
 {
 [self.audioRecorder stop];
-[OWSAudioSession.shared endAudioActivity:self.recordVoiceNoteAudioActivity];
+[self.audioSession endAudioActivity:self.recordVoiceNoteAudioActivity];
 }
 - (void)cancelRecordingVoiceMemo

@@ -110,8 +110,9 @@ protocol CallAudioServiceDelegate: class {
 private let pulseDuration = 0.2
 var audioSession: OWSAudioSession {
-return OWSAudioSession.shared
+return Environment.shared.audioSession
 }
 var avAudioSession: AVAudioSession {
 return AVAudioSession.sharedInstance()
 }

@@ -26,13 +26,22 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
 super.init()
 }
+// MARK: Dependencies
+var audioSession: OWSAudioSession {
+return Environment.shared.audioSession
+}
+// MARK:
 func startOutgoingCall(handle: String) -> SignalCall {
 AssertIsOnMainThread()
 let call = SignalCall.outgoingCall(localId: UUID(), remotePhoneNumber: handle)
 // make sure we don't terminate audio session during call
-OWSAudioSession.shared.startAudioActivity(call.audioActivity)
+let success = self.audioSession.startAudioActivity(call.audioActivity)
+assert(success)
 self.callService.handleOutgoingCall(call).retainUntilComplete()
@@ -84,7 +93,7 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
 return
 }
-OWSAudioSession.shared.isRTCAudioEnabled = true
+self.audioSession.isRTCAudioEnabled = true
 self.callService.handleAnswerCall(call)
 }
@@ -118,7 +127,7 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
 func recipientAcceptedCall(_ call: SignalCall) {
 AssertIsOnMainThread()
-OWSAudioSession.shared.isRTCAudioEnabled = true
+self.audioSession.isRTCAudioEnabled = true
 }
 func localHangupCall(_ call: SignalCall) {

@@ -98,6 +98,12 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
 self.provider.setDelegate(self, queue: nil)
 }
+// MARK: Dependencies
+var audioSession: OWSAudioSession {
+return Environment.shared.audioSession
+}
 // MARK: CallUIAdaptee
 func startOutgoingCall(handle: String) -> SignalCall {
@@ -107,7 +113,7 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
 let call = SignalCall.outgoingCall(localId: UUID(), remotePhoneNumber: handle)
 // make sure we don't terminate audio session during call
-OWSAudioSession.shared.startAudioActivity(call.audioActivity)
+self.audioSession.startAudioActivity(call.audioActivity)
 // Add the new outgoing call to the app's list of calls.
 // So we can find it in the provider delegate callbacks.
@@ -379,16 +385,16 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
 Logger.debug("Received")
-OWSAudioSession.shared.startAudioActivity(self.audioActivity)
-OWSAudioSession.shared.isRTCAudioEnabled = true
+self.audioSession.startAudioActivity(self.audioActivity)
+self.audioSession.isRTCAudioEnabled = true
 }
 func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
 AssertIsOnMainThread()
 Logger.debug("Received")
-OWSAudioSession.shared.isRTCAudioEnabled = false
-OWSAudioSession.shared.endAudioActivity(self.audioActivity)
+self.audioSession.isRTCAudioEnabled = false
+self.audioSession.endAudioActivity(self.audioActivity)
 }
 // MARK: - Util

@@ -129,11 +129,19 @@ extension CallUIAdaptee {
 callService.addObserverAndSyncState(observer: self)
 }
+// MARK: Dependencies
+var audioSession: OWSAudioSession {
+return Environment.shared.audioSession
+}
+// MARK:
 internal func reportIncomingCall(_ call: SignalCall, thread: TSContactThread) {
 AssertIsOnMainThread()
 // make sure we don't terminate audio session during call
-OWSAudioSession.shared.startAudioActivity(call.audioActivity)
+audioSession.startAudioActivity(call.audioActivity)
 let callerName = self.contactsManager.displayName(forPhoneIdentifier: call.remotePhoneNumber)
 adaptee.reportIncomingCall(call, callerName: callerName)
@@ -181,7 +189,7 @@ extension CallUIAdaptee {
 AssertIsOnMainThread()
 if let call = call {
-OWSAudioSession.shared.endAudioActivity(call.audioActivity)
+self.audioSession.endAudioActivity(call.audioActivity)
 }
 }

@@ -32,17 +32,23 @@ public class OWSVideoPlayer: NSObject {
 object: avPlayer.currentItem)
 }
+// MARK: Dependencies
+var audioSession: OWSAudioSession {
+return Environment.shared.audioSession
+}
 // MARK: Playback Controls
 @objc
 public func pause() {
 avPlayer.pause()
-OWSAudioSession.shared.endAudioActivity(self.audioActivity)
+audioSession.endAudioActivity(self.audioActivity)
 }
 @objc
 public func play() {
-let success = OWSAudioSession.shared.startAudioActivity(self.audioActivity)
+let success = audioSession.startAudioActivity(self.audioActivity)
 assert(success)
 guard let item = avPlayer.currentItem else {
@@ -62,7 +68,7 @@ public class OWSVideoPlayer: NSObject {
 public func stop() {
 avPlayer.pause()
 avPlayer.seek(to: kCMTimeZero)
-OWSAudioSession.shared.endAudioActivity(self.audioActivity)
+audioSession.endAudioActivity(self.audioActivity)
 }
 @objc(seekToTime:)
@@ -75,6 +81,6 @@ public class OWSVideoPlayer: NSObject {
 @objc
 private func playerItemDidPlayToCompletion(_ notification: Notification) {
 self.delegate?.videoPlayerDidPlayToCompletion(self)
-OWSAudioSession.shared.endAudioActivity(self.audioActivity)
+audioSession.endAudioActivity(self.audioActivity)
 }
 }

@@ -83,14 +83,16 @@ NS_ASSUME_NONNULL_BEGIN
 [[OWSOutgoingReceiptManager alloc] initWithPrimaryStorage:primaryStorage];
 OWSSyncManager *syncManager = [[OWSSyncManager alloc] initDefault];
+OWSAudioSession *audioSession = [OWSAudioSession new];
 OWSSounds *sounds = [[OWSSounds alloc] initWithPrimaryStorage:primaryStorage];
 LockInteractionController *lockInteractionController = [[LockInteractionController alloc] initDefault];
 OWSWindowManager *windowManager = [[OWSWindowManager alloc] initDefault];
-[Environment setShared:[[Environment alloc] initWithPreferences:preferences
-sounds:sounds
-lockInteractionController:lockInteractionController
-windowManager:windowManager]];
+[Environment setShared:[[Environment alloc] initWithAudioSession:audioSession
+preferences:preferences
+sounds:sounds
+lockInteractionController:lockInteractionController
+windowManager:windowManager]];
 [SSKEnvironment setShared:[[SSKEnvironment alloc] initWithContactsManager:contactsManager
 messageSender:messageSender

@@ -5,6 +5,7 @@
 #import <SignalServiceKit/SSKEnvironment.h>
 @class LockInteractionController;
+@class OWSAudioSession;
 @class OWSContactsManager;
 @class OWSPreferences;
 @class OWSSounds;
@@ -17,16 +18,18 @@
 * It also handles network configuration for testing/deployment server configurations.
 *
 **/
-// TODO: Rename to AppEnvironment?
+// TODO: Rename to SMGEnvironment?
 @interface Environment : NSObject
 - (instancetype)init NS_UNAVAILABLE;
-- (instancetype)initWithPreferences:(OWSPreferences *)preferences
-sounds:(OWSSounds *)sounds
-lockInteractionController:(LockInteractionController *)lockInteractionController
-windowManager:(OWSWindowManager *)windowManager;
+- (instancetype)initWithAudioSession:(OWSAudioSession *)audioSession
+preferences:(OWSPreferences *)preferences
+sounds:(OWSSounds *)sounds
+lockInteractionController:(LockInteractionController *)lockInteractionController
+windowManager:(OWSWindowManager *)windowManager;
+@property (nonatomic, readonly) OWSAudioSession *audioSession;
 @property (nonatomic, readonly) OWSContactsManager *contactsManager;
 @property (nonatomic, readonly) OWSPreferences *preferences;
 @property (nonatomic, readonly) OWSSounds *sounds;

@@ -11,6 +11,7 @@ static Environment *sharedEnvironment = nil;
 @interface Environment ()
+@property (nonatomic) OWSAudioSession *audioSession;
 @property (nonatomic) OWSContactsManager *contactsManager;
 @property (nonatomic) OWSPreferences *preferences;
 @property (nonatomic) OWSSounds *sounds;
@@ -47,20 +48,24 @@ static Environment *sharedEnvironment = nil;
 sharedEnvironment = nil;
 }
-- (instancetype)initWithPreferences:(OWSPreferences *)preferences
-sounds:(OWSSounds *)sounds
-lockInteractionController:(LockInteractionController *)lockInteractionController
-windowManager:(OWSWindowManager *)windowManager {
+- (instancetype)initWithAudioSession:(OWSAudioSession *)audioSession
+preferences:(OWSPreferences *)preferences
+sounds:(OWSSounds *)sounds
+lockInteractionController:(LockInteractionController *)lockInteractionController
+windowManager:(OWSWindowManager *)windowManager
+{
 self = [super init];
 if (!self) {
 return self;
 }
+OWSAssertDebug(audioSession);
 OWSAssertDebug(preferences);
 OWSAssertDebug(sounds);
 OWSAssertDebug(lockInteractionController);
 OWSAssertDebug(windowManager);
+_audioSession = audioSession;
 _preferences = preferences;
 _sounds = sounds;
 _lockInteractionController = lockInteractionController;

@@ -35,7 +35,13 @@ public class AudioActivity: NSObject {
 }
 deinit {
-OWSAudioSession.shared.ensureAudioSessionActivationStateAfterDelay()
+audioSession.ensureAudioSessionActivationStateAfterDelay()
 }
+// MARK: Dependencies
+var audioSession: OWSAudioSession {
+return Environment.shared.audioSession
+}
 // MARK: Factory Methods
@@ -65,17 +71,9 @@ public class AudioActivity: NSObject {
 @objc
 public class OWSAudioSession: NSObject {
-// Force singleton access
-@objc public static let shared = OWSAudioSession()
-private override init() {}
 @objc
 public func setup() {
-NotificationCenter.default.addObserver(forName: .UIDeviceProximityStateDidChange,
-object: nil,
-queue: nil) { [weak self] _ in
-self?.ensureProximityState()
-}
+NotificationCenter.default.addObserver(self, selector: #selector(proximitySensorStateDidChange(notification:)), name: .UIDeviceProximityStateDidChange, object: nil)
 }
 // MARK: Dependencies

@@ -84,6 +84,15 @@ NS_ASSUME_NONNULL_BEGIN
 [self stop];
 }
+#pragma mark - Dependencies
+- (OWSAudioSession *)audioSession
+{
+return Environment.shared.audioSession;
+}
+#pragma mark
 - (void)applicationDidEnterBackground:(NSNotification *)notification
 {
 [self stop];
@@ -107,7 +116,7 @@ NS_ASSUME_NONNULL_BEGIN
 {
 OWSAssertIsOnMainThread();
-BOOL success = [OWSAudioSession.shared startAudioActivity:audioActivity];
+BOOL success = [self.audioSession startAudioActivity:audioActivity];
 OWSAssertDebug(success);
 OWSAssertDebug(self.mediaUrl);
@@ -179,8 +188,8 @@ NS_ASSUME_NONNULL_BEGIN
 - (void)endAudioActivities
 {
-[OWSAudioSession.shared endAudioActivity:self.playbackAudioActivity];
-[OWSAudioSession.shared endAudioActivity:self.currentCategoryAudioActivity];
+[self.audioSession endAudioActivity:self.playbackAudioActivity];
+[self.audioSession endAudioActivity:self.currentCategoryAudioActivity];
 }
 - (void)togglePlayStateWithPlaybackAudioCategory
