Merge branch 'mkirk/voicenote-earpiece' into release/2.31.0

pull/1/head
Michael Kirk 6 years ago
commit 0270423a17

@@ -1107,6 +1107,7 @@ static NSTimeInterval launchStartedAt;
     // If there were any messages in our local queue which we hadn't yet processed.
    [SSKEnvironment.shared.messageReceiver handleAnyUnprocessedEnvelopesAsync];
    [SSKEnvironment.shared.batchMessageProcessor handleAnyUnprocessedEnvelopesAsync];
+    [Environment.shared.audioSession setup];

     if (!Environment.shared.preferences.hasGeneratedThumbnails) {
         [self.primaryStorage.newDatabaseConnection

@@ -112,10 +112,10 @@ NS_ASSUME_NONNULL_BEGIN
 - (void)soundWasSelected:(OWSSound)sound
 {
     [self.audioPlayer stop];
-    self.audioPlayer = [OWSSounds audioPlayerForSound:sound];
+    self.audioPlayer = [OWSSounds audioPlayerForSound:sound audioBehavior:OWSAudioBehavior_Playback];
     // Suppress looping in this view.
     self.audioPlayer.isLooping = NO;
-    [self.audioPlayer playWithPlaybackAudioCategory];
+    [self.audioPlayer play];

     if (self.currentSound == sound) {
         return;

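The hunk above shows the call-site pattern this branch establishes everywhere: the reason for playing audio (an `OWSAudioBehavior`) is declared once, when the player is built, and the play-variant methods collapse into a plain `play`. A minimal Swift sketch of the same pattern (the surrounding controller state is assumed, not from this diff):

```swift
// Sketch only: assumes the OWSSounds/OWSAudioPlayer API introduced in this
// branch; `audioPlayer` is a hypothetical property on the hosting controller.
func previewSound(_ sound: OWSSound) {
    audioPlayer?.stop()

    // The behavior travels with the player, so the audio-session category
    // is derived centrally instead of being chosen at each play() call.
    guard let player = OWSSounds.audioPlayer(for: sound, audioBehavior: .playback) else {
        return
    }
    player.isLooping = false // suppress looping while previewing
    player.play()
    audioPlayer = player
}
```
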
@@ -2,8 +2,6 @@
 //  Copyright (c) 2018 Open Whisper Systems. All rights reserved.
 //

-#import "OWSAudioPlayer.h"
-
 NS_ASSUME_NONNULL_BEGIN

 @class ConversationStyle;

@@ -149,7 +149,7 @@ typedef enum : NSUInteger {
 @property (nonatomic) TSThread *thread;
 @property (nonatomic, readonly) YapDatabaseConnection *editingDatabaseConnection;

-@property (nonatomic, readonly) AudioActivity *voiceNoteAudioActivity;
+@property (nonatomic, readonly) OWSAudioActivity *recordVoiceNoteAudioActivity;
 @property (nonatomic, readonly) NSTimeInterval viewControllerCreatedAt;

 // These two properties must be updated in lockstep.
@@ -286,9 +286,18 @@ typedef enum : NSUInteger {
     _contactShareViewHelper.delegate = self;

     NSString *audioActivityDescription = [NSString stringWithFormat:@"%@ voice note", self.logTag];
-    _voiceNoteAudioActivity = [[AudioActivity alloc] initWithAudioDescription:audioActivityDescription];
+    _recordVoiceNoteAudioActivity = [[OWSAudioActivity alloc] initWithAudioDescription:audioActivityDescription behavior:OWSAudioBehavior_PlayAndRecord];
 }

+#pragma mark - Dependencies
+
+- (OWSAudioSession *)audioSession
+{
+    return Environment.shared.audioSession;
+}
+
+#pragma mark
+
 - (void)addNotificationListeners
 {
     [[NSNotificationCenter defaultCenter] addObserver:self
@@ -2229,11 +2238,13 @@ typedef enum : NSUInteger {
         [self.audioAttachmentPlayer stop];
         self.audioAttachmentPlayer = nil;
     }

     self.audioAttachmentPlayer =
-        [[OWSAudioPlayer alloc] initWithMediaUrl:attachmentStream.originalMediaURL delegate:viewItem];
+        [[OWSAudioPlayer alloc] initWithMediaUrl:attachmentStream.originalMediaURL
+                                   audioBehavior:OWSAudioBehavior_AudioMessagePlayback
+                                        delegate:viewItem];
     // Associate the player with this media adapter.
     self.audioAttachmentPlayer.owner = viewItem;
-    [self.audioAttachmentPlayer playWithPlaybackAudioCategory];
+    [self.audioAttachmentPlayer play];
 }

 - (void)didTapTruncatedTextMessage:(id<ConversationViewItem>)conversationItem
@@ -3613,7 +3624,7 @@ typedef enum : NSUInteger {
     NSURL *fileURL = [NSURL fileURLWithPath:filepath];

     // Setup audio session
-    BOOL configuredAudio = [OWSAudioSession.shared startRecordingAudioActivity:self.voiceNoteAudioActivity];
+    BOOL configuredAudio = [self.audioSession startAudioActivity:self.recordVoiceNoteAudioActivity];
     if (!configuredAudio) {
         OWSFailDebug(@"Couldn't configure audio session");
         [self cancelVoiceMemo];
@@ -3714,7 +3725,7 @@ typedef enum : NSUInteger {
 - (void)stopRecording
 {
     [self.audioRecorder stop];
-    [OWSAudioSession.shared endAudioActivity:self.voiceNoteAudioActivity];
+    [self.audioSession endAudioActivity:self.recordVoiceNoteAudioActivity];
 }

 - (void)cancelRecordingVoiceMemo

@@ -675,12 +675,12 @@ class MessageDetailViewController: OWSViewController, MediaGalleryDataSourceDelegate {
             self.audioAttachmentPlayer = nil
         }

-        let audioAttachmentPlayer = OWSAudioPlayer(mediaUrl: mediaURL, delegate: viewItem)
+        let audioAttachmentPlayer = OWSAudioPlayer(mediaUrl: mediaURL, audioBehavior: .audioMessagePlayback, delegate: viewItem)
         self.audioAttachmentPlayer = audioAttachmentPlayer

         // Associate the player with this media adapter.
         audioAttachmentPlayer.owner = viewItem
-        audioAttachmentPlayer.playWithPlaybackAudioCategory()
+        audioAttachmentPlayer.play()
     }

     func didTapTruncatedTextMessage(_ conversationItem: ConversationViewItem) {

@@ -110,8 +110,9 @@ protocol CallAudioServiceDelegate: class {
     private let pulseDuration = 0.2

     var audioSession: OWSAudioSession {
-        return OWSAudioSession.shared
+        return Environment.shared.audioSession
     }
+
     var avAudioSession: AVAudioSession {
         return AVAudioSession.sharedInstance()
     }
@@ -396,7 +397,7 @@ protocol CallAudioServiceDelegate: class {
     }

     private func play(sound: OWSSound) {
-        guard let newPlayer = OWSSounds.audioPlayer(for: sound) else {
+        guard let newPlayer = OWSSounds.audioPlayer(for: sound, audioBehavior: .call) else {
             owsFailDebug("unable to build player for sound: \(OWSSounds.displayName(for: sound))")
             return
         }
@@ -406,7 +407,7 @@ protocol CallAudioServiceDelegate: class {
         // we're playing the same sound, since the player is memoized on the sound instance, we'd otherwise
         // stop the sound we just started.
         self.currentPlayer?.stop()
-        newPlayer.playWithCurrentAudioCategory()
+        newPlayer.play()
         self.currentPlayer = newPlayer
     }

@@ -26,13 +26,22 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
         super.init()
     }

+    // MARK: Dependencies
+
+    var audioSession: OWSAudioSession {
+        return Environment.shared.audioSession
+    }
+
+    // MARK:
+
     func startOutgoingCall(handle: String) -> SignalCall {
         AssertIsOnMainThread()

         let call = SignalCall.outgoingCall(localId: UUID(), remotePhoneNumber: handle)

         // make sure we don't terminate audio session during call
-        OWSAudioSession.shared.startAudioActivity(call.audioActivity)
+        let success = self.audioSession.startAudioActivity(call.audioActivity)
+        assert(success)

         self.callService.handleOutgoingCall(call).retainUntilComplete()
@@ -84,7 +93,7 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
             return
         }

-        OWSAudioSession.shared.isRTCAudioEnabled = true
+        self.audioSession.isRTCAudioEnabled = true
         self.callService.handleAnswerCall(call)
     }
@@ -118,7 +127,7 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
     func recipientAcceptedCall(_ call: SignalCall) {
         AssertIsOnMainThread()

-        OWSAudioSession.shared.isRTCAudioEnabled = true
+        self.audioSession.isRTCAudioEnabled = true
     }

     func localHangupCall(_ call: SignalCall) {

@@ -161,7 +161,7 @@ protocol CallObserver: class {
         self.state = state
         self.remotePhoneNumber = remotePhoneNumber
         self.thread = TSContactThread.getOrCreateThread(contactId: remotePhoneNumber)
-        self.audioActivity = AudioActivity(audioDescription: "[SignalCall] with \(remotePhoneNumber)")
+        self.audioActivity = AudioActivity(audioDescription: "[SignalCall] with \(remotePhoneNumber)", behavior: .call)
     }

     // A string containing the three identifiers for this call.

@@ -88,7 +88,7 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
         self.provider = type(of: self).sharedProvider(useSystemCallLog: useSystemCallLog)

-        self.audioActivity = AudioActivity(audioDescription: "[CallKitCallUIAdaptee]")
+        self.audioActivity = AudioActivity(audioDescription: "[CallKitCallUIAdaptee]", behavior: .call)
         self.showNamesOnCallScreen = showNamesOnCallScreen

         super.init()
@@ -98,6 +98,12 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
         self.provider.setDelegate(self, queue: nil)
     }

+    // MARK: Dependencies
+
+    var audioSession: OWSAudioSession {
+        return Environment.shared.audioSession
+    }
+
     // MARK: CallUIAdaptee

     func startOutgoingCall(handle: String) -> SignalCall {
@@ -107,7 +113,7 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
         let call = SignalCall.outgoingCall(localId: UUID(), remotePhoneNumber: handle)

         // make sure we don't terminate audio session during call
-        OWSAudioSession.shared.startAudioActivity(call.audioActivity)
+        self.audioSession.startAudioActivity(call.audioActivity)

         // Add the new outgoing call to the app's list of calls.
         // So we can find it in the provider delegate callbacks.
@@ -379,16 +385,16 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
         Logger.debug("Received")

-        OWSAudioSession.shared.startAudioActivity(self.audioActivity)
-        OWSAudioSession.shared.isRTCAudioEnabled = true
+        self.audioSession.startAudioActivity(self.audioActivity)
+        self.audioSession.isRTCAudioEnabled = true
     }

     func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
         AssertIsOnMainThread()

         Logger.debug("Received")

-        OWSAudioSession.shared.isRTCAudioEnabled = false
-        OWSAudioSession.shared.endAudioActivity(self.audioActivity)
+        self.audioSession.isRTCAudioEnabled = false
+        self.audioSession.endAudioActivity(self.audioActivity)
     }

     // MARK: - Util

@@ -129,11 +129,19 @@ extension CallUIAdaptee {
         callService.addObserverAndSyncState(observer: self)
     }

+    // MARK: Dependencies
+
+    var audioSession: OWSAudioSession {
+        return Environment.shared.audioSession
+    }
+
+    // MARK:
+
     internal func reportIncomingCall(_ call: SignalCall, thread: TSContactThread) {
         AssertIsOnMainThread()

         // make sure we don't terminate audio session during call
-        OWSAudioSession.shared.startAudioActivity(call.audioActivity)
+        audioSession.startAudioActivity(call.audioActivity)

         let callerName = self.contactsManager.displayName(forPhoneIdentifier: call.remotePhoneNumber)
         adaptee.reportIncomingCall(call, callerName: callerName)
@@ -181,7 +189,7 @@ extension CallUIAdaptee {
         AssertIsOnMainThread()

         if let call = call {
-            OWSAudioSession.shared.endAudioActivity(call.audioActivity)
+            self.audioSession.endAudioActivity(call.audioActivity)
         }
     }

@@ -131,7 +131,7 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
             return
         }

-        audioPlayer = OWSAudioPlayer(mediaUrl: dataUrl, delegate: self)
+        audioPlayer = OWSAudioPlayer(mediaUrl: dataUrl, audioBehavior: .playback, delegate: self)

         var subviews = [UIView]()

@@ -22,7 +22,7 @@ public class OWSVideoPlayer: NSObject {
     @objc init(url: URL) {
         self.avPlayer = AVPlayer(url: url)
-        self.audioActivity = AudioActivity(audioDescription: "[OWSVideoPlayer] url:\(url)")
+        self.audioActivity = AudioActivity(audioDescription: "[OWSVideoPlayer] url:\(url)", behavior: .playback)

         super.init()
@@ -32,17 +32,24 @@ public class OWSVideoPlayer: NSObject {
                                                object: avPlayer.currentItem)
     }

+    // MARK: Dependencies
+
+    var audioSession: OWSAudioSession {
+        return Environment.shared.audioSession
+    }
+
     // MARK: Playback Controls

     @objc
     public func pause() {
         avPlayer.pause()
-        OWSAudioSession.shared.endAudioActivity(self.audioActivity)
+        audioSession.endAudioActivity(self.audioActivity)
     }

     @objc
     public func play() {
-        OWSAudioSession.shared.startPlaybackAudioActivity(self.audioActivity)
+        let success = audioSession.startAudioActivity(self.audioActivity)
+        assert(success)

         guard let item = avPlayer.currentItem else {
             owsFailDebug("video player item was unexpectedly nil")
@@ -61,7 +68,7 @@ public class OWSVideoPlayer: NSObject {
     public func stop() {
         avPlayer.pause()
         avPlayer.seek(to: kCMTimeZero)
-        OWSAudioSession.shared.endAudioActivity(self.audioActivity)
+        audioSession.endAudioActivity(self.audioActivity)
     }

     @objc(seekToTime:)
@@ -74,6 +81,6 @@ public class OWSVideoPlayer: NSObject {
     @objc
     private func playerItemDidPlayToCompletion(_ notification: Notification) {
         self.delegate?.videoPlayerDidPlayToCompletion(self)
-        OWSAudioSession.shared.endAudioActivity(self.audioActivity)
+        audioSession.endAudioActivity(self.audioActivity)
     }
 }

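Every terminal path in OWSVideoPlayer (pause, stop, play-to-completion) now ends the same audio activity that `play()` started, which is what lets OWSAudioSession restore the category once no activities remain. The same pairing, sketched as a scope-style wrapper (`ScopedAudioActivity` is hypothetical, not part of this branch; it relies only on the `AudioActivity`/`OWSAudioSession` API introduced here):

```swift
// Hypothetical wrapper illustrating the start/end discipline used above.
final class ScopedAudioActivity {
    private let activity: AudioActivity
    private let audioSession = Environment.shared.audioSession

    // Fails if the session could not configure a category for this behavior.
    init?(description: String, behavior: OWSAudioBehavior) {
        self.activity = AudioActivity(audioDescription: description, behavior: behavior)
        guard audioSession.startAudioActivity(activity) else { return nil }
    }

    // Call from every terminal path: pause, stop, and play-to-completion.
    func end() {
        audioSession.endAudioActivity(activity)
    }
}
```
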
@@ -83,14 +83,16 @@ NS_ASSUME_NONNULL_BEGIN
         [[OWSOutgoingReceiptManager alloc] initWithPrimaryStorage:primaryStorage];
     OWSSyncManager *syncManager = [[OWSSyncManager alloc] initDefault];

+    OWSAudioSession *audioSession = [OWSAudioSession new];
     OWSSounds *sounds = [[OWSSounds alloc] initWithPrimaryStorage:primaryStorage];
     LockInteractionController *lockInteractionController = [[LockInteractionController alloc] initDefault];
     OWSWindowManager *windowManager = [[OWSWindowManager alloc] initDefault];

-    [Environment setShared:[[Environment alloc] initWithPreferences:preferences
-                                                              sounds:sounds
-                                           lockInteractionController:lockInteractionController
-                                                       windowManager:windowManager]];
+    [Environment setShared:[[Environment alloc] initWithAudioSession:audioSession
+                                                          preferences:preferences
+                                                               sounds:sounds
+                                            lockInteractionController:lockInteractionController
+                                                        windowManager:windowManager]];

     [SSKEnvironment setShared:[[SSKEnvironment alloc] initWithContactsManager:contactsManager
                                                                  messageSender:messageSender

@@ -5,6 +5,7 @@
 #import <SignalServiceKit/SSKEnvironment.h>

 @class LockInteractionController;
+@class OWSAudioSession;
 @class OWSContactsManager;
 @class OWSPreferences;
 @class OWSSounds;
@@ -17,16 +18,18 @@
  * It also handles network configuration for testing/deployment server configurations.
  *
 **/
-// TODO: Rename to AppEnvironment?
+// TODO: Rename to SMGEnvironment?
 @interface Environment : NSObject

 - (instancetype)init NS_UNAVAILABLE;
-- (instancetype)initWithPreferences:(OWSPreferences *)preferences
-                             sounds:(OWSSounds *)sounds
-          lockInteractionController:(LockInteractionController *)lockInteractionController
-                      windowManager:(OWSWindowManager *)windowManager;
+- (instancetype)initWithAudioSession:(OWSAudioSession *)audioSession
+                         preferences:(OWSPreferences *)preferences
+                              sounds:(OWSSounds *)sounds
+           lockInteractionController:(LockInteractionController *)lockInteractionController
+                       windowManager:(OWSWindowManager *)windowManager;

+@property (nonatomic, readonly) OWSAudioSession *audioSession;
 @property (nonatomic, readonly) OWSContactsManager *contactsManager;
 @property (nonatomic, readonly) OWSPreferences *preferences;
 @property (nonatomic, readonly) OWSSounds *sounds;

@@ -11,6 +11,7 @@ static Environment *sharedEnvironment = nil;
 @interface Environment ()

+@property (nonatomic) OWSAudioSession *audioSession;
 @property (nonatomic) OWSContactsManager *contactsManager;
 @property (nonatomic) OWSPreferences *preferences;
 @property (nonatomic) OWSSounds *sounds;
@@ -47,20 +48,24 @@ static Environment *sharedEnvironment = nil;
     sharedEnvironment = nil;
 }

-- (instancetype)initWithPreferences:(OWSPreferences *)preferences
-                             sounds:(OWSSounds *)sounds
-          lockInteractionController:(LockInteractionController *)lockInteractionController
-                      windowManager:(OWSWindowManager *)windowManager {
+- (instancetype)initWithAudioSession:(OWSAudioSession *)audioSession
+                         preferences:(OWSPreferences *)preferences
+                              sounds:(OWSSounds *)sounds
+           lockInteractionController:(LockInteractionController *)lockInteractionController
+                       windowManager:(OWSWindowManager *)windowManager
+{
     self = [super init];
     if (!self) {
         return self;
     }

+    OWSAssertDebug(audioSession);
     OWSAssertDebug(preferences);
     OWSAssertDebug(sounds);
     OWSAssertDebug(lockInteractionController);
     OWSAssertDebug(windowManager);

+    _audioSession = audioSession;
     _preferences = preferences;
     _sounds = sounds;
     _lockInteractionController = lockInteractionController;

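Environment now owns the audio session, replacing the `OWSAudioSession.shared` singleton. Each consumer exposes a computed `audioSession` accessor that resolves through `Environment.shared` at use time, so the dependency can be swapped before use (e.g. in tests). The shape, sketched with a hypothetical consumer:

```swift
// Hypothetical consumer showing the accessor pattern repeated throughout
// this branch (CallAudioService, the CallUIAdaptees, OWSVideoPlayer, ...).
class SomeAudioConsumer: NSObject {

    // MARK: Dependencies

    var audioSession: OWSAudioSession {
        // Resolved lazily through Environment rather than a global singleton.
        return Environment.shared.audioSession
    }

    func beginPlayback(_ activity: AudioActivity) {
        let success = audioSession.startAudioActivity(activity)
        assert(success)
    }
}
```
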
@@ -5,102 +5,118 @@
 import Foundation
 import WebRTC

-@objc
+@objc(OWSAudioActivity)
 public class AudioActivity: NSObject {
     let audioDescription: String

-    override public var description: String {
-        return "<\(self.logTag) audioDescription: \"\(audioDescription)\">"
-    }
+    let behavior: OWSAudioBehavior

     @objc
-    public init(audioDescription: String) {
+    public init(audioDescription: String, behavior: OWSAudioBehavior) {
         self.audioDescription = audioDescription
+        self.behavior = behavior
     }

     deinit {
-        OWSAudioSession.shared.ensureAudioSessionActivationStateAfterDelay()
+        audioSession.ensureAudioSessionActivationStateAfterDelay()
     }
+
+    // MARK: Dependencies
+
+    var audioSession: OWSAudioSession {
+        return Environment.shared.audioSession
+    }
+
+    // MARK:
+
+    override public var description: String {
+        return "<\(self.logTag) audioDescription: \"\(audioDescription)\">"
+    }
 }

 @objc
 public class OWSAudioSession: NSObject {

-    // Force singleton access
-    @objc public static let shared = OWSAudioSession()
-    private override init() {}
+    @objc
+    public func setup() {
+        NotificationCenter.default.addObserver(self, selector: #selector(proximitySensorStateDidChange(notification:)), name: .UIDeviceProximityStateDidChange, object: nil)
+    }
+
+    // MARK: Dependencies

     private let avAudioSession = AVAudioSession.sharedInstance()

+    private let device = UIDevice.current
+
+    // MARK:
+
     private var currentActivities: [Weak<AudioActivity>] = []
+    var aggregateBehaviors: Set<OWSAudioBehavior> {
+        return Set(self.currentActivities.compactMap { $0.value?.behavior })
+    }

-    // Respects hardware mute switch, plays through external speaker, mixes with backround audio
-    // appropriate for foreground sound effects.
     @objc
-    public func startAmbientAudioActivity(_ audioActivity: AudioActivity) {
-        Logger.debug("")
-
-        objc_sync_enter(self)
-        defer { objc_sync_exit(self) }
-
-        startAudioActivity(audioActivity)
-        guard currentActivities.count == 1 else {
-            // We don't want to clobber the audio capabilities configured by (e.g.) media playback or an in-progress call
-            Logger.info("not touching audio session since another currentActivity exists.")
-            return
-        }
-
-        do {
-            try avAudioSession.setCategory(AVAudioSessionCategoryAmbient)
-        } catch {
-            owsFailDebug("failed with error: \(error)")
-        }
-    }
-
-    // Ignores hardware mute switch, plays through external speaker
-    @objc
-    public func startPlaybackAudioActivity(_ audioActivity: AudioActivity) {
-        Logger.debug("")
+    public func startAudioActivity(_ audioActivity: AudioActivity) -> Bool {
+        Logger.debug("with \(audioActivity)")

         objc_sync_enter(self)
         defer { objc_sync_exit(self) }

-        startAudioActivity(audioActivity)
+        self.currentActivities.append(Weak(value: audioActivity))

         do {
-            try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
-        } catch {
-            owsFailDebug("failed with error: \(error)")
-        }
-    }
-
-    @objc
-    public func startRecordingAudioActivity(_ audioActivity: AudioActivity) -> Bool {
-        Logger.debug("")
-
-        objc_sync_enter(self)
-        defer { objc_sync_exit(self) }
-
-        assert(avAudioSession.recordPermission() == .granted)
-
-        startAudioActivity(audioActivity)
-
-        do {
-            try avAudioSession.setCategory(AVAudioSessionCategoryRecord)
+            if aggregateBehaviors.contains(.call) {
+                // Do nothing while on a call.
+                // WebRTC/CallAudioService manages call audio
+                // Eventually it would be nice to consolidate more of the audio
+                // session handling.
+            } else {
+                if aggregateBehaviors.contains(.playAndRecord) {
+                    assert(avAudioSession.recordPermission() == .granted)
+                    try avAudioSession.setCategory(AVAudioSessionCategoryRecord)
+                } else if aggregateBehaviors.contains(.audioMessagePlayback) {
+                    try ensureCategoryForProximityState()
+                } else if aggregateBehaviors.contains(.playback) {
+                    try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
+                } else {
+                    owsFailDebug("no category option specified. Leaving category untouched.")
+                }
+
+                if aggregateBehaviors.contains(.audioMessagePlayback) {
+                    self.device.isProximityMonitoringEnabled = true
+                } else {
+                    self.device.isProximityMonitoringEnabled = false
+                }
+            }
+
             return true
         } catch {
             owsFailDebug("failed with error: \(error)")
             return false
         }
     }

     @objc
-    public func startAudioActivity(_ audioActivity: AudioActivity) {
-        Logger.debug("with \(audioActivity)")
-
-        objc_sync_enter(self)
-        defer { objc_sync_exit(self) }
-
-        self.currentActivities.append(Weak(value: audioActivity))
+    func proximitySensorStateDidChange(notification: Notification) {
+        do {
+            try ensureCategoryForProximityState()
+        } catch {
+            owsFailDebug("error in response to proximity change: \(error)")
+        }
+    }
+
+    func ensureCategoryForProximityState() throws {
+        if aggregateBehaviors.contains(.audioMessagePlayback) {
+            if self.device.proximityState {
+                Logger.debug("proximityState: true")
+
+                try avAudioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
+                try avAudioSession.overrideOutputAudioPort(.none)
+            } else {
+                Logger.debug("proximityState: false")
+                try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
+            }
+        }
     }

     @objc
@@ -111,6 +127,11 @@ public class OWSAudioSession: NSObject {
         defer { objc_sync_exit(self) }

         currentActivities = currentActivities.filter { return $0.value != audioActivity }
+        do {
+            try ensureCategoryForProximityState()
+        } catch {
+            owsFailDebug("error in ensureProximityState: \(error)")
+        }

         ensureAudioSessionActivationStateAfterDelay()
     }

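The core of the refactor is above: `startAudioActivity` folds every live activity into a set of behaviors and derives a single AVAudioSession category, with calls taking priority over recording, recording over audio-message playback, and plain playback last. The same precedence, restated as a standalone pure function (`resolveCategory` is illustrative, not code from this branch):

```swift
import AVFoundation

// Illustrative restatement of the category-resolution order in
// startAudioActivity; the real code also toggles proximity monitoring and
// defers the earpiece-vs-speaker choice to ensureCategoryForProximityState.
enum Behavior { case unknown, playback, audioMessagePlayback, playAndRecord, call }

func resolveCategory(for behaviors: Set<Behavior>) -> String? {
    if behaviors.contains(.call) {
        return nil // WebRTC/CallAudioService owns the session during calls
    }
    if behaviors.contains(.playAndRecord) {
        return AVAudioSessionCategoryRecord
    }
    if behaviors.contains(.audioMessagePlayback) {
        // Playback by default; swapped to PlayAndRecord (earpiece routing)
        // when the proximity sensor reports the phone is at the user's ear.
        return AVAudioSessionCategoryPlayback
    }
    if behaviors.contains(.playback) {
        return AVAudioSessionCategoryPlayback
    }
    return nil // no behavior specified; leave the category untouched
}
```
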
@@ -2,6 +2,7 @@
 //  Copyright (c) 2018 Open Whisper Systems. All rights reserved.
 //

+#import "OWSAudioPlayer.h"
 #import <AudioToolbox/AudioServices.h>

 NS_ASSUME_NONNULL_BEGIN
@@ -68,7 +69,8 @@ typedef NS_ENUM(NSUInteger, OWSSound) {

 #pragma mark - AudioPlayer

-+ (nullable OWSAudioPlayer *)audioPlayerForSound:(OWSSound)sound;
++ (nullable OWSAudioPlayer *)audioPlayerForSound:(OWSSound)sound
+                                   audioBehavior:(OWSAudioBehavior)audioBehavior;

 @end

@@ -375,17 +375,13 @@ NSString *const kOWSSoundsStorageGlobalNotificationKey = @"kOWSSoundsStorageGlobalNotificationKey";
 }

 + (nullable OWSAudioPlayer *)audioPlayerForSound:(OWSSound)sound
+                                   audioBehavior:(OWSAudioBehavior)audioBehavior;
 {
-    return [self audioPlayerForSound:sound quiet:NO];
-}
-
-+ (nullable OWSAudioPlayer *)audioPlayerForSound:(OWSSound)sound quiet:(BOOL)quiet
-{
-    NSURL *_Nullable soundURL = [OWSSounds soundURLForSound:sound quiet:(BOOL)quiet];
+    NSURL *_Nullable soundURL = [OWSSounds soundURLForSound:sound quiet:NO];
     if (!soundURL) {
         return nil;
     }
-    OWSAudioPlayer *player = [[OWSAudioPlayer alloc] initWithMediaUrl:soundURL];
+    OWSAudioPlayer *player = [[OWSAudioPlayer alloc] initWithMediaUrl:soundURL audioBehavior:audioBehavior];
     if ([self shouldAudioPlayerLoopForSound:sound]) {
         player.isLooping = YES;
     }

@@ -21,6 +21,14 @@ typedef NS_ENUM(NSInteger, AudioPlaybackState) {

 #pragma mark -

+typedef NS_ENUM(NSUInteger, OWSAudioBehavior) {
+    OWSAudioBehavior_Unknown,
+    OWSAudioBehavior_Playback,
+    OWSAudioBehavior_AudioMessagePlayback,
+    OWSAudioBehavior_PlayAndRecord,
+    OWSAudioBehavior_Call,
+};
+
 @interface OWSAudioPlayer : NSObject

 @property (nonatomic, readonly, weak) id<OWSAudioPlayerDelegate> delegate;
@@ -31,16 +39,13 @@ typedef NS_ENUM(NSInteger, AudioPlaybackState) {

 @property (nonatomic) BOOL isLooping;

-- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl;
-- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl delegate:(id<OWSAudioPlayerDelegate>)delegate;
-
-// respects silent switch
-- (void)playWithCurrentAudioCategory;
+- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl audioBehavior:(OWSAudioBehavior)audioBehavior;

-// will ensure sound is audible, even if silent switch is enabled
-- (void)playWithPlaybackAudioCategory;
+- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl
+                   audioBehavior:(OWSAudioBehavior)audioBehavior
+                        delegate:(id<OWSAudioPlayerDelegate>)delegate;

+- (void)play;
 - (void)pause;
 - (void)stop;
 - (void)togglePlayState;

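The new `OWSAudioBehavior` NS_ENUM bridges into Swift as `.playback`, `.audioMessagePlayback`, `.playAndRecord`, and `.call`, which is the form the Swift hunks in this diff use. How the cases pair with their call sites across this branch, summarized as a sketch:

```swift
// Summary sketch; the pairings are taken from the call sites in this diff.
let behaviorByUseCase: [String: OWSAudioBehavior] = [
    "sound settings preview":  .playback,             // OWSSoundSettingsViewController
    "media message previews":  .playback,             // MediaMessageView, OWSVideoPlayer
    "voice message playback":  .audioMessagePlayback, // conversation & message detail views
    "voice memo recording":    .playAndRecord,        // ConversationViewController
    "signal calls":            .call                  // SignalCall, CallKit/NonCallKit adaptees
]
```
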
@@ -35,7 +35,7 @@ NS_ASSUME_NONNULL_BEGIN
 @property (nonatomic, readonly) NSURL *mediaUrl;
 @property (nonatomic, nullable) AVAudioPlayer *audioPlayer;
 @property (nonatomic, nullable) NSTimer *audioPlayerPoller;
-@property (nonatomic, readonly) AudioActivity *audioActivity;
+@property (nonatomic, readonly) OWSAudioActivity *audioActivity;

 @end
@@ -44,11 +44,14 @@ NS_ASSUME_NONNULL_BEGIN
 @implementation OWSAudioPlayer

 - (instancetype)initWithMediaUrl:(NSURL *)mediaUrl
+                   audioBehavior:(OWSAudioBehavior)audioBehavior
 {
-    return [self initWithMediaUrl:mediaUrl delegate:[OWSAudioPlayerDelegateStub new]];
+    return [self initWithMediaUrl:mediaUrl audioBehavior:audioBehavior delegate:[OWSAudioPlayerDelegateStub new]];
 }

-- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl delegate:(id<OWSAudioPlayerDelegate>)delegate
+- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl
+                   audioBehavior:(OWSAudioBehavior)audioBehavior
+                        delegate:(id<OWSAudioPlayerDelegate>)delegate
 {
     self = [super init];
     if (!self) {
@@ -58,11 +61,11 @@ NS_ASSUME_NONNULL_BEGIN
     OWSAssertDebug(mediaUrl);
     OWSAssertDebug(delegate);

-    _delegate = delegate;
     _mediaUrl = mediaUrl;
+    _delegate = delegate;

     NSString *audioActivityDescription = [NSString stringWithFormat:@"%@ %@", self.logTag, self.mediaUrl];
-    _audioActivity = [[AudioActivity alloc] initWithAudioDescription:audioActivityDescription];
+    _audioActivity = [[OWSAudioActivity alloc] initWithAudioDescription:audioActivityDescription behavior:audioBehavior];

     [[NSNotificationCenter defaultCenter] addObserver:self
                                              selector:@selector(applicationDidEnterBackground:)
@@ -81,6 +84,15 @@ NS_ASSUME_NONNULL_BEGIN
     [self stop];
 }

+#pragma mark - Dependencies
+
+- (OWSAudioSession *)audioSession
+{
+    return Environment.shared.audioSession;
+}
+
+#pragma mark
+
 - (void)applicationDidEnterBackground:(NSNotification *)notification
 {
     [self stop];
@@ -88,25 +100,21 @@ NS_ASSUME_NONNULL_BEGIN

 #pragma mark - Methods

-- (void)playWithCurrentAudioCategory
+- (void)play
 {
+    // get current audio activity
     OWSAssertIsOnMainThread();
-    [OWSAudioSession.shared startAudioActivity:self.audioActivity];
-
-    [self play];
+    [self playWithAudioActivity:self.audioActivity];
 }

-- (void)playWithPlaybackAudioCategory
+- (void)playWithAudioActivity:(OWSAudioActivity *)audioActivity
 {
     OWSAssertIsOnMainThread();
-    [OWSAudioSession.shared startPlaybackAudioActivity:self.audioActivity];
-
-    [self play];
-}

-- (void)play
-{
-    OWSAssertIsOnMainThread();
+    BOOL success = [self.audioSession startAudioActivity:audioActivity];
+    OWSAssertDebug(success);
+
     OWSAssertDebug(self.mediaUrl);
     OWSAssertDebug([self.delegate audioPlaybackState] != AudioPlaybackState_Playing);
@@ -157,7 +165,7 @@ NS_ASSUME_NONNULL_BEGIN
     [self.audioPlayerPoller invalidate];
     [self.delegate setAudioProgress:(CGFloat)[self.audioPlayer currentTime] duration:(CGFloat)[self.audioPlayer duration]];

-    [OWSAudioSession.shared endAudioActivity:self.audioActivity];
+    [self endAudioActivities];
     [DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self];
 }
@@ -170,10 +178,15 @@ NS_ASSUME_NONNULL_BEGIN
     [self.audioPlayerPoller invalidate];
     [self.delegate setAudioProgress:0 duration:0];

-    [OWSAudioSession.shared endAudioActivity:self.audioActivity];
+    [self endAudioActivities];
     [DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self];
 }

+- (void)endAudioActivities
+{
+    [self.audioSession endAudioActivity:self.audioActivity];
+}
+
 - (void)togglePlayState
 {
     OWSAssertIsOnMainThread();
@@ -181,7 +194,7 @@ NS_ASSUME_NONNULL_BEGIN
     if (self.delegate.audioPlaybackState == AudioPlaybackState_Playing) {
         [self pause];
     } else {
-        [self play];
+        [self playWithAudioActivity:self.audioActivity];
     }
 }

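With `playWithCurrentAudioCategory` and `playWithPlaybackAudioCategory` gone, callers construct the player with a behavior and call plain `play()`; resuming via `togglePlayState` routes through `playWithAudioActivity:` so the same activity is reused. A usage sketch (the URL and delegate are placeholders, not from this diff):

```swift
// Usage sketch of the reworked API; `PlayerDelegate` stands in for any type
// conforming to OWSAudioPlayerDelegate, and the file URL is illustrative.
let url = URL(fileURLWithPath: "/path/to/voice-note.m4a")
let player = OWSAudioPlayer(mediaUrl: url,
                            audioBehavior: .audioMessagePlayback,
                            delegate: PlayerDelegate())

player.play()            // starts this player's audio activity
player.togglePlayState() // pause
player.togglePlayState() // resume via playWithAudioActivity:
player.stop()            // ends the activity so the session can be restored
```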