Merge branch 'dev' into notification-patch

pull/1061/head
Ryan ZHAO 2 months ago
commit 9207418100

@ -192,6 +192,7 @@
9473386E2BDF5F3E00B9E169 /* InfoPlist.xcstrings in Resources */ = {isa = PBXBuildFile; fileRef = 9473386D2BDF5F3E00B9E169 /* InfoPlist.xcstrings */; };
947AD6902C8968FF000B2730 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 947AD68F2C8968FF000B2730 /* Constants.swift */; };
94B3DC172AF8592200C88531 /* QuoteView_SwiftUI.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94B3DC162AF8592200C88531 /* QuoteView_SwiftUI.swift */; };
94C58AC92D2E037200609195 /* Permissions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94C58AC82D2E036E00609195 /* Permissions.swift */; };
94C5DCB02BE88170003AA8C5 /* BezierPathView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94C5DCAF2BE88170003AA8C5 /* BezierPathView.swift */; };
94E9BC0D2C7BFBDA006984EA /* Localization+Style.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94E9BC0C2C7BFBDA006984EA /* Localization+Style.swift */; };
A11CD70D17FA230600A2D1B1 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A11CD70C17FA230600A2D1B1 /* QuartzCore.framework */; };
@ -1398,6 +1399,7 @@
9473386D2BDF5F3E00B9E169 /* InfoPlist.xcstrings */ = {isa = PBXFileReference; lastKnownFileType = text.json.xcstrings; path = InfoPlist.xcstrings; sourceTree = "<group>"; };
947AD68F2C8968FF000B2730 /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = "<group>"; };
94B3DC162AF8592200C88531 /* QuoteView_SwiftUI.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = QuoteView_SwiftUI.swift; sourceTree = "<group>"; };
94C58AC82D2E036E00609195 /* Permissions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Permissions.swift; sourceTree = "<group>"; };
94C5DCAF2BE88170003AA8C5 /* BezierPathView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BezierPathView.swift; sourceTree = "<group>"; };
94E9BC0C2C7BFBDA006984EA /* Localization+Style.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Localization+Style.swift"; sourceTree = "<group>"; };
A11CD70C17FA230600A2D1B1 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
@ -3625,6 +3627,7 @@
FD09796527F6B0A800936362 /* Utilities */ = {
isa = PBXGroup;
children = (
94C58AC82D2E036E00609195 /* Permissions.swift */,
FD6A39422C2AD81600762359 /* BackgroundTaskManager.swift */,
FDFBB74A2A1EFF4900CA7350 /* Bencode.swift */,
FD6A39162C2A99A000762359 /* BencodeDecoder.swift */,
@ -5824,6 +5827,7 @@
C3C2AC2E2553CBEB00C340D1 /* String+Trimming.swift in Sources */,
FD17D7C727F5207C00122BE0 /* DatabaseMigrator+Utilities.swift in Sources */,
FD848B9328420164000E298B /* UnicodeScalar+Utilities.swift in Sources */,
94C58AC92D2E037200609195 /* Permissions.swift in Sources */,
FD09796B27F6C67500936362 /* Failable.swift in Sources */,
FD7115FA28C8153400B47552 /* UIBarButtonItem+Combine.swift in Sources */,
FD705A92278D051200F16121 /* ReusableView.swift in Sources */,
@ -7825,7 +7829,7 @@
CODE_SIGN_ENTITLEMENTS = Session/Meta/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 527;
CURRENT_PROJECT_VERSION = 528;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -7896,7 +7900,7 @@
CODE_SIGN_ENTITLEMENTS = Session/Meta/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 527;
CURRENT_PROJECT_VERSION = 528;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",

@ -1,5 +1,5 @@
{
"originHash" : "c57241b796915b0642f9c260463b2d6fd7d5198beafde785c590f3a7d80d31f5",
"originHash" : "4c95b434de06c87c75c3ef96fa055ec67e885a4a4ad78caafd7925b131995b07",
"pins" : [
{
"identity" : "cocoalumberjack",

@ -87,6 +87,7 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
didSet {
stateDidChange?()
hasConnectedDidChange?()
updateCallDetailedStatus?("Call Connected")
}
}
@ -94,6 +95,7 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
didSet {
stateDidChange?()
hasEndedDidChange?()
updateCallDetailedStatus?("")
}
}
@ -113,6 +115,7 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
var remoteVideoStateDidChange: ((Bool) -> Void)?
var hasStartedReconnecting: (() -> Void)?
var hasReconnected: (() -> Void)?
var updateCallDetailedStatus: ((String) -> Void)?
// MARK: - Derived Properties
@ -249,6 +252,8 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
self.callInteractionId = interaction?.id
self.updateCallDetailedStatus?("Creating Call")
try? webRTCSession
.sendPreOffer(
db,
@ -256,10 +261,27 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
interactionId: interaction?.id,
in: thread
)
.retry(5)
// Start the timeout timer for the call
.handleEvents(receiveOutput: { [weak self] _ in self?.setupTimeoutTimer() })
.flatMap { _ in webRTCSession.sendOffer(to: thread) }
.sinkUntilComplete()
.flatMap { [weak self] _ in
self?.updateCallDetailedStatus?("Sending Call Offer")
return webRTCSession
.sendOffer(to: thread)
.retry(5)
}
.sinkUntilComplete(
receiveCompletion: { [weak self] result in
switch result {
case .finished:
SNLog("[Calls] Offer message sent")
self?.updateCallDetailedStatus?("Sending Connection Candidates")
case .failure(let error):
SNLog("[Calls] Error initializing call after 5 retries: \(error), ending call...")
self?.handleCallInitializationFailed()
}
}
)
}
func answerSessionCall() {
@ -269,6 +291,7 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
if let sdp = remoteSDP {
SNLog("[Calls] Got remote sdp already")
self.updateCallDetailedStatus?("Answering Call")
webRTCSession.handleRemoteSDP(sdp, from: sessionId) // This sends an answer message internally
}
}
@ -292,6 +315,11 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
hasEnded = true
}
/// Tears down a call whose initialization failed (e.g. the pre-offer/offer send
/// exhausted its retries — see the `.failure` branch of `startSessionCall`).
///
/// Order matters: the session-level call state is ended first, then the system
/// call manager is told the current call ended (no CallKit end-reason is given).
func handleCallInitializationFailed() {
self.endSessionCall()
Singleton.callManager.reportCurrentCallEnded(reason: nil)
}
// MARK: - Call Message Handling
public func updateCallMessage(mode: EndCallMode, using dependencies: Dependencies) {
@ -402,6 +430,18 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
isRemoteVideoEnabled = isEnabled
}
/// `WebRTCSessionDelegate` callback invoked after our ICE candidates were
/// successfully sent (see the `.finished` completion in `WebRTCSession`).
/// Hops to the main queue before invoking the UI-facing status callback.
public func iceCandidateDidSend() {
    let reportStatus = {
        self.updateCallDetailedStatus?("Awaiting Recipient Answer...")
    }
    DispatchQueue.main.async(execute: reportStatus)
}
/// `WebRTCSessionDelegate` callback invoked when an ICE candidate message is
/// received (called from `WebRTCSession.handleICECandidates`).
/// Hops to the main queue before invoking the UI-facing status callback.
public func iceCandidateDidReceive() {
    let reportStatus = {
        self.updateCallDetailedStatus?("Handling Connection Candidates")
    }
    DispatchQueue.main.async(execute: reportStatus)
}
public func didReceiveHangUpSignal() {
self.hasEnded = true
DispatchQueue.main.async {

@ -28,8 +28,8 @@ extension SessionCallManager {
let callVC = CallVC(for: call)
if let conversationVC = presentingVC as? ConversationVC {
callVC.conversationVC = conversationVC
conversationVC.inputAccessoryView?.isHidden = true
conversationVC.inputAccessoryView?.alpha = 0
conversationVC.resignFirstResponder()
conversationVC.hideInputAccessoryView()
}
presentingVC.present(callVC, animated: true) {

@ -246,6 +246,7 @@ public final class SessionCallManager: NSObject, CallManagerProtocol {
{
let callVC = CallVC(for: call)
callVC.conversationVC = conversationVC
conversationVC.resignFirstResponder()
conversationVC.hideInputAccessoryView()
presentingVC.present(callVC, animated: true, completion: nil)
}

@ -315,13 +315,30 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
result.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
result.themeTextColor = .textPrimary
result.textAlignment = .center
result.isHidden = call.hasConnected
if call.hasStartedConnecting { result.text = "callsConnecting".localized() }
return result
}()
/// Secondary, smaller label that surfaces fine-grained call-setup status text
/// delivered via `call.updateCallDetailedStatus`.
private lazy var callDetailedInfoLabel: UILabel = {
    let label = UILabel()
    label.font = .boldSystemFont(ofSize: Values.smallFontSize)
    label.themeTextColor = .textPrimary
    label.textAlignment = .center
    return label
}()
/// Vertical stack combining the main call-info label with the detailed status
/// label; starts hidden when the call has already connected.
private lazy var callInfoLabelStackView: UIStackView = {
    let stack = UIStackView(arrangedSubviews: [callInfoLabel, callDetailedInfoLabel])
    stack.axis = .vertical
    stack.spacing = Values.mediumSpacing
    stack.isHidden = call.hasConnected
    return stack
}()
private lazy var callDurationLabel: UILabel = {
let result = UILabel()
result.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
@ -350,11 +367,11 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
remoteVideoView.alpha = isEnabled ? 1 : 0
}
if self.callInfoLabel.alpha < 0.5 {
if self.callInfoLabelStackView.alpha < 0.5 {
UIView.animate(withDuration: 0.25) {
self.operationPanel.alpha = 1
self.responsePanel.alpha = 1
self.callInfoLabel.alpha = 1
self.callInfoLabelStackView.alpha = 1
}
}
}
@ -387,7 +404,7 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
self?.durationTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { _ in
self?.updateDuration()
}
self?.callInfoLabel.isHidden = true
self?.callInfoLabelStackView.isHidden = true
self?.callDurationLabel.isHidden = false
}
}
@ -402,7 +419,7 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
self.call.hasStartedReconnecting = { [weak self] in
DispatchQueue.main.async {
self?.callInfoLabel.isHidden = false
self?.callInfoLabelStackView.isHidden = false
self?.callDurationLabel.isHidden = true
self?.callInfoLabel.text = "callsReconnecting".localized()
}
@ -410,10 +427,16 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
self.call.hasReconnected = { [weak self] in
DispatchQueue.main.async {
self?.callInfoLabel.isHidden = true
self?.callInfoLabelStackView.isHidden = true
self?.callDurationLabel.isHidden = false
}
}
self.call.updateCallDetailedStatus = { [weak self] status in
DispatchQueue.main.async {
self?.callDetailedInfoLabel.text = status
}
}
}
required init(coder: NSCoder) { preconditionFailure("Use init(for:) instead.") }
@ -510,10 +533,10 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
callInfoLabelContainer.pin(.top, to: .bottom, of: profilePictureView)
callInfoLabelContainer.pin(.bottom, to: .bottom, of: profilePictureContainer)
callInfoLabelContainer.pin([ UIView.HorizontalEdge.left, UIView.HorizontalEdge.right ], to: view)
callInfoLabelContainer.addSubview(callInfoLabel)
callInfoLabelContainer.addSubview(callInfoLabelStackView)
callInfoLabelContainer.addSubview(callDurationLabel)
callInfoLabel.translatesAutoresizingMaskIntoConstraints = false
callInfoLabel.center(in: callInfoLabelContainer)
callInfoLabelStackView.translatesAutoresizingMaskIntoConstraints = false
callInfoLabelStackView.center(in: callInfoLabelContainer)
callDurationLabel.translatesAutoresizingMaskIntoConstraints = false
callDurationLabel.center(in: callInfoLabelContainer)
}
@ -587,7 +610,7 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
func handleEndCallMessage() {
SNLog("[Calls] Ending call.")
self.callInfoLabel.isHidden = false
self.callInfoLabelStackView.isHidden = false
self.callDurationLabel.isHidden = true
self.callInfoLabel.text = "callsEnded".localized()
@ -596,7 +619,7 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
remoteVideoView.alpha = 0
self.operationPanel.alpha = 1
self.responsePanel.alpha = 1
self.callInfoLabel.alpha = 1
self.callInfoLabelStackView.alpha = 1
}
Timer.scheduledTimer(withTimeInterval: 2, repeats: false) { [weak self] _ in

@ -204,8 +204,8 @@ final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
let callVC = CallVC(for: self.call)
if let conversationVC = (presentingVC as? TopBannerController)?.wrappedViewController() as? ConversationVC {
callVC.conversationVC = conversationVC
conversationVC.inputAccessoryView?.isHidden = true
conversationVC.inputAccessoryView?.alpha = 0
conversationVC.resignFirstResponder()
conversationVC.hideInputAccessoryView()
}
presentingVC.present(callVC, animated: true) { [weak self] in

@ -9,6 +9,7 @@ extension WebRTCSession {
public func handleICECandidates(_ candidate: [RTCIceCandidate]) {
SNLog("[Calls] Received ICE candidate message.")
self.delegate?.iceCandidateDidReceive()
candidate.forEach { peerConnection?.add($0, completionHandler: { _ in }) }
}
@ -22,7 +23,9 @@ extension WebRTCSession {
else {
guard sdp.type == .offer else { return }
self?.sendAnswer(to: sessionId).sinkUntilComplete()
self?.sendAnswer(to: sessionId)
.retry(5)
.sinkUntilComplete()
}
})
}

@ -12,6 +12,8 @@ public protocol WebRTCSessionDelegate: AnyObject {
func webRTCIsConnected()
func isRemoteVideoDidChange(isEnabled: Bool)
func iceCandidateDidSend()
func iceCandidateDidReceive()
func dataChannelDidOpen()
func didReceiveHangUpSignal()
func reconnectIfNeeded()
@ -339,9 +341,21 @@ public final class WebRTCSession : NSObject, RTCPeerConnectionDelegate {
}
.subscribe(on: DispatchQueue.global(qos: .userInitiated))
.flatMap { [dependencies = self.dependencies] sendData in
MessageSender.sendImmediate(data: sendData, using: dependencies)
MessageSender
.sendImmediate(data: sendData, using: dependencies)
.retry(5)
}
.sinkUntilComplete()
.sinkUntilComplete(
receiveCompletion: { [weak self] result in
switch result {
case .finished:
SNLog("[Calls] ICE candidates sent")
self?.delegate?.iceCandidateDidSend()
case .failure(let error):
SNLog("[Calls] Error sending ICE candidates due to error: \(error)")
}
}
)
}
public func endCall(
@ -375,6 +389,7 @@ public final class WebRTCSession : NSObject, RTCPeerConnectionDelegate {
MessageSender
.sendImmediate(data: preparedSendData, using: dependencies)
.subscribe(on: DispatchQueue.global(qos: .userInitiated))
.retry(5)
.sinkUntilComplete()
}

@ -121,7 +121,7 @@ extension ConversationVC:
let threadId: String = self.viewModel.threadData.threadId
guard
AVAudioSession.sharedInstance().recordPermission == .granted,
Permissions.microphone == .granted,
self.viewModel.threadData.threadVariant == .contact,
Singleton.callManager.currentCall == nil,
let call: SessionCall = Storage.shared.read({ [dependencies = viewModel.dependencies] db in
@ -323,7 +323,7 @@ extension ConversationVC:
Permissions.requestMicrophonePermissionIfNeeded()
if AVAudioSession.sharedInstance().recordPermission != .granted {
if Permissions.microphone != .granted {
SNLog("Proceeding without microphone access. Any recorded video will be silent.")
}
@ -807,6 +807,8 @@ extension ConversationVC:
}
return
}
self.isKeyboardVisible = self.snInputView.isInputFirstResponder
self.inputAccessoryView?.resignFirstResponder()
self.inputAccessoryView?.isHidden = true
self.inputAccessoryView?.alpha = 0
}
@ -821,6 +823,9 @@ extension ConversationVC:
UIView.animate(withDuration: 0.25, animations: {
self.inputAccessoryView?.isHidden = false
self.inputAccessoryView?.alpha = 1
if self.isKeyboardVisible {
self.inputAccessoryView?.becomeFirstResponder()
}
})
}
@ -2487,7 +2492,7 @@ extension ConversationVC:
// Keep screen on
UIApplication.shared.isIdleTimerDisabled = false
guard AVAudioSession.sharedInstance().recordPermission == .granted else { return }
guard Permissions.microphone == .granted else { return }
// Cancel any current audio playback
self.viewModel.stopAudio()

@ -22,6 +22,7 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
private var isAutoLoadingNextPage: Bool = false
private var isLoadingMore: Bool = false
var isReplacingThread: Bool = false
var isKeyboardVisible: Bool = false
/// This flag indicates whether the thread data has been reloaded after a disappearance (it defaults to true as it will
/// never have disappeared before - this is only needed for value observers since they run asynchronously)
@ -530,6 +531,8 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
stopObservingChanges()
viewModel.updateDraft(to: snInputView.text)
inputAccessoryView?.resignFirstResponder()
NotificationCenter.default.removeObserver(self)
}
override func viewDidDisappear(_ animated: Bool) {
@ -1387,7 +1390,21 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
}
// Do nothing if there was no change
let keyboardEndFrameConverted: CGRect = self.view.convert(keyboardEndFrame, from: nil)
// Note: there is a bug on iOS 15.X for iPhone 6/6s where the converted frame is not accurate.
// In iOS 16.1 and later, the keyboard notification object is the screen the keyboard appears on.
// This is a workaround to fix the issue
let fromCoordinateSpace: UICoordinateSpace? = {
if let screen = (notification.object as? UIScreen) {
return screen.coordinateSpace
} else {
var result: UIView? = self.view.superview
while result != nil && result?.frame != UIScreen.main.bounds {
result = result?.superview
}
return result
}
}()
let keyboardEndFrameConverted: CGRect = fromCoordinateSpace?.convert(keyboardEndFrame, to: self.view) ?? keyboardEndFrame
guard keyboardEndFrameConverted != lastKnownKeyboardFrame else { return }
self.lastKnownKeyboardFrame = keyboardEndFrameConverted

@ -141,6 +141,10 @@ final class InputView: UIView, InputViewButtonDelegate, InputTextViewDelegate, M
}()
private lazy var additionalContentContainer = UIView()
/// Whether the inner text view currently holds first-responder status.
/// Read by ConversationVC to remember keyboard state across call presentation.
public var isInputFirstResponder: Bool {
    return inputTextView.isFirstResponder
}
// MARK: - Initialization
@ -446,6 +450,10 @@ final class InputView: UIView, InputViewButtonDelegate, InputTextViewDelegate, M
/// Forwards first-responder resignation to the inner text view.
/// NOTE(review): intentionally does not call super — responder status is
/// owned by `inputTextView`, not this container view.
override func resignFirstResponder() -> Bool {
    return inputTextView.resignFirstResponder()
}
/// Forwards first-responder acquisition to the inner text view (mirrors
/// `resignFirstResponder` above in this type).
/// NOTE(review): intentionally does not call super — responder status is
/// owned by `inputTextView`, not this container view.
override func becomeFirstResponder() -> Bool {
    return inputTextView.becomeFirstResponder()
}
func handleLongPress(_ gestureRecognizer: UITapGestureRecognizer) {
// Not relevant in this case

@ -169,7 +169,7 @@ final class CallMessageCell: MessageCell {
!Storage.shared[.areCallsEnabled]
) || (
messageInfo.state == .permissionDeniedMicrophone &&
AVAudioSession.sharedInstance().recordPermission != .granted
Permissions.microphone != .granted
)
)
infoImageViewWidthConstraint.constant = (shouldShowInfoIcon ? CallMessageCell.iconSize : 0)
@ -230,7 +230,7 @@ final class CallMessageCell: MessageCell {
!Storage.shared[.areCallsEnabled]
) || (
messageInfo.state == .permissionDeniedMicrophone &&
AVAudioSession.sharedInstance().recordPermission != .granted
Permissions.microphone != .granted
)
else { return }

@ -109,13 +109,14 @@ class MessageRequestFooterView: UIView {
self.onDecline = onDecline
self.themeBackgroundColor = .backgroundPrimary
setupLayout()
update(
threadVariant: threadVariant,
canWrite: canWrite,
threadIsMessageRequest: threadIsMessageRequest,
threadRequiresApproval: threadRequiresApproval
)
setupLayout()
}
required init?(coder: NSCoder) {

@ -277,6 +277,9 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
Log.info("[AppDelegate] Setting 'isMainAppActive' to false.")
UserDefaults.sharedLokiProject?[.isMainAppActive] = false
Log.info("[AppDelegate] Setting 'lastSeenHasMicrophonePermission'.")
UserDefaults.sharedLokiProject?[.lastSeenHasMicrophonePermission] = (Permissions.microphone == .granted)
Log.flush()
}
@ -901,8 +904,8 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
conversationVC.viewModel.threadData.threadId == call.sessionId
{
callVC.conversationVC = conversationVC
conversationVC.inputAccessoryView?.isHidden = true
conversationVC.inputAccessoryView?.alpha = 0
conversationVC.resignFirstResponder()
conversationVC.hideInputAccessoryView()
}
presentingVC.present(callVC, animated: true, completion: nil)

@ -8,7 +8,7 @@ import SessionUIKit
import SessionUtilitiesKit
import SessionMessagingKit
public enum Permissions {
extension Permissions {
@discardableResult public static func requestCameraPermissionIfNeeded(
presentingViewController: UIViewController? = nil,
onAuthorized: (() -> Void)? = nil
@ -57,40 +57,56 @@ public enum Permissions {
presentingViewController: UIViewController? = nil,
onNotGranted: (() -> Void)? = nil
) {
switch AVAudioSession.sharedInstance().recordPermission {
case .granted: break
case .denied:
guard
Singleton.hasAppContext,
let presentingViewController: UIViewController = (presentingViewController ?? Singleton.appContext.frontmostViewController)
else { return }
onNotGranted?()
let confirmationModal: ConfirmationModal = ConfirmationModal(
info: ConfirmationModal.Info(
title: "permissionsRequired".localized(),
body: .text(
"permissionsMicrophoneAccessRequiredIos"
.put(key: "app_name", value: Constants.app_name)
.localized()
),
confirmTitle: "sessionSettings".localized(),
dismissOnConfirm: false,
onConfirm: { [weak presentingViewController] _ in
presentingViewController?.dismiss(animated: true, completion: {
UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!)
})
},
afterClosed: { onNotGranted?() }
)
let handlePermissionDenied: () -> Void = {
guard
Singleton.hasAppContext,
let presentingViewController: UIViewController = (presentingViewController ?? Singleton.appContext.frontmostViewController)
else { return }
onNotGranted?()
let confirmationModal: ConfirmationModal = ConfirmationModal(
info: ConfirmationModal.Info(
title: "permissionsRequired".localized(),
body: .text(
"permissionsMicrophoneAccessRequiredIos"
.put(key: "app_name", value: Constants.app_name)
.localized()
),
confirmTitle: "sessionSettings".localized(),
dismissOnConfirm: false,
onConfirm: { [weak presentingViewController] _ in
presentingViewController?.dismiss(animated: true, completion: {
UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!)
})
},
afterClosed: { onNotGranted?() }
)
presentingViewController.present(confirmationModal, animated: true, completion: nil)
case .undetermined:
onNotGranted?()
AVAudioSession.sharedInstance().requestRecordPermission { _ in }
default: break
)
presentingViewController.present(confirmationModal, animated: true, completion: nil)
}
if #available(iOS 17.0, *) {
switch AVAudioApplication.shared.recordPermission {
case .granted: break
case .denied: handlePermissionDenied()
case .undetermined:
onNotGranted?()
AVAudioApplication.requestRecordPermission { granted in
UserDefaults.sharedLokiProject?[.lastSeenHasMicrophonePermission] = granted
}
default: break
}
} else {
switch AVAudioSession.sharedInstance().recordPermission {
case .granted: break
case .denied: handlePermissionDenied()
case .undetermined:
onNotGranted?()
AVAudioSession.sharedInstance().requestRecordPermission { granted in
UserDefaults.sharedLokiProject?[.lastSeenHasMicrophonePermission] = granted
}
default: break
}
}
}

@ -23,6 +23,7 @@ public enum MessageReceiverError: LocalizedError {
case invalidConfigMessageHandling
case requiredThreadNotInConfig
case outdatedMessage
case duplicatedCall
public var isRetryable: Bool {
switch self {
@ -72,6 +73,7 @@ public enum MessageReceiverError: LocalizedError {
case .invalidConfigMessageHandling: return "Invalid handling of a config message."
case .requiredThreadNotInConfig: return "Required thread not in config."
case .outdatedMessage: return "Message was sent before a config change which would have removed the message."
case .duplicatedCall: return "Duplicate call."
}
}
}

@ -81,10 +81,11 @@ extension MessageReceiver {
return
}
let hasMicrophonePermission: Bool = (AVAudioSession.sharedInstance().recordPermission == .granted)
guard db[.areCallsEnabled] && hasMicrophonePermission else {
guard db[.areCallsEnabled] && Permissions.microphone == .granted else {
let state: CallMessage.MessageInfo.State = (db[.areCallsEnabled] ? .permissionDeniedMicrophone : .permissionDenied)
SNLog("[MessageReceiver+Calls] Microphone permission is \(AVAudioSession.sharedInstance().recordPermission)")
if let interaction: Interaction = try MessageReceiver.insertCallInfoMessage(db, for: message, state: state, using: dependencies) {
let thread: SessionThread = try SessionThread.upsert(
db,
@ -271,12 +272,15 @@ extension MessageReceiver {
state: CallMessage.MessageInfo.State? = nil,
using dependencies: Dependencies
) throws -> Interaction? {
guard
(try? Interaction
guard (
try? Interaction
.filter(Interaction.Columns.variant == Interaction.Variant.infoCall)
.filter(Interaction.Columns.messageUuid == message.uuid)
.isEmpty(db))
.defaulting(to: false),
.isEmpty(db)
).defaulting(to: false)
else { throw MessageReceiverError.duplicatedCall }
guard
let sender: String = message.sender,
let thread: SessionThread = try SessionThread.fetchOne(db, id: sender),
!thread.isMessageRequest(db)

@ -403,6 +403,13 @@ public enum MessageReceiver {
}
fallthrough
case is CallMessage:
if threadId == getUserHexEncodedPublicKey(db, using: dependencies) {
break
} else {
fallthrough
}
default:
// Only update the `shouldBeVisible` flag if the thread is currently not visible

@ -176,16 +176,17 @@ public class NSENotificationPresenter: NSObject, NotificationsProtocol {
let senderName: String = Profile.displayName(db, id: interaction.authorId, threadVariant: thread.variant)
if messageInfo.state == .permissionDenied {
notificationContent.body = "callsYouMissedCallPermissions"
.put(key: "name", value: senderName)
.localizedDeformatted()
}
else if messageInfo.state == .permissionDeniedMicrophone {
notificationContent.body = String(
format: "callsMissedCallFrom".localized(),
senderName
)
switch messageInfo.state {
case .permissionDenied:
notificationContent.body = "callsYouMissedCallPermissions"
.put(key: "name", value: senderName)
.localizedDeformatted()
case .permissionDeniedMicrophone:
notificationContent.body = "callsMissedCallFrom"
.put(key: "name", value: senderName)
.localizedDeformatted()
default:
break
}
addNotifcationRequest(

@ -104,6 +104,15 @@ public final class NotificationServiceExtension: UNNotificationServiceExtension
(UserDefaults.sharedLokiProject?[.lastCallPreOffer]) != nil
)
let hasMicrophonePermission: Bool = {
return switch Permissions.microphone {
case .undetermined:
(UserDefaults.sharedLokiProject?[.lastSeenHasMicrophonePermission]).defaulting(to: false)
default:
Permissions.microphone == .granted
}
}()
// HACK: It is important to use write synchronously here to avoid a race condition
// where the completeSilenty() is called before the local notification request
// is added to notification center
@ -146,6 +155,7 @@ public final class NotificationServiceExtension: UNNotificationServiceExtension
using: dependencies
)
// FIXME: Do we need to call it here? It does nothing other than log what kind of message we received
try MessageReceiver.handleCallMessage(
db,
threadId: threadId,
@ -158,7 +168,6 @@ public final class NotificationServiceExtension: UNNotificationServiceExtension
throw NotificationError.ignorableMessage
}
let hasMicrophonePermission: Bool = (AVAudioSession.sharedInstance().recordPermission == .granted)
switch ((db[.areCallsEnabled] && hasMicrophonePermission), isCallOngoing) {
case (false, _):
if

@ -34,6 +34,7 @@ public enum SNUserDefaults {
case wasUnlinked
case isMainAppActive
case isCallOngoing
case lastSeenHasMicrophonePermission
}
public enum Date: Swift.String {

@ -0,0 +1,39 @@
// Copyright © 2025 Rangeproof Pty Ltd. All rights reserved.
import AVFAudio
// MARK: - Permissions

/// Namespace for OS permission checks shared between the main app and its
/// extensions (the companion file extends this with request helpers).
public enum Permissions {
    /// Normalised microphone permission state, independent of which API
    /// reported it (iOS 17+ `AVAudioApplication` vs legacy `AVAudioSession`).
    ///
    /// NOTE: the name is misspelled ("Permisson") but is kept as-is for source
    /// compatibility with existing callers; prefer the correctly-spelled
    /// `MicrophonePermission` alias in new code.
    public enum MicrophonePermisson {
        case denied
        case granted
        case undetermined
        case unknown
    }

    /// Correctly-spelled alias for `MicrophonePermisson` (backward-compatible fix).
    public typealias MicrophonePermission = MicrophonePermisson

    /// The current microphone record permission, read from `AVAudioApplication`
    /// on iOS 17+ and from `AVAudioSession` on earlier systems.
    public static var microphone: MicrophonePermisson {
        if #available(iOSApplicationExtension 17.0, *) {
            switch AVAudioApplication.shared.recordPermission {
                case .undetermined: return .undetermined
                case .denied: return .denied
                case .granted: return .granted
                // Future OS versions may add cases; map them to `.unknown`
                @unknown default: return .unknown
            }
        } else {
            switch AVAudioSession.sharedInstance().recordPermission {
                case .undetermined: return .undetermined
                case .denied: return .denied
                case .granted: return .granted
                @unknown default: return .unknown
            }
        }
    }
}
Loading…
Cancel
Save