Merge remote-tracking branch 'upstream/dev' into feature/groups-rebuild

# Conflicts:
#	Session.xcodeproj/project.pbxproj
#	Session/Calls/CallVC.swift
#	Session/Calls/WebRTC/WebRTCSession+MessageHandling.swift
#	Session/Calls/WebRTC/WebRTCSession.swift
#	Session/Meta/AppDelegate.swift
#	Session/Utilities/Permissions.swift
#	SessionMessagingKit/Sending & Receiving/Errors/MessageReceiverError.swift
#	SessionMessagingKit/Sending & Receiving/MessageReceiver.swift
#	SessionUtilitiesKit/General/SNUserDefaults.swift
pull/894/head
Morgan Pretty 2 months ago
commit 1db80dfa13

@ -181,6 +181,8 @@
9473386E2BDF5F3E00B9E169 /* InfoPlist.xcstrings in Resources */ = {isa = PBXBuildFile; fileRef = 9473386D2BDF5F3E00B9E169 /* InfoPlist.xcstrings */; };
947AD6902C8968FF000B2730 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 947AD68F2C8968FF000B2730 /* Constants.swift */; };
94B3DC172AF8592200C88531 /* QuoteView_SwiftUI.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94B3DC162AF8592200C88531 /* QuoteView_SwiftUI.swift */; };
94C58AC92D2E037200609195 /* Permissions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94C58AC82D2E036E00609195 /* Permissions.swift */; };
94C5DCB02BE88170003AA8C5 /* BezierPathView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94C5DCAF2BE88170003AA8C5 /* BezierPathView.swift */; };
94E9BC0D2C7BFBDA006984EA /* Localization+Style.swift in Sources */ = {isa = PBXBuildFile; fileRef = 94E9BC0C2C7BFBDA006984EA /* Localization+Style.swift */; };
A11CD70D17FA230600A2D1B1 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A11CD70C17FA230600A2D1B1 /* QuartzCore.framework */; };
A163E8AB16F3F6AA0094D68B /* Security.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A163E8AA16F3F6A90094D68B /* Security.framework */; };
@ -1451,6 +1453,8 @@
9473386D2BDF5F3E00B9E169 /* InfoPlist.xcstrings */ = {isa = PBXFileReference; lastKnownFileType = text.json.xcstrings; path = InfoPlist.xcstrings; sourceTree = "<group>"; };
947AD68F2C8968FF000B2730 /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = "<group>"; };
94B3DC162AF8592200C88531 /* QuoteView_SwiftUI.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = QuoteView_SwiftUI.swift; sourceTree = "<group>"; };
94C58AC82D2E036E00609195 /* Permissions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Permissions.swift; sourceTree = "<group>"; };
94C5DCAF2BE88170003AA8C5 /* BezierPathView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BezierPathView.swift; sourceTree = "<group>"; };
94E9BC0C2C7BFBDA006984EA /* Localization+Style.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Localization+Style.swift"; sourceTree = "<group>"; };
A11CD70C17FA230600A2D1B1 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
A163E8AA16F3F6A90094D68B /* Security.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Security.framework; path = System/Library/Frameworks/Security.framework; sourceTree = SDKROOT; };
@ -3716,6 +3720,12 @@
FD09796527F6B0A800936362 /* Utilities */ = {
isa = PBXGroup;
children = (
94C58AC82D2E036E00609195 /* Permissions.swift */,
FD6A39422C2AD81600762359 /* BackgroundTaskManager.swift */,
FDFBB74A2A1EFF4900CA7350 /* Bencode.swift */,
FD6A39162C2A99A000762359 /* BencodeDecoder.swift */,
FD6A39182C2A99AB00762359 /* BencodeEncoder.swift */,
FD6A391A2C2A99B600762359 /* BencodeResponse.swift */,
FD97B23F2A3FEB050027DD57 /* ARC4RandomNumberGenerator.swift */,
FD7443452D07CA9F00862443 /* CGFloat+Utilities.swift */,
FD7443462D07CA9F00862443 /* CGPoint+Utilities.swift */,
@ -6015,6 +6025,7 @@
FD848B9328420164000E298B /* UnicodeScalar+Utilities.swift in Sources */,
FDE754CE2C9BAF37002A2623 /* ImageFormat.swift in Sources */,
FDE7551A2C9BC169002A2623 /* UIApplicationState+Utilities.swift in Sources */,
94C58AC92D2E037200609195 /* Permissions.swift in Sources */,
FD09796B27F6C67500936362 /* Failable.swift in Sources */,
FD7115FA28C8153400B47552 /* UIBarButtonItem+Combine.swift in Sources */,
FD705A92278D051200F16121 /* ReusableView.swift in Sources */,
@ -8055,6 +8066,7 @@
CODE_SIGN_ENTITLEMENTS = Session/Meta/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 528;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -8092,6 +8104,7 @@
"$(SRCROOT)",
);
LLVM_LTO = NO;
MARKETING_VERSION = 2.8.7;
OTHER_LDFLAGS = "$(inherited)";
OTHER_SWIFT_FLAGS = "$(inherited) \"-D\" \"COCOAPODS\" \"-DDEBUG\"";
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger";
@ -8121,6 +8134,7 @@
CODE_SIGN_ENTITLEMENTS = Session/Meta/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 528;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -8158,6 +8172,7 @@
"$(SRCROOT)",
);
LLVM_LTO = NO;
MARKETING_VERSION = 2.8.7;
OTHER_LDFLAGS = "$(inherited)";
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger";
PRODUCT_NAME = Session;

@ -87,6 +87,7 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
didSet {
stateDidChange?()
hasConnectedDidChange?()
updateCallDetailedStatus?("Call Connected")
}
}
@ -94,6 +95,7 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
didSet {
stateDidChange?()
hasEndedDidChange?()
updateCallDetailedStatus?("")
}
}
@ -113,6 +115,7 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
var remoteVideoStateDidChange: ((Bool) -> Void)?
var hasStartedReconnecting: (() -> Void)?
var hasReconnected: (() -> Void)?
var updateCallDetailedStatus: ((String) -> Void)?
// MARK: - Derived Properties
@ -250,6 +253,8 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
self.callInteractionId = interaction?.id
self.updateCallDetailedStatus?("Creating Call")
try? webRTCSession
.sendPreOffer(
db,
@ -257,10 +262,27 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
interactionId: interaction?.id,
in: thread
)
.retry(5)
// Start the timeout timer for the call
.handleEvents(receiveOutput: { [weak self] _ in self?.setupTimeoutTimer() })
.flatMap { _ in webRTCSession.sendOffer(to: thread) }
.sinkUntilComplete()
.flatMap { [weak self] _ in
self?.updateCallDetailedStatus?("Sending Call Offer")
return webRTCSession
.sendOffer(to: thread)
.retry(5)
}
.sinkUntilComplete(
receiveCompletion: { [weak self] result in
switch result {
case .finished:
SNLog("[Calls] Offer message sent")
self?.updateCallDetailedStatus?("Sending Connection Candidates")
case .failure(let error):
SNLog("[Calls] Error initializing call after 5 retries: \(error), ending call...")
self?.handleCallInitializationFailed()
}
}
)
}
func answerSessionCall() {
@ -270,6 +292,7 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
if let sdp = remoteSDP {
SNLog("[Calls] Got remote sdp already")
self.updateCallDetailedStatus?("Answering Call")
webRTCSession.handleRemoteSDP(sdp, from: sessionId) // This sends an answer message internally
}
}
@ -293,6 +316,11 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
hasEnded = true
}
/// Tears down a call whose initialization failed (invoked when sending the pre-offer/offer
/// still fails after the retry attempts — see the `sinkUntilComplete` failure branch above).
///
/// Ends the local call state first, then reports the end to the call manager so any
/// system/CallKit UI is dismissed as well.
func handleCallInitializationFailed() {
self.endSessionCall()
Singleton.callManager.reportCurrentCallEnded(reason: nil)
}
// MARK: - Call Message Handling
public func updateCallMessage(mode: EndCallMode, using dependencies: Dependencies) {
@ -404,6 +432,18 @@ public final class SessionCall: CurrentCallProtocol, WebRTCSessionDelegate {
isRemoteVideoEnabled = isEnabled
}
/// `WebRTCSessionDelegate` callback fired after the local ICE candidates were successfully sent.
///
/// Hops to the main queue because `updateCallDetailedStatus` drives UI (it updates the
/// detailed status label in `CallVC`).
public func iceCandidateDidSend() {
DispatchQueue.main.async {
// NOTE(review): status string is hard-coded English rather than a localized key — confirm intended
self.updateCallDetailedStatus?("Awaiting Recipient Answer...")
}
}
/// `WebRTCSessionDelegate` callback fired when remote ICE candidates are received.
///
/// Hops to the main queue because `updateCallDetailedStatus` drives UI (it updates the
/// detailed status label in `CallVC`).
public func iceCandidateDidReceive() {
DispatchQueue.main.async {
// NOTE(review): status string is hard-coded English rather than a localized key — confirm intended
self.updateCallDetailedStatus?("Handling Connection Candidates")
}
}
public func didReceiveHangUpSignal() {
self.hasEnded = true
DispatchQueue.main.async { [dependencies] in

@ -29,8 +29,8 @@ extension SessionCallManager {
if let conversationVC = presentingVC as? ConversationVC {
callVC.conversationVC = conversationVC
conversationVC.inputAccessoryView?.isHidden = true
conversationVC.inputAccessoryView?.alpha = 0
conversationVC.resignFirstResponder()
conversationVC.hideInputAccessoryView()
}
presentingVC.present(callVC, animated: true) {

@ -243,6 +243,7 @@ public final class SessionCallManager: NSObject, CallManagerProtocol {
{
let callVC = CallVC(for: call, using: dependencies)
callVC.conversationVC = conversationVC
conversationVC.resignFirstResponder()
conversationVC.hideInputAccessoryView()
presentingVC.present(callVC, animated: true, completion: nil)
}

@ -316,13 +316,30 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
result.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
result.themeTextColor = .textPrimary
result.textAlignment = .center
result.isHidden = call.hasConnected
if call.hasStartedConnecting { result.text = "callsConnecting".localized() }
return result
}()
/// Secondary, smaller status label showing the detailed call-setup step
/// (e.g. "Creating Call", "Sending Call Offer") fed via `call.updateCallDetailedStatus`.
private lazy var callDetailedInfoLabel: UILabel = {
let result: UILabel = UILabel()
result.font = .boldSystemFont(ofSize: Values.smallFontSize)
result.themeTextColor = .textPrimary
result.textAlignment = .center
return result
}()
/// Vertical stack pairing the main call-state label with the detailed status label;
/// hidden once the call has connected (mirrors the previous `callInfoLabel.isHidden` behaviour).
private lazy var callInfoLabelStackView: UIStackView = {
let result: UIStackView = UIStackView(arrangedSubviews: [callInfoLabel, callDetailedInfoLabel])
result.axis = .vertical
result.spacing = Values.mediumSpacing
result.isHidden = call.hasConnected
return result
}()
private lazy var callDurationLabel: UILabel = {
let result = UILabel()
result.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
@ -354,11 +371,11 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
remoteVideoView.alpha = isEnabled ? 1 : 0
}
if self.callInfoLabel.alpha < 0.5 {
if self.callInfoLabelStackView.alpha < 0.5 {
UIView.animate(withDuration: 0.25) {
self.operationPanel.alpha = 1
self.responsePanel.alpha = 1
self.callInfoLabel.alpha = 1
self.callInfoLabelStackView.alpha = 1
}
}
}
@ -391,7 +408,7 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
self?.durationTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { _ in
self?.updateDuration()
}
self?.callInfoLabel.isHidden = true
self?.callInfoLabelStackView.isHidden = true
self?.callDurationLabel.isHidden = false
}
}
@ -406,7 +423,7 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
self.call.hasStartedReconnecting = { [weak self] in
DispatchQueue.main.async {
self?.callInfoLabel.isHidden = false
self?.callInfoLabelStackView.isHidden = false
self?.callDurationLabel.isHidden = true
self?.callInfoLabel.text = "callsReconnecting".localized()
}
@ -414,10 +431,16 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
self.call.hasReconnected = { [weak self] in
DispatchQueue.main.async {
self?.callInfoLabel.isHidden = true
self?.callInfoLabelStackView.isHidden = true
self?.callDurationLabel.isHidden = false
}
}
self.call.updateCallDetailedStatus = { [weak self] status in
DispatchQueue.main.async {
self?.callDetailedInfoLabel.text = status
}
}
}
required init(coder: NSCoder) { preconditionFailure("Use init(for:) instead.") }
@ -514,10 +537,10 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
callInfoLabelContainer.pin(.top, to: .bottom, of: profilePictureView)
callInfoLabelContainer.pin(.bottom, to: .bottom, of: profilePictureContainer)
callInfoLabelContainer.pin([ UIView.HorizontalEdge.left, UIView.HorizontalEdge.right ], to: view)
callInfoLabelContainer.addSubview(callInfoLabel)
callInfoLabelContainer.addSubview(callInfoLabelStackView)
callInfoLabelContainer.addSubview(callDurationLabel)
callInfoLabel.translatesAutoresizingMaskIntoConstraints = false
callInfoLabel.center(in: callInfoLabelContainer)
callInfoLabelStackView.translatesAutoresizingMaskIntoConstraints = false
callInfoLabelStackView.center(in: callInfoLabelContainer)
callDurationLabel.translatesAutoresizingMaskIntoConstraints = false
callDurationLabel.center(in: callInfoLabelContainer)
}
@ -588,7 +611,7 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
func handleEndCallMessage() {
Log.info(.calls, "Ending call.")
self.callInfoLabel.isHidden = false
self.callInfoLabelStackView.isHidden = false
self.callDurationLabel.isHidden = true
self.callInfoLabel.text = "callsEnded".localized()
@ -597,7 +620,7 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
remoteVideoView.alpha = 0
self.operationPanel.alpha = 1
self.responsePanel.alpha = 1
self.callInfoLabel.alpha = 1
self.callInfoLabelStackView.alpha = 1
}
Timer.scheduledTimer(withTimeInterval: 2, repeats: false) { [weak self] _ in

@ -223,8 +223,8 @@ final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
let callVC = CallVC(for: self.call, using: dependencies)
if let conversationVC = (presentingVC as? TopBannerController)?.wrappedViewController() as? ConversationVC {
callVC.conversationVC = conversationVC
conversationVC.inputAccessoryView?.isHidden = true
conversationVC.inputAccessoryView?.alpha = 0
conversationVC.resignFirstResponder()
conversationVC.hideInputAccessoryView()
}
presentingVC.present(callVC, animated: true) { [weak self] in

@ -10,6 +10,7 @@ extension WebRTCSession {
public func handleICECandidates(_ candidate: [RTCIceCandidate]) {
Log.info(.calls, "Received ICE candidate message.")
self.delegate?.iceCandidateDidReceive()
candidate.forEach { peerConnection?.add($0, completionHandler: { _ in }) }
}
@ -23,7 +24,9 @@ extension WebRTCSession {
else {
guard sdp.type == .offer else { return }
self?.sendAnswer(to: sessionId).sinkUntilComplete()
self?.sendAnswer(to: sessionId)
.retry(5)
.sinkUntilComplete()
}
})
}

@ -13,6 +13,8 @@ public protocol WebRTCSessionDelegate: AnyObject {
func webRTCIsConnected()
func isRemoteVideoDidChange(isEnabled: Bool)
func iceCandidateDidSend()
func iceCandidateDidReceive()
func dataChannelDidOpen()
func didReceiveHangUpSignal()
func reconnectIfNeeded()
@ -340,8 +342,22 @@ public final class WebRTCSession : NSObject, RTCPeerConnectionDelegate {
)
}
.subscribe(on: DispatchQueue.global(qos: .userInitiated))
.flatMap { [dependencies] preparedRequest in preparedRequest.send(using: dependencies) }
.sinkUntilComplete()
.flatMap { [dependencies] preparedRequest in
preparedRequest
.send(using: dependencies)
.retry(5)
}
.sinkUntilComplete(
receiveCompletion: { [weak self] result in
switch result {
case .finished:
Log.info(.calls, "ICE candidates sent")
self?.delegate?.iceCandidateDidSend()
case .failure(let error):
Log.error(.calls, "Error sending ICE candidates due to error: \(error)")
}
}
)
}
public func endCall(
@ -374,6 +390,7 @@ public final class WebRTCSession : NSObject, RTCPeerConnectionDelegate {
)
.send(using: dependencies)
.subscribe(on: DispatchQueue.global(qos: .userInitiated), using: dependencies)
.retry(5)
.sinkUntilComplete()
}

@ -162,7 +162,7 @@ extension ConversationVC:
let threadId: String = self.viewModel.threadData.threadId
guard
AVAudioSession.sharedInstance().recordPermission == .granted,
Permissions.microphone == .granted,
self.viewModel.threadData.threadVariant == .contact,
viewModel.dependencies[singleton: .callManager].currentCall == nil,
let call: SessionCall = viewModel.dependencies[singleton: .storage]
@ -361,7 +361,7 @@ extension ConversationVC:
Permissions.requestMicrophonePermissionIfNeeded(using: viewModel.dependencies)
if AVAudioSession.sharedInstance().recordPermission != .granted {
if Permissions.microphone != .granted {
SNLog("Proceeding without microphone access. Any recorded video will be silent.")
}
@ -870,6 +870,8 @@ extension ConversationVC:
}
return
}
self.isKeyboardVisible = self.snInputView.isInputFirstResponder
self.inputAccessoryView?.resignFirstResponder()
self.inputAccessoryView?.isHidden = true
self.inputAccessoryView?.alpha = 0
}
@ -884,6 +886,9 @@ extension ConversationVC:
UIView.animate(withDuration: 0.25, animations: {
self.inputAccessoryView?.isHidden = false
self.inputAccessoryView?.alpha = 1
if self.isKeyboardVisible {
self.inputAccessoryView?.becomeFirstResponder()
}
})
}
@ -2314,7 +2319,7 @@ extension ConversationVC:
// Keep screen on
UIApplication.shared.isIdleTimerDisabled = false
guard AVAudioSession.sharedInstance().recordPermission == .granted else { return }
guard Permissions.microphone == .granted else { return }
// Cancel any current audio playback
self.viewModel.stopAudio()

@ -23,6 +23,7 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
private var isAutoLoadingNextPage: Bool = false
private var isLoadingMore: Bool = false
var isReplacingThread: Bool = false
var isKeyboardVisible: Bool = false
/// This flag indicates whether the thread data has been reloaded after a disappearance (it defaults to true as it will
/// never have disappeared before - this is only needed for value observers since they run asynchronously)
@ -528,6 +529,8 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
stopObservingChanges()
viewModel.updateDraft(to: snInputView.text)
inputAccessoryView?.resignFirstResponder()
NotificationCenter.default.removeObserver(self)
}
override func viewDidDisappear(_ animated: Bool) {
@ -1363,7 +1366,21 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
}
// Do nothing if there was no change
let keyboardEndFrameConverted: CGRect = self.view.convert(keyboardEndFrame, from: nil)
// Note: there is a bug on iOS 15.X for iPhone 6/6s where the converted frame is not accurate.
// In iOS 16.1 and later, the keyboard notification object is the screen the keyboard appears on.
// This is a workaround to fix the issue
let fromCoordinateSpace: UICoordinateSpace? = {
if let screen = (notification.object as? UIScreen) {
return screen.coordinateSpace
} else {
var result: UIView? = self.view.superview
while result != nil && result?.frame != UIScreen.main.bounds {
result = result?.superview
}
return result
}
}()
let keyboardEndFrameConverted: CGRect = fromCoordinateSpace?.convert(keyboardEndFrame, to: self.view) ?? keyboardEndFrame
guard keyboardEndFrameConverted != lastKnownKeyboardFrame else { return }
self.lastKnownKeyboardFrame = keyboardEndFrameConverted

@ -150,6 +150,10 @@ final class InputView: UIView, InputViewButtonDelegate, InputTextViewDelegate, M
}()
private lazy var additionalContentContainer = UIView()
/// Whether the inner text view currently holds first-responder status (i.e. the keyboard
/// is up for this input view) — used by `ConversationVC` to restore keyboard state after
/// the input accessory view is re-shown.
public var isInputFirstResponder: Bool {
inputTextView.isFirstResponder
}
// MARK: - Initialization
@ -468,6 +472,10 @@ final class InputView: UIView, InputViewButtonDelegate, InputTextViewDelegate, M
/// Forwards resignation to the inner text view so dismissing the keyboard targets the
/// actual editing control rather than this container view.
override func resignFirstResponder() -> Bool {
inputTextView.resignFirstResponder()
}
/// Forwards first-responder acquisition to the inner text view so that making this view
/// first responder brings up the keyboard for the text input (counterpart of the
/// `resignFirstResponder` override above).
override func becomeFirstResponder() -> Bool {
inputTextView.becomeFirstResponder()
}
func handleLongPress(_ gestureRecognizer: UITapGestureRecognizer) {
// Not relevant in this case

@ -171,7 +171,7 @@ final class CallMessageCell: MessageCell {
!dependencies[singleton: .storage, key: .areCallsEnabled]
) || (
messageInfo.state == .permissionDeniedMicrophone &&
AVAudioSession.sharedInstance().recordPermission != .granted
Permissions.microphone != .granted
)
)
infoImageViewWidthConstraint.constant = (shouldShowInfoIcon ? CallMessageCell.iconSize : 0)
@ -234,7 +234,7 @@ final class CallMessageCell: MessageCell {
!dependencies[singleton: .storage, key: .areCallsEnabled]
) || (
messageInfo.state == .permissionDeniedMicrophone &&
AVAudioSession.sharedInstance().recordPermission != .granted
Permissions.microphone != .granted
)
else { return }

@ -111,6 +111,8 @@ class MessageRequestFooterView: UIView {
self.onDecline = onDecline
self.themeBackgroundColor = .backgroundPrimary
setupLayout()
update(
threadVariant: threadVariant,
canWrite: canWrite,
@ -118,7 +120,6 @@ class MessageRequestFooterView: UIView {
threadRequiresApproval: threadRequiresApproval,
closedGroupAdminProfile: closedGroupAdminProfile
)
setupLayout()
}
required init?(coder: NSCoder) {

@ -299,6 +299,8 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
Log.info(.cat, "Setting 'isMainAppActive' to false.")
dependencies[defaults: .appGroup, key: .isMainAppActive] = false
Log.info(.cat, "Setting 'lastSeenHasMicrophonePermission'.")
dependencies[defaults: .appGroup, key: .lastSeenHasMicrophonePermission] = (Permissions.microphone == .granted)
Log.flush()
}
@ -937,8 +939,8 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
conversationVC.viewModel.threadData.threadId == call.sessionId
{
callVC.conversationVC = conversationVC
conversationVC.inputAccessoryView?.isHidden = true
conversationVC.inputAccessoryView?.alpha = 0
conversationVC.resignFirstResponder()
conversationVC.hideInputAccessoryView()
}
presentingVC.present(callVC, animated: true, completion: nil)

@ -8,7 +8,7 @@ import SessionUIKit
import SessionUtilitiesKit
import SessionMessagingKit
public enum Permissions {
extension Permissions {
@discardableResult public static func requestCameraPermissionIfNeeded(
presentingViewController: UIViewController? = nil,
using dependencies: Dependencies,
@ -58,39 +58,55 @@ public enum Permissions {
using dependencies: Dependencies,
onNotGranted: (() -> Void)? = nil
) {
switch AVAudioSession.sharedInstance().recordPermission {
case .granted: break
case .denied:
guard
let presentingViewController: UIViewController = (presentingViewController ?? dependencies[singleton: .appContext].frontMostViewController)
else { return }
onNotGranted?()
let confirmationModal: ConfirmationModal = ConfirmationModal(
info: ConfirmationModal.Info(
title: "permissionsRequired".localized(),
body: .text(
"permissionsMicrophoneAccessRequiredIos"
.put(key: "app_name", value: Constants.app_name)
.localized()
),
confirmTitle: "sessionSettings".localized(),
dismissOnConfirm: false,
onConfirm: { [weak presentingViewController] _ in
presentingViewController?.dismiss(animated: true, completion: {
UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!)
})
},
afterClosed: { onNotGranted?() }
)
let handlePermissionDenied: () -> Void = {
guard
let presentingViewController: UIViewController = (presentingViewController ?? dependencies[singleton: .appContext].frontMostViewController)
else { return }
onNotGranted?()
let confirmationModal: ConfirmationModal = ConfirmationModal(
info: ConfirmationModal.Info(
title: "permissionsRequired".localized(),
body: .text(
"permissionsMicrophoneAccessRequiredIos"
.put(key: "app_name", value: Constants.app_name)
.localized()
),
confirmTitle: "sessionSettings".localized(),
dismissOnConfirm: false,
onConfirm: { [weak presentingViewController] _ in
presentingViewController?.dismiss(animated: true, completion: {
UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!)
})
},
afterClosed: { onNotGranted?() }
)
presentingViewController.present(confirmationModal, animated: true, completion: nil)
case .undetermined:
onNotGranted?()
AVAudioSession.sharedInstance().requestRecordPermission { _ in }
default: break
)
presentingViewController.present(confirmationModal, animated: true, completion: nil)
}
if #available(iOS 17.0, *) {
switch AVAudioApplication.shared.recordPermission {
case .granted: break
case .denied: handlePermissionDenied()
case .undetermined:
onNotGranted?()
AVAudioApplication.requestRecordPermission { granted in
dependencies[defaults: .appGroup, key: .lastSeenHasMicrophonePermission] = granted
}
default: break
}
} else {
switch AVAudioSession.sharedInstance().recordPermission {
case .granted: break
case .denied: handlePermissionDenied()
case .undetermined:
onNotGranted?()
AVAudioSession.sharedInstance().requestRecordPermission { granted in
dependencies[defaults: .appGroup, key: .lastSeenHasMicrophonePermission] = granted
}
default: break
}
}
}

@ -24,6 +24,7 @@ public enum MessageReceiverError: LocalizedError {
case invalidConfigMessageHandling
case requiredThreadNotInConfig
case outdatedMessage
case duplicatedCall
case missingRequiredAdminPrivileges
public var isRetryable: Bool {
@ -76,6 +77,7 @@ public enum MessageReceiverError: LocalizedError {
case .invalidConfigMessageHandling: return "Invalid handling of a config message."
case .requiredThreadNotInConfig: return "Required thread not in config."
case .outdatedMessage: return "Message was sent before a config change which would have removed the message."
case .duplicatedCall: return "Duplicate call."
case .missingRequiredAdminPrivileges: return "Handling this message requires admin privileges which the current user does not have."
}
}

@ -91,10 +91,11 @@ extension MessageReceiver {
return
}
let hasMicrophonePermission: Bool = (AVAudioSession.sharedInstance().recordPermission == .granted)
guard db[.areCallsEnabled] && hasMicrophonePermission else {
guard db[.areCallsEnabled] && Permissions.microphone == .granted else {
let state: CallMessage.MessageInfo.State = (db[.areCallsEnabled] ? .permissionDeniedMicrophone : .permissionDenied)
SNLog("[MessageReceiver+Calls] Microphone permission is \(AVAudioSession.sharedInstance().recordPermission)")
if let interaction: Interaction = try MessageReceiver.insertCallInfoMessage(db, for: message, state: state, using: dependencies) {
let thread: SessionThread = try SessionThread.upsert(
db,
@ -293,12 +294,15 @@ extension MessageReceiver {
state: CallMessage.MessageInfo.State? = nil,
using dependencies: Dependencies
) throws -> Interaction? {
guard
(try? Interaction
guard (
try? Interaction
.filter(Interaction.Columns.variant == Interaction.Variant.infoCall)
.filter(Interaction.Columns.messageUuid == message.uuid)
.isEmpty(db))
.defaulting(to: false),
.isEmpty(db)
).defaulting(to: false)
else { throw MessageReceiverError.duplicatedCall }
guard
let sender: String = message.sender,
!SessionThread.isMessageRequest(
db,

@ -466,6 +466,7 @@ public enum MessageReceiver {
case is ReadReceipt: return true
case is TypingIndicator: return true
case is UnsendRequest: return true
case is CallMessage: return (threadId != dependencies[cache: .general].sessionId.hexString)
case let message as ClosedGroupControlMessage:
// Only re-show a legacy group conversation if we are going to add a control text message
@ -484,7 +485,7 @@ public enum MessageReceiver {
is GroupUpdateMemberLeftMessage, is GroupUpdateMemberLeftNotificationMessage,
is GroupUpdateInviteResponseMessage, is GroupUpdateDeleteMemberContentMessage:
return false
/// Currently this is just for handling the `groupKicked` message which is sent to a group so the same rules as above apply
case is LibSessionMessage: return false

@ -207,16 +207,17 @@ public class NSENotificationPresenter: NotificationsManagerType {
let senderName: String = Profile.displayName(db, id: interaction.authorId, threadVariant: thread.variant, using: dependencies)
if messageInfo.state == .permissionDenied {
notificationContent.body = "callsYouMissedCallPermissions"
.put(key: "name", value: senderName)
.localizedDeformatted()
}
else if messageInfo.state == .permissionDeniedMicrophone {
notificationContent.body = String(
format: "callsMissedCallFrom".localized(),
senderName
)
switch messageInfo.state {
case .permissionDenied:
notificationContent.body = "callsYouMissedCallPermissions"
.put(key: "name", value: senderName)
.localizedDeformatted()
case .permissionDeniedMicrophone:
notificationContent.body = "callsMissedCallFrom"
.put(key: "name", value: senderName)
.localizedDeformatted()
default:
break
}
addNotifcationRequest(

@ -107,6 +107,15 @@ public final class NotificationServiceExtension: UNNotificationServiceExtension
(dependencies[defaults: .appGroup, key: .lastCallPreOffer] != nil)
)
let hasMicrophonePermission: Bool = {
return switch Permissions.microphone {
case .undetermined:
(UserDefaults.sharedLokiProject?[.lastSeenHasMicrophonePermission]).defaulting(to: false)
default:
Permissions.microphone == .granted
}
}()
// HACK: It is important to use write synchronously here to avoid a race condition
// where the completeSilenty() is called before the local notification request
// is added to notification center
@ -153,6 +162,7 @@ public final class NotificationServiceExtension: UNNotificationServiceExtension
using: dependencies
)
// FIXME: Do we need to call it here? It does nothing other than log what kind of message we received
try MessageReceiver.handleCallMessage(
db,
threadId: threadId,
@ -165,7 +175,6 @@ public final class NotificationServiceExtension: UNNotificationServiceExtension
throw NotificationError.ignorableMessage
}
let hasMicrophonePermission: Bool = (AVAudioSession.sharedInstance().recordPermission == .granted)
switch ((db[.areCallsEnabled] && hasMicrophonePermission), isCallOngoing) {
case (false, _):
if

@ -106,6 +106,9 @@ public extension UserDefaults.BoolKey {
/// Indicates whether there is an ongoing call
static let isCallOngoing: UserDefaults.BoolKey = "isCallOngoing"
/// Indicates whether we had the microphone permission the last time the app went to the background
static let lastSeenHasMicrophonePermission: UserDefaults.BoolKey = "lastSeenHasMicrophonePermission"
}
public extension UserDefaults.DateKey {

@ -0,0 +1,39 @@
// Copyright © 2025 Rangeproof Pty Ltd. All rights reserved.
import AVFAudio
public enum Permissions {

    /// Unified microphone authorisation state, abstracting over which AVFAudio API
    /// the running OS version provides (`AVAudioApplication` vs `AVAudioSession`).
    ///
    /// NOTE(review): the type name contains a typo ("Permisson"); it is public API,
    /// so renaming it is source-breaking and should happen in a coordinated refactor.
    public enum MicrophonePermisson {
        case denied
        case granted
        case undetermined
        case unknown
    }

    /// The current microphone record permission.
    ///
    /// On iOS 17+ this reads `AVAudioApplication.shared.recordPermission` (the
    /// replacement for the session-based API); on earlier systems it falls back to
    /// `AVAudioSession.sharedInstance().recordPermission`. Unrecognised future
    /// values map to `.unknown`.
    public static var microphone: MicrophonePermisson {
        if #available(iOSApplicationExtension 17.0, *) {
            switch AVAudioApplication.shared.recordPermission {
                case .granted: return .granted
                case .denied: return .denied
                case .undetermined: return .undetermined
                @unknown default: return .unknown
            }
        }

        // Pre-iOS 17 fallback: the deprecated session-based permission API.
        switch AVAudioSession.sharedInstance().recordPermission {
            case .granted: return .granted
            case .denied: return .denied
            case .undetermined: return .undetermined
            @unknown default: return .unknown
        }
    }
}
Loading…
Cancel
Save