Merge pull request #562 from RyanRory/voice-call-bug-fix-1

Call bug fix 1
RyanZhao authored 2 years ago, committed by GitHub
commit 38fc71f54a

@ -153,6 +153,8 @@
7BA68909272A27BE00EFC32F /* SessionCall.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BA68908272A27BE00EFC32F /* SessionCall.swift */; };
7BA6890D27325CCC00EFC32F /* SessionCallManager+CXCallController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BA6890C27325CCC00EFC32F /* SessionCallManager+CXCallController.swift */; };
7BA6890F27325CE300EFC32F /* SessionCallManager+CXProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BA6890E27325CE300EFC32F /* SessionCallManager+CXProvider.swift */; };
7BAADFCC27B0EF23007BCF92 /* CallVideoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BAADFCB27B0EF23007BCF92 /* CallVideoView.swift */; };
7BAADFCE27B215FE007BCF92 /* UIView+Draggable.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BAADFCD27B215FE007BCF92 /* UIView+Draggable.swift */; };
7BAF54CE27ACCEEC003D12F8 /* Storage+RecentSearchResults.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BAF54CB27ACCEEC003D12F8 /* Storage+RecentSearchResults.swift */; };
7BAF54CF27ACCEEC003D12F8 /* GlobalSearchViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BAF54CC27ACCEEC003D12F8 /* GlobalSearchViewController.swift */; };
7BAF54D027ACCEEC003D12F8 /* EmptySearchResultCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BAF54CD27ACCEEC003D12F8 /* EmptySearchResultCell.swift */; };
@ -1161,6 +1163,8 @@
7BA68908272A27BE00EFC32F /* SessionCall.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SessionCall.swift; sourceTree = "<group>"; };
7BA6890C27325CCC00EFC32F /* SessionCallManager+CXCallController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "SessionCallManager+CXCallController.swift"; sourceTree = "<group>"; };
7BA6890E27325CE300EFC32F /* SessionCallManager+CXProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "SessionCallManager+CXProvider.swift"; sourceTree = "<group>"; };
7BAADFCB27B0EF23007BCF92 /* CallVideoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallVideoView.swift; sourceTree = "<group>"; };
7BAADFCD27B215FE007BCF92 /* UIView+Draggable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIView+Draggable.swift"; sourceTree = "<group>"; };
7BAF54CB27ACCEEC003D12F8 /* Storage+RecentSearchResults.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Storage+RecentSearchResults.swift"; sourceTree = "<group>"; };
7BAF54CC27ACCEEC003D12F8 /* GlobalSearchViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GlobalSearchViewController.swift; sourceTree = "<group>"; };
7BAF54CD27ACCEEC003D12F8 /* EmptySearchResultCell.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = EmptySearchResultCell.swift; sourceTree = "<group>"; };
@ -2099,6 +2103,7 @@
B83F2B87240CB75A000A54AB /* UIImage+Scaling.swift */,
C31A6C59247F214E001123EF /* UIView+Glow.swift */,
C3548F0724456AB6009433A8 /* UIView+Wrapping.swift */,
7BAADFCD27B215FE007BCF92 /* UIView+Draggable.swift */,
7BFD1A892745C4F000FB91B9 /* Permissions.swift */,
);
path = Utilities;
@ -2111,6 +2116,7 @@
7B7CB18F270FB2150079FF93 /* MiniCallView.swift */,
7B1581E727210ECC00848B49 /* RenderView.swift */,
7B0EFDF52755CC5400FFAAE7 /* CallMissedTipsModal.swift */,
7BAADFCB27B0EF23007BCF92 /* CallVideoView.swift */,
);
path = "Views & Modals";
sourceTree = "<group>";
@ -5039,6 +5045,7 @@
45F32C222057297A00A300D5 /* MediaDetailViewController.m in Sources */,
B82149C125D605C6009C0F2A /* InfoBanner.swift in Sources */,
C3DAB3242480CB2B00725F25 /* SRCopyableLabel.swift in Sources */,
7BAADFCC27B0EF23007BCF92 /* CallVideoView.swift in Sources */,
B8CCF63F23975CFB0091D419 /* JoinOpenGroupVC.swift in Sources */,
34ABC0E421DD20C500ED9469 /* ConversationMessageMapping.swift in Sources */,
B85357C323A1BD1200AAF6CD /* SeedVC.swift in Sources */,
@ -5051,6 +5058,7 @@
7B1581E827210ECC00848B49 /* RenderView.swift in Sources */,
7BC707F227290ACB002817AD /* SessionCallManager.swift in Sources */,
3441FD9F21A3604F00BB9542 /* BackupRestoreViewController.swift in Sources */,
7BAADFCE27B215FE007BCF92 /* UIView+Draggable.swift in Sources */,
45C0DC1B1E68FE9000E04C47 /* UIApplication+OWS.swift in Sources */,
4539B5861F79348F007141FF /* PushRegistrationManager.swift in Sources */,
B8041A9525C8FA1D003C2166 /* MediaLoaderView.swift in Sources */,

@ -21,7 +21,7 @@ extension SessionCallManager: CXProviderDelegate {
if let _ = CurrentAppContext().frontmostViewController() as? CallVC {
call.answerSessionCall()
} else {
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // TODO: Handle more gracefully
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // FIXME: Handle more gracefully
let callVC = CallVC(for: self.currentCall!)
if let conversationVC = presentingVC as? ConversationVC {
callVC.conversationVC = conversationVC

@ -92,9 +92,8 @@ public final class SessionCallManager: NSObject {
// Report the incoming call to the system
self.provider.reportNewIncomingCall(with: call.callID, update: update) { error in
guard error == nil else {
self.currentCall = nil
self.reportCurrentCallEnded(reason: .failed)
completion(error)
Logger.error("failed to report new incoming call, error: \(error!)")
return
}
completion(nil)
@ -115,7 +114,7 @@ public final class SessionCallManager: NSObject {
} else {
call.updateCallMessage(mode: .local)
}
self.currentCall?.webRTCSession.dropConnection()
call.webRTCSession.dropConnection()
self.currentCall = nil
WebRTCSession.current = nil
}
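For reference, the incoming-call reporting touched in the hunk above follows the standard CallKit flow; below is a minimal, self-contained sketch of that pattern (the function and names are illustrative, not the project's API, and the completion handler is deliberately not invoked on failure, mirroring the change above).

```swift
import CallKit

// Illustrative sketch of the CallKit pattern modified above: report the call,
// and on failure end it via the provider and log instead of propagating the
// error through the completion handler.
func reportIncomingCall(provider: CXProvider, uuid: UUID, callerName: String,
                        completion: @escaping (Error?) -> Void) {
    let update = CXCallUpdate()
    update.localizedCallerName = callerName
    update.hasVideo = true
    provider.reportNewIncomingCall(with: uuid, update: update) { error in
        guard error == nil else {
            // Stand-in for reportCurrentCallEnded(reason: .failed) in the diff.
            provider.reportCall(with: uuid, endedAt: nil, reason: .failed)
            print("Failed to report new incoming call: \(error!)")
            return
        }
        completion(nil)
    }
}
```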

@ -7,7 +7,15 @@ extension CallVC : CameraManagerDelegate {
let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer)
let timestamp = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
let timestampNs = Int64(timestamp * 1000000000)
let frame = RTCVideoFrame(buffer: rtcPixelBuffer, rotation: RTCVideoRotation._0, timeStampNs: timestampNs)
let rotation: RTCVideoRotation = {
switch UIDevice.current.orientation {
case .landscapeRight: return RTCVideoRotation._90
case .portraitUpsideDown: return RTCVideoRotation._180
case .landscapeLeft: return RTCVideoRotation._270
default: return RTCVideoRotation._0
}
}()
let frame = RTCVideoFrame(buffer: rtcPixelBuffer, rotation: rotation, timeStampNs: timestampNs)
frame.timeStamp = Int32(timestamp)
call.webRTCSession.handleLocalFrameCaptured(frame)
}
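The orientation-to-rotation mapping added above can also be read as a small standalone helper; a sketch under the assumption that the same WebRTC `RTCVideoRotation` cases are in scope (the extension itself is hypothetical, not part of this change):

```swift
import UIKit
import WebRTC

// Hypothetical helper expressing the same mapping used above when tagging
// locally captured frames with a rotation.
extension UIDeviceOrientation {
    var rtcVideoRotation: RTCVideoRotation {
        switch self {
        case .landscapeRight:     return RTCVideoRotation._90
        case .portraitUpsideDown: return RTCVideoRotation._180
        case .landscapeLeft:      return RTCVideoRotation._270
        default:                  return RTCVideoRotation._0 // portrait, face up/down, unknown
        }
    }
}
```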

@ -20,20 +20,21 @@ final class CallVC : UIViewController, VideoPreviewDelegate {
}()
// MARK: UI Components
private lazy var localVideoView: RTCMTLVideoView = {
let result = RTCMTLVideoView()
private lazy var localVideoView: LocalVideoView = {
let result = LocalVideoView()
result.isHidden = !call.isVideoEnabled
result.contentMode = .scaleAspectFill
result.set(.width, to: 80)
result.set(.height, to: 173)
result.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(handlePanGesture)))
result.layer.cornerRadius = 10
result.layer.masksToBounds = true
result.set(.width, to: LocalVideoView.width)
result.set(.height, to: LocalVideoView.height)
result.makeViewDraggable()
return result
}()
private lazy var remoteVideoView: RTCMTLVideoView = {
let result = RTCMTLVideoView()
private lazy var remoteVideoView: RemoteVideoView = {
let result = RemoteVideoView()
result.alpha = 0
result.contentMode = .scaleAspectFill
result.backgroundColor = .black
result.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(handleRemoteVideoViewTapped)))
return result
}()
@ -255,10 +256,12 @@ final class CallVC : UIViewController, VideoPreviewDelegate {
}
}
}
setupOrientationMonitoring()
NotificationCenter.default.addObserver(self, selector: #selector(audioRouteDidChange), name: AVAudioSession.routeChangeNotification, object: nil)
}
deinit {
UIDevice.current.endGeneratingDeviceOrientationNotifications()
NotificationCenter.default.removeObserver(self)
}
@ -326,6 +329,7 @@ final class CallVC : UIViewController, VideoPreviewDelegate {
if (call.isVideoEnabled && shouldRestartCamera) { cameraManager.start() }
shouldRestartCamera = true
addLocalVideoView()
remoteVideoView.alpha = call.isRemoteVideoEnabled ? 1 : 0
}
override func viewWillDisappear(_ animated: Bool) {
@ -334,6 +338,41 @@ final class CallVC : UIViewController, VideoPreviewDelegate {
localVideoView.removeFromSuperview()
}
// MARK: - Orientation
private func setupOrientationMonitoring() {
UIDevice.current.beginGeneratingDeviceOrientationNotifications()
NotificationCenter.default.addObserver(self, selector: #selector(didChangeDeviceOrientation), name: UIDevice.orientationDidChangeNotification, object: UIDevice.current)
}
@objc func didChangeDeviceOrientation(notification: Notification) {
func rotateAllButtons(rotationAngle: CGFloat) {
let transform = CGAffineTransform(rotationAngle: rotationAngle)
UIView.animate(withDuration: 0.2) {
self.answerButton.transform = transform
self.hangUpButton.transform = transform
self.switchAudioButton.transform = transform
self.switchCameraButton.transform = transform
self.videoButton.transform = transform
self.volumeView.transform = transform
}
}
switch UIDevice.current.orientation {
case .portrait:
rotateAllButtons(rotationAngle: 0)
case .portraitUpsideDown:
rotateAllButtons(rotationAngle: .pi)
case .landscapeLeft:
rotateAllButtons(rotationAngle: .halfPi)
case .landscapeRight:
rotateAllButtons(rotationAngle: .pi + .halfPi)
default:
break
}
}
// MARK: Call signalling
func handleAnswerMessage(_ message: CallMessage) {
callInfoLabel.text = "Connecting..."
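`.halfPi`, used in the button-rotation handler above, is not a CoreGraphics constant; it appears to be a project convenience. A minimal sketch of what it presumably is (an assumption, not the project's actual definition):

```swift
import CoreGraphics

// Assumed convenience constant; the project likely defines this elsewhere.
extension CGFloat {
    static let halfPi: CGFloat = .pi / 2
}
```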
@ -471,38 +510,6 @@ final class CallVC : UIViewController, VideoPreviewDelegate {
}
}
// MARK: Pan gesture handling
@objc private func handlePanGesture(gesture: UIPanGestureRecognizer) {
let location = gesture.location(in: self.view)
if let draggedView = gesture.view {
draggedView.center = location
if gesture.state == .ended {
let sideMargin = 40 + Values.verySmallSpacing
if draggedView.frame.midX >= self.view.layer.frame.width / 2 {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.x = self.view.layer.frame.width - sideMargin
}, completion: nil)
}else{
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.x = sideMargin
}, completion: nil)
}
let topMargin = UIApplication.shared.keyWindow!.safeAreaInsets.top + Values.veryLargeSpacing
if draggedView.frame.minY <= topMargin {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.y = topMargin + draggedView.frame.size.height / 2
}, completion: nil)
}
let bottomMargin = UIApplication.shared.keyWindow!.safeAreaInsets.bottom
if draggedView.frame.maxY >= self.view.layer.frame.height {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.y = self.view.layer.frame.height - draggedView.frame.size.height / 2 - bottomMargin
}, completion: nil)
}
}
}
}
@objc private func handleRemoteVideoViewTapped(gesture: UITapGestureRecognizer) {
let isHidden = callInfoLabel.alpha < 0.5
UIView.animate(withDuration: 0.5) {

@ -0,0 +1,83 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import WebRTC
import Foundation
// MARK: RemoteVideoView
class RemoteVideoView: RTCMTLVideoView {
override func renderFrame(_ frame: RTCVideoFrame?) {
super.renderFrame(frame)
guard let frame = frame else { return }
DispatchMainThreadSafe {
let frameRatio = Double(frame.height) / Double(frame.width)
let frameRotation = frame.rotation
let deviceRotation = UIDevice.current.orientation
var rotationOverride: RTCVideoRotation? = nil
switch deviceRotation {
case .portrait, .portraitUpsideDown:
// We don't have to do anything; the renderer will automatically make sure it's right-side-up.
break
case .landscapeLeft:
switch frameRotation {
case RTCVideoRotation._0: rotationOverride = RTCVideoRotation._90 // Landscape left
case RTCVideoRotation._90: rotationOverride = RTCVideoRotation._180 // Portrait
case RTCVideoRotation._180: rotationOverride = RTCVideoRotation._270 // Landscape right
case RTCVideoRotation._270: rotationOverride = RTCVideoRotation._0 // Portrait upside-down
default: break
}
case .landscapeRight:
switch frameRotation {
case RTCVideoRotation._0: rotationOverride = RTCVideoRotation._270 // Landscape left
case RTCVideoRotation._90: rotationOverride = RTCVideoRotation._0 // Portrait
case RTCVideoRotation._180: rotationOverride = RTCVideoRotation._90 // Landscape right
case RTCVideoRotation._270: rotationOverride = RTCVideoRotation._180 // Portrait upside-down
default: break
}
default:
// Do nothing if we're face down, up, etc.
// Assume we're already set up for the correct orientation.
break
}
if let rotationOverride = rotationOverride {
self.rotationOverride = NSNumber(value: rotationOverride.rawValue)
if [ RTCVideoRotation._0, RTCVideoRotation._180 ].contains(rotationOverride) {
self.videoContentMode = .scaleAspectFill
} else {
self.videoContentMode = .scaleAspectFit
}
} else {
self.rotationOverride = nil
if [ RTCVideoRotation._0, RTCVideoRotation._180 ].contains(frameRotation) {
self.videoContentMode = .scaleAspectFill
} else {
self.videoContentMode = .scaleAspectFit
}
}
// If the frame isn't a typical mobile aspect ratio, always use .scaleAspectFit
if frameRatio < 1.5 {
self.videoContentMode = .scaleAspectFit
}
}
}
}
// MARK: LocalVideoView
class LocalVideoView: RTCMTLVideoView {
static let width: CGFloat = 80
static let height: CGFloat = 173
override func renderFrame(_ frame: RTCVideoFrame?) {
super.renderFrame(frame)
DispatchMainThreadSafe {
// This is a workaround for a weird issue where the rotationOverride
// sometimes doesn't take effect if it's only set once during initialization.
self.rotationOverride = NSNumber(value: RTCVideoRotation._0.rawValue)
self.videoContentMode = .scaleAspectFill
}
}
}
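These views only take effect once they are registered as renderers for a video track; a hedged sketch of that wiring using the standard WebRTC renderer API (the function and the container/track arguments are illustrative):

```swift
import UIKit
import WebRTC

// Illustrative wiring: once added as a renderer, the track delivers each
// decoded frame to renderFrame(_:) above, where the rotation override and
// content mode are adjusted.
func attachRemoteRenderer(to track: RTCVideoTrack, in container: UIView) -> RemoteVideoView {
    let remoteView = RemoteVideoView()
    remoteView.frame = container.bounds
    remoteView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
    container.addSubview(remoteView)
    track.add(remoteView) // RTCMTLVideoView conforms to RTCVideoRenderer
    return remoteView
}
```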

@ -151,7 +151,7 @@ final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
public func showCallVC(answer: Bool) {
dismiss()
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // TODO: Handle more gracefully
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // FIXME: Handle more gracefully
let callVC = CallVC(for: self.call)
if let conversationVC = presentingVC as? ConversationVC {
callVC.conversationVC = conversationVC

@ -1,12 +1,27 @@
import UIKit
import WebRTC
final class MiniCallView: UIView {
final class MiniCallView: UIView, RTCVideoViewDelegate {
var callVC: CallVC
// MARK: UI
private static let defaultSize: CGFloat = 100
private let topMargin = UIApplication.shared.keyWindow!.safeAreaInsets.top + Values.veryLargeSpacing
private let bottomMargin = UIApplication.shared.keyWindow!.safeAreaInsets.bottom
private var width: NSLayoutConstraint?
private var height: NSLayoutConstraint?
private var left: NSLayoutConstraint?
private var right: NSLayoutConstraint?
private var top: NSLayoutConstraint?
private var bottom: NSLayoutConstraint?
private lazy var remoteVideoView: RTCMTLVideoView = {
let result = RTCMTLVideoView()
result.contentMode = .scaleAspectFill
result.delegate = self
result.alpha = self.callVC.call.isRemoteVideoEnabled ? 1 : 0
result.videoContentMode = .scaleAspectFit
result.backgroundColor = .black
return result
}()
@ -16,10 +31,21 @@ final class MiniCallView: UIView {
init(from callVC: CallVC) {
self.callVC = callVC
super.init(frame: CGRect.zero)
self.backgroundColor = .black
self.backgroundColor = UIColor.init(white: 0, alpha: 0.8)
setUpViewHierarchy()
setUpGestureRecognizers()
MiniCallView.current = self
self.callVC.call.remoteVideoStateDidChange = { isEnabled in
DispatchQueue.main.async {
UIView.animate(withDuration: 0.25) {
self.remoteVideoView.alpha = isEnabled ? 1 : 0
if !isEnabled {
self.width?.constant = MiniCallView.defaultSize
self.height?.constant = MiniCallView.defaultSize
}
}
}
}
}
override init(frame: CGRect) {
@ -31,8 +57,9 @@ final class MiniCallView: UIView {
}
private func setUpViewHierarchy() {
self.set(.width, to: 80)
self.set(.height, to: 173)
self.width = self.set(.width, to: MiniCallView.defaultSize)
self.height = self.set(.height, to: MiniCallView.defaultSize)
self.layer.cornerRadius = 10
self.layer.masksToBounds = true
// Background
let background = getBackgroudView()
@ -56,11 +83,6 @@ final class MiniCallView: UIView {
imageView.set(.width, to: 64)
imageView.set(.height, to: 64)
imageView.center(in: background)
let blurView = UIView()
blurView.alpha = 0.5
blurView.backgroundColor = .black
background.addSubview(blurView)
blurView.autoPinEdgesToSuperviewEdges()
return background
}
@ -68,55 +90,26 @@ final class MiniCallView: UIView {
let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleTap))
tapGestureRecognizer.numberOfTapsRequired = 1
addGestureRecognizer(tapGestureRecognizer)
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(handlePan))
addGestureRecognizer(panGestureRecognizer)
makeViewDraggable()
}
// MARK: Interaction
@objc private func handleTap(_ gestureRecognizer: UITapGestureRecognizer) {
dismiss()
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // TODO: Handle more gracefully
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // FIXME: Handle more gracefully
presentingVC.present(callVC, animated: true, completion: nil)
}
@objc private func handlePan(_ gesture: UIPanGestureRecognizer) {
let location = gesture.location(in: self.superview!)
if let draggedView = gesture.view {
draggedView.center = location
if gesture.state == .ended {
let sideMargin = 40 + Values.verySmallSpacing
if draggedView.frame.midX >= self.superview!.layer.frame.width / 2 {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.x = self.superview!.layer.frame.width - sideMargin
}, completion: nil)
}else{
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.x = sideMargin
}, completion: nil)
}
let topMargin = UIApplication.shared.keyWindow!.safeAreaInsets.top + Values.veryLargeSpacing
if draggedView.frame.minY <= topMargin {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.y = topMargin + draggedView.frame.size.height / 2
}, completion: nil)
}
let bottomMargin = UIApplication.shared.keyWindow!.safeAreaInsets.bottom
if draggedView.frame.maxY >= self.superview!.layer.frame.height {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.y = self.layer.frame.height - draggedView.frame.size.height / 2 - bottomMargin
}, completion: nil)
}
}
}
}
public func show() {
self.alpha = 0.0
let window = CurrentAppContext().mainWindow!
window.addSubview(self)
self.autoPinEdge(toSuperviewEdge: .right, withInset: Values.smallSpacing)
let topMargin = UIApplication.shared.keyWindow!.safeAreaInsets.top + Values.veryLargeSpacing
self.autoPinEdge(toSuperviewEdge: .top, withInset: topMargin)
left = self.autoPinEdge(toSuperviewEdge: .left)
left?.isActive = false
right = self.autoPinEdge(toSuperviewEdge: .right)
top = self.autoPinEdge(toSuperviewEdge: .top, withInset: topMargin)
bottom = self.autoPinEdge(toSuperviewEdge: .bottom, withInset: bottomMargin)
bottom?.isActive = false
UIView.animate(withDuration: 0.5, delay: 0, options: [], animations: {
self.alpha = 1.0
}, completion: nil)
@ -127,9 +120,42 @@ final class MiniCallView: UIView {
self.alpha = 0.0
}, completion: { _ in
self.callVC.call.removeRemoteVideoRenderer(self.remoteVideoView)
self.callVC.setupStateChangeCallbacks()
MiniCallView.current = nil
self.removeFromSuperview()
})
}
// MARK: RTCVideoViewDelegate
func videoView(_ videoView: RTCVideoRenderer, didChangeVideoSize size: CGSize) {
let newSize = CGSize(width: min(160.0, 160.0 * size.width / size.height), height: min(160.0, 160.0 * size.height / size.width))
persistCurrentPosition(newSize: newSize)
self.width?.constant = newSize.width
self.height?.constant = newSize.height
}
func persistCurrentPosition(newSize: CGSize) {
let currentCenter = self.center
if currentCenter.x < self.superview!.width() / 2 {
left?.isActive = true
right?.isActive = false
} else {
left?.isActive = false
right?.isActive = true
}
let willTouchTop = currentCenter.y < newSize.height / 2 + topMargin
let willTouchBottom = currentCenter.y + newSize.height / 2 >= self.superview!.height()
if willTouchBottom {
top?.isActive = false
bottom?.isActive = true
} else {
let constant = willTouchTop ? topMargin : currentCenter.y - newSize.height / 2
top?.constant = constant
top?.isActive = true
bottom?.isActive = false
}
}
}
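The sizing rule in `videoView(_:didChangeVideoSize:)` above fits the incoming stream inside a 160×160-point box while preserving its aspect ratio; restated here as a standalone function for clarity (the 160-point cap is taken from the code above):

```swift
import CoreGraphics

// Same scaling rule as videoView(_:didChangeVideoSize:): the longer side is
// capped at 160 points and the shorter side scales to keep the aspect ratio.
func miniCallViewSize(for videoSize: CGSize, maxSide: CGFloat = 160) -> CGSize {
    CGSize(width: min(maxSide, maxSide * videoSize.width / videoSize.height),
           height: min(maxSide, maxSide * videoSize.height / videoSize.width))
}

// e.g. a 1280x720 stream maps to 160x90, and a 720x1280 stream to 90x160.
```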

@ -218,24 +218,24 @@ public class ConversationMessageMapping: NSObject {
return IndexPath(row: oldIndex, section: 0)
}
guard let view = transaction.ext(viewName) as? YapDatabaseAutoViewTransaction else {
owsFailDebug("Could not load view.")
SNLog("Could not load view.")
return nil
}
guard let group = group else {
owsFailDebug("No group.")
SNLog("No group.")
return nil
}
let indexPtr: UnsafeMutablePointer<UInt> = UnsafeMutablePointer<UInt>.allocate(capacity: 1)
let wasFound = view.getGroup(nil, index: indexPtr, forKey: uniqueId, inCollection: TSInteraction.collection())
guard wasFound else {
owsFailDebug("Could not find interaction.")
SNLog("Could not find interaction.")
return nil
}
let index = indexPtr.pointee
let threadInteractionCount = view.numberOfItems(inGroup: group)
guard index < threadInteractionCount else {
owsFailDebug("Invalid index.")
SNLog("Invalid index.")
return nil
}
// This math doesn't take into account the number of items loaded _after_ the pivot.
@ -244,7 +244,7 @@ public class ConversationMessageMapping: NSObject {
self.update(withDesiredLength: desiredWindowSize, transaction: transaction)
guard let newIndex = loadedUniqueIds().firstIndex(of: uniqueId) else {
owsFailDebug("Couldn't find interaction.")
SNLog("Couldn't find interaction.")
return nil
}
return IndexPath(row: newIndex, section: 0)

@ -44,11 +44,6 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
present(callVC, animated: true, completion: nil)
}
}
internal func showCallVCIfNeeded() {
guard let incomingCallBanner = IncomingCallBanner.current else { return }
incomingCallBanner.showCallVC(answer: false)
}
// MARK: Blocking
@objc func unblock() {

@ -263,7 +263,6 @@ final class ConversationVC : BaseVC, ConversationViewModelDelegate, OWSConversat
didFinishInitialLayout = true
markAllAsRead()
self.becomeFirstResponder()
showCallVCIfNeeded()
}
override func viewWillDisappear(_ animated: Bool) {

@ -319,11 +319,13 @@ final class VisibleMessageCell : MessageCell, LinkPreviewViewDelegate {
let maxWidth = VisibleMessageCell.getMaxWidth(for: viewItem) - 2 * inset
if let linkPreview = viewItem.linkPreview {
let linkPreviewView = LinkPreviewView(for: viewItem, maxWidth: maxWidth, delegate: self)
linkPreviewView.layer.mask = bubbleViewMaskLayer
linkPreviewView.linkPreviewState = LinkPreviewSent(linkPreview: linkPreview, imageAttachment: viewItem.linkPreviewAttachment)
snContentView.addSubview(linkPreviewView)
linkPreviewView.pin(to: snContentView)
} else if let openGroupInvitationName = message.openGroupInvitationName, let openGroupInvitationURL = message.openGroupInvitationURL {
let openGroupInvitationView = OpenGroupInvitationView(name: openGroupInvitationName, url: openGroupInvitationURL, textColor: bodyLabelTextColor, isOutgoing: isOutgoing)
openGroupInvitationView.layer.mask = bubbleViewMaskLayer
snContentView.addSubview(openGroupInvitationView)
openGroupInvitationView.pin(to: snContentView)
} else {

@ -13,6 +13,7 @@ class GlobalSearchViewController: BaseVC, UITableViewDelegate, UITableViewDataSo
}
}
var recentSearchResults: [String] = Array(Storage.shared.getRecentSearchResults().reversed())
var defaultSearchResults: HomeScreenSearchResultSet = HomeScreenSearchResultSet.noteToSelfOnly
var searchResultSet: HomeScreenSearchResultSet = HomeScreenSearchResultSet.empty
private var lastSearchText: String?
var searcher: FullTextSearcher {
@ -137,7 +138,7 @@ class GlobalSearchViewController: BaseVC, UITableViewDelegate, UITableViewDataSo
let searchText = rawSearchText.stripped
guard searchText.count > 0 else {
searchResultSet = HomeScreenSearchResultSet.noteToSelfOnly
searchResultSet = defaultSearchResults
lastSearchText = nil
reloadTableData()
return

@ -197,13 +197,11 @@ static NSTimeInterval launchStartedAt;
LKAppMode appMode = [self getCurrentAppMode];
[self adaptAppMode:appMode];
if (@available(iOS 11, *)) {
// This must happen in appDidFinishLaunching or earlier to ensure we don't
// miss notifications.
// Setting the delegate also seems to prevent us from getting the legacy
// notification callbacks upon launch e.g. 'didReceiveLocalNotification'
UNUserNotificationCenter.currentNotificationCenter.delegate = self;
}
// This must happen in appDidFinishLaunching or earlier to ensure we don't
// miss notifications.
// Setting the delegate also seems to prevent us from getting the legacy
// notification callbacks upon launch e.g. 'didReceiveLocalNotification'
UNUserNotificationCenter.currentNotificationCenter.delegate = self;
[OWSScreenLockUI.sharedManager setupWithRootWindow:self.window];
[[OWSWindowManager sharedManager] setupWithRootWindow:self.window

@ -9,8 +9,9 @@ extension AppDelegate {
// MARK: Call handling
@objc func handleAppActivatedWithOngoingCallIfNeeded() {
guard let call = AppEnvironment.shared.callManager.currentCall else { return }
guard MiniCallView.current == nil else { return }
if let callVC = CurrentAppContext().frontmostViewController() as? CallVC, callVC.call == call { return }
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // TODO: Handle more gracefully
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // FIXME: Handle more gracefully
let callVC = CallVC(for: call)
if let conversationVC = presentingVC as? ConversationVC, let contactThread = conversationVC.thread as? TSContactThread, contactThread.contactSessionID() == call.sessionID {
callVC.conversationVC = conversationVC
@ -28,21 +29,20 @@ extension AppDelegate {
private func showCallUIForCall(_ call: SessionCall) {
DispatchQueue.main.async {
if CurrentAppContext().isMainAppAndActive {
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // TODO: Handle more gracefully
if let conversationVC = presentingVC as? ConversationVC, let contactThread = conversationVC.thread as? TSContactThread, contactThread.contactSessionID() == call.sessionID {
let callVC = CallVC(for: call)
callVC.conversationVC = conversationVC
conversationVC.inputAccessoryView?.isHidden = true
conversationVC.inputAccessoryView?.alpha = 0
presentingVC.present(callVC, animated: true, completion: nil)
}
}
call.reportIncomingCallIfNeeded { error in
if let error = error {
SNLog("[Calls] Failed to report incoming call to CallKit due to error: \(error)")
let incomingCallBanner = IncomingCallBanner(for: call)
incomingCallBanner.show()
} else {
if CurrentAppContext().isMainAppAndActive {
guard let presentingVC = CurrentAppContext().frontmostViewController() else { preconditionFailure() } // FIXME: Handle more gracefully
if let conversationVC = presentingVC as? ConversationVC, let contactThread = conversationVC.thread as? TSContactThread, contactThread.contactSessionID() == call.sessionID {
let callVC = CallVC(for: call)
callVC.conversationVC = conversationVC
conversationVC.inputAccessoryView?.isHidden = true
conversationVC.inputAccessoryView?.alpha = 0
presentingVC.present(callVC, animated: true, completion: nil)
}
}
}
}
}

@ -0,0 +1,40 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
extension UIView {
func makeViewDraggable() {
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(handlePan))
addGestureRecognizer(panGestureRecognizer)
}
@objc private func handlePan(_ gesture: UIPanGestureRecognizer) {
let location = gesture.location(in: self.superview!)
if let draggedView = gesture.view {
draggedView.center = location
if gesture.state == .ended {
if draggedView.frame.midX >= self.superview!.layer.frame.width / 2 {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.x = self.superview!.layer.frame.width - draggedView.width() / 2
}, completion: nil)
} else {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.x = draggedView.width() / 2
}, completion: nil)
}
let topMargin = UIApplication.shared.keyWindow!.safeAreaInsets.top + Values.veryLargeSpacing
if draggedView.frame.minY <= topMargin {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.y = topMargin + draggedView.height() / 2
}, completion: nil)
}
let bottomMargin = UIApplication.shared.keyWindow!.safeAreaInsets.bottom
if draggedView.frame.maxY >= self.superview!.layer.frame.height {
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseIn, animations: {
draggedView.center.y = self.superview!.layer.frame.height - draggedView.height() / 2 - bottomMargin
}, completion: nil)
}
}
}
}
}
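Usage is a single call on any subview, as CallVC and MiniCallView above now do; a minimal sketch (the helper function is illustrative):

```swift
import UIKit

// Minimal usage sketch: add the view to a parent, then one call makes it
// draggable; on release it snaps to the nearest horizontal edge and is kept
// inside the top/bottom safe-area margins.
func addDraggablePreview(_ preview: UIView, to parent: UIView) {
    parent.addSubview(preview)
    preview.makeViewDraggable()
}
```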

@ -8,7 +8,7 @@ struct TurnServerInfo {
let username: String
let urls: [String]
init?(attributes: JSON) {
init?(attributes: JSON, random: Int? = nil) {
if let passwordAttribute = (attributes["password"] as? String) {
password = passwordAttribute
} else {
@ -22,7 +22,12 @@ struct TurnServerInfo {
}
if let urlsAttribute = attributes["urls"] as? [String] {
urls = urlsAttribute
if let random = random {
urls = Array(urlsAttribute.shuffled()[0..<random])
} else {
urls = urlsAttribute
}
} else {
return nil
}
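A hedged usage sketch of the new `random` parameter (the attribute values are made up, and `JSON` is assumed to be the project's `[String: Any]` alias). Note that the caller is responsible for passing a count no larger than the number of URLs, since the array slice would otherwise trap:

```swift
// Illustrative attributes only; the real values come from the bundled
// Session-Turn-Server file referenced in WebRTCSession below.
let attributes: [String: Any] = [
    "username": "example-user",
    "password": "example-pass",
    "urls": ["turn:turn1.example.org", "turn:turn2.example.org", "turn:turn3.example.org"]
]

// Keep only 2 randomly chosen URLs, matching the `random: 2` call site below.
if let info = TurnServerInfo(attributes: attributes, random: 2) {
    assert(info.urls.count == 2)
}
```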

@ -22,7 +22,7 @@ public final class WebRTCSession : NSObject, RTCPeerConnectionDelegate {
let url = Bundle.main.url(forResource: "Session-Turn-Server", withExtension: nil)!
let data = try! Data(contentsOf: url)
let json = try! JSONSerialization.jsonObject(with: data, options: [ .fragmentsAllowed ]) as! JSON
return TurnServerInfo(attributes: json)
return TurnServerInfo(attributes: json, random: 2)
}()
internal lazy var factory: RTCPeerConnectionFactory = {
@ -36,9 +36,8 @@ public final class WebRTCSession : NSObject, RTCPeerConnectionDelegate {
/// remote peer, maintain and monitor the connection, and close the connection once it's no longer needed.
internal lazy var peerConnection: RTCPeerConnection = {
let configuration = RTCConfiguration()
configuration.iceServers = [ RTCIceServer(urlStrings: ["stun:freyr.getsession.org:5349"]), RTCIceServer(urlStrings: ["turn:freyr.getsession.org"], username: "session", credential: "session") ]
if let defaultICEServer = defaultICEServer {
configuration.iceServers.append(RTCIceServer(urlStrings: defaultICEServer.urls, username: defaultICEServer.username, credential: defaultICEServer.password))
configuration.iceServers = [ RTCIceServer(urlStrings: defaultICEServer.urls, username: defaultICEServer.username, credential: defaultICEServer.password) ]
}
configuration.sdpSemantics = .unifiedPlan
let constraints = RTCMediaConstraints(mandatoryConstraints: [:], optionalConstraints: [:])

@ -65,7 +65,8 @@ void VerifyRegistrationsForPrimaryStorage(OWSStorage *storage)
if (self) {
[self loadDatabase];
self.database.maxConnectionPoolCount = 5; // Increase max connection pool count, default is 3.
_dbReadPool = [[YapDatabaseConnectionPool alloc] initWithDatabase:self.database];
_dbReadWriteConnection = [self newDatabaseConnection];
_uiDatabaseConnection = [self newDatabaseConnection];

@ -111,7 +111,7 @@ NSUInteger TSInfoMessageSchemaVersion = 1;
if ([thread isKindOfClass: [TSContactThread class]]) {
TSContactThread *contactThread = (TSContactThread *)thread;
NSString *sessionID = contactThread.contactSessionID;
NSString *name = contactThread.name;
NSString *name = [contactThread nameWithTransaction:transaction];
if ([name isEqual:sessionID]) {
name = [NSString stringWithFormat:@"%@...%@", [sessionID substringToIndex:4], [sessionID substringFromIndex:sessionID.length - 4]];
}

@ -233,10 +233,10 @@ public final class NotificationServiceExtension : UNNotificationServiceExtension
CXProvider.reportNewIncomingVoIPPushPayload(payload) { error in
if let error = error {
self.handleSuccess(for: content)
owsFailDebug("Failed to notify main app of call message: \(error)")
SNLog("Failed to notify main app of call message: \(error)")
} else {
self.completeSilenty()
Logger.info("Successfully notified main app of call message.")
SNLog("Successfully notified main app of call message.")
}
}
}

@ -82,7 +82,6 @@ public final class ProfilePictureView : UIView {
update()
} else { // A one-to-one chat
let thread = thread as! TSContactThread
hasTappableProfilePicture = OWSProfileManager.shared().profileAvatar(forRecipientId: thread.contactSessionID()) != nil
update(for: thread.contactSessionID())
}
}
@ -92,8 +91,10 @@ public final class ProfilePictureView : UIView {
func getProfilePicture(of size: CGFloat, for publicKey: String) -> UIImage? {
guard !publicKey.isEmpty else { return nil }
if let profilePicture = OWSProfileManager.shared().profileAvatar(forRecipientId: publicKey) {
hasTappableProfilePicture = true
return profilePicture
} else {
hasTappableProfilePicture = false
// TODO: Pass in context?
let displayName = Storage.shared.getContact(with: publicKey)?.name ?? publicKey
return Identicon.generatePlaceholderIcon(seed: publicKey, text: displayName, size: size)
