Merge branch 'charlesmchen/webrtc/video2_' into feature/webrtc

pull/1/head
Matthew Chen 8 years ago
commit 8bdf03fa7d

@@ -72,7 +72,7 @@
   <key>NSAppleMusicUsageDescription</key>
   <string>Signal needs to use Apple Music to play media attachments.</string>
   <key>NSCameraUsageDescription</key>
-  <string>Signal will let you take a photo to send to your contacts. You can review it before sending.</string>
+  <string>Signal uses your camera to take photos and for video calls.</string>
   <key>NSContactsUsageDescription</key>
   <string>Signal uses your contacts to find users you know. We do not store your contacts on the server.</string>
   <key>NSMicrophoneUsageDescription</key>

@@ -5,11 +5,13 @@
 #import <Foundation/Foundation.h>
 #import "AppAudioManager.h"
+#import "Asserts.h"
 #import "Environment.h"
 #import "NotificationsManager.h"
 #import "OWSCallNotificationsAdaptee.h"
 #import "OWSContactAvatarBuilder.h"
 #import "OWSContactsManager.h"
+#import "OWSDispatch.h"
 #import "OWSLogger.h"
 #import "OWSWebRTCDataProtos.pb.h"
 #import "PhoneNumber.h"

@@ -54,3 +56,5 @@
 #import <SignalServiceKit/TSStorageManager+keyingMaterial.h>
 #import <SignalServiceKit/TSThread.h>
 #import <WebRTC/RTCAudioSession.h>
+#import <WebRTC/RTCCameraPreviewView.h>
+#import <WebRTC/RTCEAGLVideoView.h>

@@ -1,5 +1,5 @@
 //
-//  Copyright © 2017 Open Whisper Systems. All rights reserved.
+//  Copyright (c) 2017 Open Whisper Systems. All rights reserved.
 //

 import Foundation

@@ -31,16 +31,17 @@ import Foundation
     // MARK: - CallObserver

     internal func stateDidChange(call: SignalCall, state: CallState) {
-        DispatchQueue.main.async {
-            self.handleState(state)
-        }
+        AssertIsOnMainThread()
+        self.handleState(state)
     }

     internal func muteDidChange(call: SignalCall, isMuted: Bool) {
+        AssertIsOnMainThread()
         Logger.verbose("\(TAG) in \(#function) is no-op")
     }

     internal func speakerphoneDidChange(call: SignalCall, isEnabled: Bool) {
+        AssertIsOnMainThread()
         if isEnabled {
             setAudioSession(category: AVAudioSessionCategoryPlayAndRecord, options: .defaultToSpeaker)
         } else {

@@ -49,6 +50,7 @@ import Foundation
     }

     internal func hasVideoDidChange(call: SignalCall, hasVideo: Bool) {
+        AssertIsOnMainThread()
         // no-op
     }

@@ -131,7 +133,7 @@ import Foundation
             return
         }

-        vibrateTimer = WeakTimer.scheduledTimer(timeInterval: vibrateRepeatDuration, target: self, userInfo: nil, repeats: true) {[weak self] timer in
+        vibrateTimer = WeakTimer.scheduledTimer(timeInterval: vibrateRepeatDuration, target: self, userInfo: nil, repeats: true) {[weak self] _ in
             self?.ringVibration()
         }
         vibrateTimer?.fire()

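The hunks above swap DispatchQueue.main.async hops for AssertIsOnMainThread() calls: the observer callbacks are now documented to arrive on the main thread, so they simply assert that invariant instead of re-dispatching. The helper itself is not shown in this diff; a minimal sketch of the kind of function it is assumed to be:

import Foundation

// Illustrative sketch only; the real helper lives elsewhere in the codebase.
func AssertIsOnMainThread(function: String = #function) {
    assert(Thread.isMainThread, "\(function) must be invoked on the main thread")
}
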
@@ -80,12 +80,25 @@ enum CallError: Error {
 // FIXME TODO do we need to timeout?
 fileprivate let timeoutSeconds = 60

-@objc class CallService: NSObject, PeerConnectionClientDelegate {
+// All Observer methods will be invoked from the main thread.
+protocol CallServiceObserver: class {
+    /**
+     * Fired whenever the local or remote video track become active or inactive.
+     */
+    func didUpdateVideoTracks(localVideoTrack: RTCVideoTrack?,
+                              remoteVideoTrack: RTCVideoTrack?)
+}
+
+// This class' state should only be accessed on the signaling queue, _except_
+// the observer-related state which only be accessed on the main thread.
+@objc class CallService: NSObject, CallObserver, PeerConnectionClientDelegate {

     // MARK: - Properties

     let TAG = "[CallService]"

+    var observers = [Weak<CallServiceObserver>]()
+
     // MARK: Dependencies

     let accountManager: AccountManager

@@ -104,7 +117,18 @@ fileprivate let timeoutSeconds = 60
     var peerConnectionClient: PeerConnectionClient?

     // TODO code cleanup: move thread into SignalCall? Or refactor messageSender to take SignalRecipient identifier.
     var thread: TSContactThread?
-    var call: SignalCall?
+    var call: SignalCall? {
+        didSet {
+            assertOnSignalingQueue()
+
+            oldValue?.removeObserver(self)
+            call?.addObserverAndSyncState(observer: self)
+
+            DispatchQueue.main.async { [weak self] in
+                self?.updateIsVideoEnabled()
+            }
+        }
+    }

     /**
      * In the process of establishing a connection between the clients (ICE process) we must exchange ICE updates.

@@ -123,6 +147,26 @@ fileprivate let timeoutSeconds = 60
     // Used to coordinate promises across delegate methods
     var fulfillCallConnectedPromise: (() -> Void)?

+    weak var localVideoTrack: RTCVideoTrack? {
+        didSet {
+            assertOnSignalingQueue()
+
+            Logger.info("\(self.TAG) \(#function)")
+
+            fireDidUpdateVideoTracks()
+        }
+    }
+
+    weak var remoteVideoTrack: RTCVideoTrack? {
+        didSet {
+            assertOnSignalingQueue()
+
+            Logger.info("\(self.TAG) \(#function)")
+
+            fireDidUpdateVideoTracks()
+        }
+    }
+
     required init(accountManager: AccountManager, contactsManager: OWSContactsManager, messageSender: MessageSender, notificationsAdapter: CallNotificationsAdapter) {
         self.accountManager = accountManager
         self.messageSender = messageSender

@@ -591,7 +635,7 @@ fileprivate let timeoutSeconds = 60

         // We don't risk transmitting any media until the remote client has admitted to being connected.
         peerConnectionClient.setAudioEnabled(enabled: !call.isMuted)
-        peerConnectionClient.setVideoEnabled(enabled: call.hasVideo)
+        peerConnectionClient.setLocalVideoEnabled(enabled: shouldHaveLocalVideoTrack())
     }

     /**

@@ -731,7 +775,7 @@ fileprivate let timeoutSeconds = 60
         }

         call.hasVideo = hasVideo
-        peerConnectionClient.setVideoEnabled(enabled: hasVideo)
+        peerConnectionClient.setLocalVideoEnabled(enabled: shouldHaveLocalVideoTrack())
     }

     func handleCallKitStartVideo() {

@@ -804,7 +848,7 @@ fileprivate let timeoutSeconds = 60
     /**
      * The connection has been established. The clients can now communicate.
      */
-    func peerConnectionClientIceConnected(_ peerconnectionClient: PeerConnectionClient) {
+    internal func peerConnectionClientIceConnected(_ peerconnectionClient: PeerConnectionClient) {
         CallService.signalingQueue.async {
             self.handleIceConnected()
         }

@@ -813,7 +857,7 @@ fileprivate let timeoutSeconds = 60
     /**
      * The connection failed to establish. The clients will not be able to communicate.
      */
-    func peerConnectionClientIceFailed(_ peerconnectionClient: PeerConnectionClient) {
+    internal func peerConnectionClientIceFailed(_ peerconnectionClient: PeerConnectionClient) {
         CallService.signalingQueue.async {
             self.handleFailedCall(error: CallError.disconnected)
         }

@@ -824,7 +868,7 @@ fileprivate let timeoutSeconds = 60
      * reach the local client via the internet. The delegate must shuttle these IceCandates to the other (remote) client
      * out of band, as part of establishing a connection over WebRTC.
      */
-    func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, addedLocalIceCandidate iceCandidate: RTCIceCandidate) {
+    internal func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, addedLocalIceCandidate iceCandidate: RTCIceCandidate) {
         CallService.signalingQueue.async {
             self.handleLocalAddedIceCandidate(iceCandidate)
         }

@@ -833,17 +877,38 @@ fileprivate let timeoutSeconds = 60
     /**
      * Once the peerconnection is established, we can receive messages via the data channel, and notify the delegate.
      */
-    func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, received dataChannelMessage: OWSWebRTCProtosData) {
+    internal func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, received dataChannelMessage: OWSWebRTCProtosData) {
         CallService.signalingQueue.async {
             self.handleDataChannelMessage(dataChannelMessage)
         }
     }

+    internal func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, didUpdateLocal videoTrack: RTCVideoTrack?) {
+        CallService.signalingQueue.async { [weak self] in
+            if let strongSelf = self {
+                strongSelf.localVideoTrack = videoTrack
+                strongSelf.fireDidUpdateVideoTracks()
+            }
+        }
+    }
+
+    internal func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, didUpdateRemote videoTrack: RTCVideoTrack?) {
+        CallService.signalingQueue.async { [weak self] in
+            if let strongSelf = self {
+                strongSelf.remoteVideoTrack = videoTrack
+                strongSelf.fireDidUpdateVideoTracks()
+            }
+        }
+    }
+
     // MARK: Helpers

     /**
      * Ensure that all `SignalCall` and `CallService` state is synchronized by only mutating signaling state in
      * handleXXX methods, and putting those methods on the signaling queue.
+     *
+     * TODO: We might want to move this queue and method to OWSDispatch so that we can assert this in
+     * other classes like SignalCall as well.
      */
     private func assertOnSignalingQueue() {
         if #available(iOS 10.0, *) {

@@ -902,6 +967,7 @@ fileprivate let timeoutSeconds = 60
      */
     private func terminateCall() {
         assertOnSignalingQueue()
+
         Logger.debug("\(TAG) in \(#function)")

         PeerConnectionClient.stopAudioSession()

@@ -909,12 +975,121 @@ fileprivate let timeoutSeconds = 60
         peerConnectionClient?.terminate()
         peerConnectionClient = nil

+        localVideoTrack = nil
+        remoteVideoTrack = nil
+
         call?.removeAllObservers()
         call = nil
         thread = nil
         incomingCallPromise = nil
         sendIceUpdatesImmediately = true
         pendingIceUpdateMessages = []
+
+        fireDidUpdateVideoTracks()
+    }
+
+    // MARK: - CallObserver
+
+    internal func stateDidChange(call: SignalCall, state: CallState) {
+        AssertIsOnMainThread()
+        Logger.info("\(self.TAG) \(#function): \(state)")
+        self.updateIsVideoEnabled()
+    }
+
+    internal func hasVideoDidChange(call: SignalCall, hasVideo: Bool) {
+        AssertIsOnMainThread()
+        Logger.info("\(self.TAG) \(#function): \(hasVideo)")
+        self.updateIsVideoEnabled()
+    }
+
+    internal func muteDidChange(call: SignalCall, isMuted: Bool) {
+        AssertIsOnMainThread()
+        // Do nothing
+    }
+
+    internal func speakerphoneDidChange(call: SignalCall, isEnabled: Bool) {
+        AssertIsOnMainThread()
+        // Do nothing
+    }
+
+    // MARK: - Video
+
+    private func shouldHaveLocalVideoTrack() -> Bool {
+        assertOnSignalingQueue()
+
+        // The iOS simulator doesn't provide any sort of camera capture
+        // support or emulation (http://goo.gl/rHAnC1) so don't bother
+        // trying to open a local stream.
+        return (!Platform.isSimulator &&
+            call != nil &&
+            call!.state == .connected &&
+            call!.hasVideo)
+    }
+
+    private func updateIsVideoEnabled() {
+        AssertIsOnMainThread()
+
+        // It's only safe to access the class properties on the signaling queue, so
+        // we dispatch there...
+        CallService.signalingQueue.async {
+            Logger.info("\(self.TAG) \(#function): \(self.shouldHaveLocalVideoTrack())")
+            self.peerConnectionClient?.setLocalVideoEnabled(enabled: self.shouldHaveLocalVideoTrack())
+        }
+    }
+
+    // MARK: - Observers
+
+    // The observer-related methods should be invoked on the main thread.
+    func addObserverAndSyncState(observer: CallServiceObserver) {
+        AssertIsOnMainThread()
+
+        observers.append(Weak(value: observer))
+
+        // Synchronize observer with current call state
+        // It's only safe to access the video track properties on the signaling queue, so
+        // we dispatch there...
+        CallService.signalingQueue.async {
+            let localVideoTrack = self.localVideoTrack
+            let remoteVideoTrack = self.remoteVideoTrack
+
+            // Then dispatch back to the main thread.
+            DispatchQueue.main.async {
+                observer.didUpdateVideoTracks(localVideoTrack:localVideoTrack,
+                                              remoteVideoTrack:remoteVideoTrack)
+            }
+        }
+    }
+
+    // The observer-related methods should be invoked on the main thread.
+    func removeObserver(_ observer: CallServiceObserver) {
+        AssertIsOnMainThread()
+
+        while let index = observers.index(where: { $0.value === observer }) {
+            observers.remove(at: index)
+        }
+    }
+
+    // The observer-related methods should be invoked on the main thread.
+    func removeAllObservers() {
+        AssertIsOnMainThread()
+
+        observers = []
+    }
+
+    func fireDidUpdateVideoTracks() {
+        assertOnSignalingQueue()
+
+        let localVideoTrack = self.localVideoTrack
+        let remoteVideoTrack = self.remoteVideoTrack
+
+        DispatchQueue.main.async { [weak self] in
+            if let strongSelf = self {
+                for observer in strongSelf.observers {
+                    observer.value?.didUpdateVideoTracks(localVideoTrack:localVideoTrack,
+                                                         remoteVideoTrack:remoteVideoTrack)
+                }
+            }
+        }
     }
 }

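CallService stores its observers as [Weak<CallServiceObserver>] so that registering a view controller does not retain it, and notifications are marshalled from the signaling queue back to the main thread. The Weak box itself is not part of this diff; a minimal sketch of that pattern, using hypothetical names (WeakBox, ObserverList):

import Foundation

// Hypothetical sketch of a weak-reference box and a prune-on-notify observer list.
struct WeakBox<Value: AnyObject> {
    weak var value: Value?

    init(value: Value) {
        self.value = value
    }
}

final class ObserverList<Observer: AnyObject> {
    private var boxes = [WeakBox<Observer>]()

    func append(_ observer: Observer) {
        boxes.append(WeakBox(value: observer))
    }

    func remove(_ observer: Observer) {
        boxes = boxes.filter { $0.value !== observer }
    }

    func notify(_ block: (Observer) -> Void) {
        // Drop boxes whose observers have been deallocated, then notify the rest.
        boxes = boxes.filter { $0.value != nil }
        for box in boxes {
            if let observer = box.value {
                block(observer)
            }
        }
    }
}

The diff takes the simpler route of pruning entries in removeObserver and tolerating nil values while iterating, which amounts to the same thing.
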
@@ -116,7 +116,9 @@ class NonCallKitCallUIAdaptee: CallUIAdaptee {

     func localHangupCall(_ call: SignalCall) {
         CallService.signalingQueue.async {
-            guard call.localId == self.callService.call?.localId else {
+            // If both parties hang up at the same moment,
+            // call might already be nil.
+            guard self.callService.call == nil || call.localId == self.callService.call?.localId else {
                 assertionFailure("\(self.TAG) in \(#function) localId does not match current call")
                 return
             }

@@ -35,6 +35,16 @@ protocol PeerConnectionClientDelegate: class {
      * Once the peerconnection is established, we can receive messages via the data channel, and notify the delegate.
      */
     func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, received dataChannelMessage: OWSWebRTCProtosData)
+
+    /**
+     * Fired whenever the local video track become active or inactive.
+     */
+    func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, didUpdateLocal videoTrack: RTCVideoTrack?)
+
+    /**
+     * Fired whenever the remote video track become active or inactive.
+     */
+    func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, didUpdateRemote videoTrack: RTCVideoTrack?)
 }

 /**

@@ -80,7 +90,8 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
     // Video

     private var videoSender: RTCRtpSender?
-    private var videoTrack: RTCVideoTrack?
+    private var localVideoTrack: RTCVideoTrack?
+    private var remoteVideoTrack: RTCVideoTrack?
     private var cameraConstraints: RTCMediaConstraints

     init(iceServers: [RTCIceServer], delegate: PeerConnectionClientDelegate) {

@@ -92,7 +103,6 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
         configuration.bundlePolicy = .maxBundle
         configuration.rtcpMuxPolicy = .require

         let connectionConstraintsDict = ["DtlsSrtpKeyAgreement": "true"]
         connectionConstraints = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: connectionConstraintsDict)

@@ -131,35 +141,45 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
             return
         }

+        // TODO: What are the best values to use here?
+        let mediaConstraintsDictionary = [
+            kRTCMediaConstraintsMinWidth: "240",
+            kRTCMediaConstraintsMinHeight: "320",
+            kRTCMediaConstraintsMaxWidth: "240",
+            kRTCMediaConstraintsMaxHeight: "320"
+        ]
+        let cameraConstraints = RTCMediaConstraints(mandatoryConstraints:nil,
+                                                    optionalConstraints:mediaConstraintsDictionary)
+
+        // TODO: Revisit the cameraConstraints.
         let videoSource = factory.avFoundationVideoSource(with: cameraConstraints)
-        let videoTrack = factory.videoTrack(with: videoSource, trackId: Identifiers.videoTrack.rawValue)
-        self.videoTrack = videoTrack
+        videoSource.useBackCamera = false
+        let localVideoTrack = factory.videoTrack(with: videoSource, trackId: Identifiers.videoTrack.rawValue)
+        self.localVideoTrack = localVideoTrack

         // Disable by default until call is connected.
         // FIXME - do we require mic permissions at this point?
         // if so maybe it would be better to not even add the track until the call is connected
         // instead of creating it and disabling it.
-        videoTrack.isEnabled = false
+        localVideoTrack.isEnabled = false

-        // Occasionally seeing this crash on the next line, after a *second* call:
-        // -[__NSCFNumber length]: unrecognized selector sent to instance 0x1562c610
-        // Seems like either videoKind or videoStreamId (both of which are Strings) is being GC'd prematurely.
-        // Not sure why, but assigned the value to local vars above in hopes of avoiding it.
-        // let videoKind = kRTCMediaStreamTrackKindVideo
         let videoSender = peerConnection.sender(withKind: kVideoTrackType, streamId: Identifiers.mediaStream.rawValue)
-        videoSender.track = videoTrack
+        videoSender.track = localVideoTrack
         self.videoSender = videoSender
     }

-    public func setVideoEnabled(enabled: Bool) {
-        guard let videoTrack = self.videoTrack else {
+    public func setLocalVideoEnabled(enabled: Bool) {
+        guard let localVideoTrack = self.localVideoTrack else {
             let action = enabled ? "enable" : "disable"
             Logger.error("\(TAG)) trying to \(action) videoTrack which doesn't exist")
             return
         }
-        videoTrack.isEnabled = enabled
+        localVideoTrack.isEnabled = enabled
+
+        if let delegate = delegate {
+            delegate.peerConnectionClient(self, didUpdateLocal: enabled ? localVideoTrack : nil)
+        }
     }

     // MARK: Audio

@@ -294,7 +314,8 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
         // we are likely to crash if we retain any peer connection properties when the peerconnection is released
         Logger.debug("\(TAG) in \(#function)")

         audioTrack = nil
-        videoTrack = nil
+        localVideoTrack = nil
+        remoteVideoTrack = nil
         dataChannel = nil
         audioSender = nil
         videoSender = nil

@@ -332,8 +353,10 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
             return
         }

+        if let delegate = delegate {
             delegate.peerConnectionClient(self, received: dataChannelMessage)
+        }
     }

     /** The data channel's |bufferedAmount| changed. */
     public func dataChannel(_ dataChannel: RTCDataChannel, didChangeBufferedAmount amount: UInt64) {

@@ -349,7 +372,14 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
     /** Called when media is received on a new stream from remote peer. */
     public func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
-        Logger.debug("\(TAG) didAdd stream:\(stream)")
+        Logger.debug("\(TAG) didAdd stream:\(stream) video tracks: \(stream.videoTracks.count) audio tracks: \(stream.audioTracks.count)")
+
+        if stream.videoTracks.count > 0 {
+            remoteVideoTrack = stream.videoTracks[0]
+            if let delegate = delegate {
+                delegate.peerConnectionClient(self, didUpdateRemote: remoteVideoTrack)
+            }
+        }
     }

     /** Called when a remote peer closes a stream. */

@@ -367,10 +397,14 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
         Logger.debug("\(TAG) didChange IceConnectionState:\(newState.debugDescription)")
         switch newState {
         case .connected, .completed:
-            self.delegate.peerConnectionClientIceConnected(self)
+            if let delegate = delegate {
+                delegate.peerConnectionClientIceConnected(self)
+            }
         case .failed:
             Logger.warn("\(self.TAG) RTCIceConnection failed.")
-            self.delegate.peerConnectionClientIceFailed(self)
+            if let delegate = delegate {
+                delegate.peerConnectionClientIceFailed(self)
+            }
         case .disconnected:
             Logger.warn("\(self.TAG) RTCIceConnection disconnected.")
         default:

@@ -386,7 +420,9 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
     /** New ice candidate has been found. */
     public func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
         Logger.debug("\(TAG) didGenerate IceCandidate:\(candidate.sdp)")
-        self.delegate.peerConnectionClient(self, addedLocalIceCandidate: candidate)
+        if let delegate = delegate {
+            delegate.peerConnectionClient(self, addedLocalIceCandidate: candidate)
+        }
     }

     /** Called when a group of local Ice candidates have been removed. */

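A recurring change in this file wraps each delegate call in if let delegate = delegate { ... }, which implies the delegate reference is optional (and presumably weak) and may disappear while WebRTC callbacks are still in flight. A minimal sketch of that shape, with hypothetical names:

import Foundation

// Hypothetical sketch; not taken from the diff above.
protocol ConnectionDelegate: class {
    func connectionDidConnect(_ connection: Connection)
}

class Connection {
    // weak + optional: the delegate (often a UI object) may be torn down
    // while the connection is still winding down on another queue.
    weak var delegate: ConnectionDelegate?

    func onConnected() {
        if let delegate = delegate {
            delegate.connectionDidConnect(self)
        }
    }
}
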
@@ -17,6 +17,7 @@ enum CallState: String {
     case remoteBusy // terminal
 }

+// All Observer methods will be invoked from the main thread.
 protocol CallObserver: class {
     func stateDidChange(call: SignalCall, state: CallState)
     func hasVideoDidChange(call: SignalCall, hasVideo: Bool)

@@ -26,6 +27,8 @@ protocol CallObserver: class {

 /**
  * Data model for a WebRTC backed voice/video call.
+ *
+ * This class' state should only be accessed on the signaling queue.
  */
 @objc class SignalCall: NSObject {

@@ -42,48 +45,94 @@ protocol CallObserver: class {
     var hasVideo = false {
         didSet {
-            Logger.debug("\(TAG) hasVideo changed: \(oldValue) -> \(hasVideo)")
+            // This should only occur on the signaling queue.
+            objc_sync_enter(self)
+            let observers = self.observers
+            let call = self
+            let hasVideo = self.hasVideo
+            objc_sync_exit(self)
+
+            DispatchQueue.main.async {
                 for observer in observers {
-                    observer.value?.hasVideoDidChange(call: self, hasVideo: hasVideo)
+                    observer.value?.hasVideoDidChange(call: call, hasVideo: hasVideo)
                 }
+            }
         }
     }

     var state: CallState {
         didSet {
-            Logger.debug("\(TAG) state changed: \(oldValue) -> \(state)")
+            // This should only occur on the signaling queue.
+            objc_sync_enter(self)
+            Logger.debug("\(TAG) state changed: \(oldValue) -> \(self.state)")

             // Update connectedDate
-            if state == .connected {
+            if self.state == .connected {
                 if connectedDate == nil {
                     connectedDate = NSDate()
                 }
             } else {
                 connectedDate = nil
             }

+            let observers = self.observers
+            let call = self
+            let state = self.state
+            objc_sync_exit(self)
+
+            DispatchQueue.main.async {
                 for observer in observers {
-                    observer.value?.stateDidChange(call: self, state: state)
+                    observer.value?.stateDidChange(call: call, state: state)
                 }
+            }
         }
     }

     var isMuted = false {
         didSet {
-            Logger.debug("\(TAG) muted changed: \(oldValue) -> \(isMuted)")
+            // This should only occur on the signaling queue.
+            objc_sync_enter(self)
+            Logger.debug("\(TAG) muted changed: \(oldValue) -> \(self.isMuted)")
+            let observers = self.observers
+            let call = self
+            let isMuted = self.isMuted
+            objc_sync_exit(self)
+
+            DispatchQueue.main.async {
                 for observer in observers {
-                    observer.value?.muteDidChange(call: self, isMuted: isMuted)
+                    observer.value?.muteDidChange(call: call, isMuted: isMuted)
                 }
+            }
         }
     }

     var isSpeakerphoneEnabled = false {
         didSet {
-            Logger.debug("\(TAG) isSpeakerphoneEnabled changed: \(oldValue) -> \(isSpeakerphoneEnabled)")
+            // This should only occur on the signaling queue.
+            objc_sync_enter(self)
+            Logger.debug("\(TAG) isSpeakerphoneEnabled changed: \(oldValue) -> \(self.isSpeakerphoneEnabled)")
+            let observers = self.observers
+            let call = self
+            let isSpeakerphoneEnabled = self.isSpeakerphoneEnabled
+            objc_sync_exit(self)
+
+            DispatchQueue.main.async {
                 for observer in observers {
-                    observer.value?.speakerphoneDidChange(call: self, isEnabled: isSpeakerphoneEnabled)
+                    observer.value?.speakerphoneDidChange(call: call, isEnabled: isSpeakerphoneEnabled)
                 }
+            }
         }
     }

     var connectedDate: NSDate?
     var error: CallError?

@@ -108,20 +157,37 @@ protocol CallObserver: class {
     // -

     func addObserverAndSyncState(observer: CallObserver) {
+        objc_sync_enter(self)
         observers.append(Weak(value: observer))
+        let call = self
+        let state = self.state
+        objc_sync_exit(self)

+        DispatchQueue.main.async {
             // Synchronize observer with current call state
-            observer.stateDidChange(call: self, state: self.state)
+            observer.stateDidChange(call: call, state: state)
+        }
     }

     func removeObserver(_ observer: CallObserver) {
+        objc_sync_enter(self)
         while let index = observers.index(where: { $0.value === observer }) {
             observers.remove(at: index)
         }
+        objc_sync_exit(self)
     }

     func removeAllObservers() {
+        objc_sync_enter(self)
         observers = []
+        objc_sync_exit(self)
     }

     // MARK: Equatable

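Every didSet in SignalCall now follows the same snapshot-then-notify pattern: copy the observer list and the new value inside an objc_sync_enter/objc_sync_exit pair, then fire the callbacks on the main queue outside the lock. A stripped-down sketch of the pattern, with illustrative names:

import Foundation

// Illustrative sketch of the snapshot-then-notify pattern; not part of the diff.
class ObservableFlag {
    private var observers = [(Bool) -> Void]()

    var isEnabled = false {
        didSet {
            // Take a consistent snapshot under the lock...
            objc_sync_enter(self)
            let observers = self.observers
            let isEnabled = self.isEnabled
            objc_sync_exit(self)

            // ...then notify on the main thread, outside the lock.
            DispatchQueue.main.async {
                for observer in observers {
                    observer(isEnabled)
                }
            }
        }
    }

    func addObserver(_ observer: @escaping (Bool) -> Void) {
        objc_sync_enter(self)
        observers.append(observer)
        objc_sync_exit(self)
    }
}

Notifying outside the lock avoids deadlock if an observer calls back into the object, at the cost that an observer may occasionally see a value that has already changed again.
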
@@ -7,10 +7,9 @@ import WebRTC
 import PromiseKit

 // TODO: Add category so that button handlers can be defined where button is created.
-// TODO: Add logic to button handlers.
 // TODO: Ensure buttons enabled & disabled as necessary.
 @objc(OWSCallViewController)
-class CallViewController: UIViewController, CallObserver {
+class CallViewController: UIViewController, CallObserver, CallServiceObserver, RTCEAGLVideoViewDelegate {

     enum CallDirection {
         case unspecified, outgoing, incoming

@@ -25,7 +24,6 @@ class CallViewController: UIViewController, CallObserver {

     // MARK: Properties

-    var peerConnectionClient: PeerConnectionClient?
     var callDirection: CallDirection = .unspecified
     var thread: TSContactThread!
     var call: SignalCall!

@@ -60,6 +58,15 @@ class CallViewController: UIViewController, CallObserver {
     var acceptIncomingButton: UIButton!
     var declineIncomingButton: UIButton!

+    // MARK: Video Views
+
+    var remoteVideoView: RTCEAGLVideoView!
+    var localVideoView: RTCCameraPreviewView!
+    weak var localVideoTrack: RTCVideoTrack?
+    weak var remoteVideoTrack: RTCVideoTrack?
+    var remoteVideoSize: CGSize! = CGSize.zero
+    var videoViewConstraints: [NSLayoutConstraint] = []
+
     // MARK: Control Groups

     var allControls: [UIView] {

@@ -132,19 +139,36 @@ class CallViewController: UIViewController, CallObserver {

         // Subscribe for future call updates
         call.addObserverAndSyncState(observer: self)
+
+        Environment.getCurrent().callService.addObserverAndSyncState(observer:self)
     }

+    // MARK: - Create Views
+
     func createViews() {
         // Dark blurred background.
         let blurEffect = UIBlurEffect(style: .dark)
         blurView = UIVisualEffectView(effect: blurEffect)
         self.view.addSubview(blurView)

+        // Create the video views first, as they are under the other views.
+        createVideoViews()
         createContactViews()
         createOngoingCallControls()
         createIncomingCallControls()
     }

+    func createVideoViews() {
+        remoteVideoView = RTCEAGLVideoView()
+        remoteVideoView.delegate = self
+        localVideoView = RTCCameraPreviewView()
+        remoteVideoView.isHidden = true
+        localVideoView.isHidden = true
+        self.view.addSubview(remoteVideoView)
+        self.view.addSubview(localVideoView)
+    }
+
     func createContactViews() {
         contactNameLabel = UILabel()
         contactNameLabel.font = UIFont.ows_lightFont(withSize:ScaleFromIPhone5To7Plus(32, 40))

@@ -291,6 +315,8 @@ class CallViewController: UIViewController, CallObserver {
         return row
     }

+    // MARK: - Layout
+
     override func updateViewConstraints() {
         if !hasConstraints {
             // We only want to create our constraints once.

@@ -310,10 +336,20 @@ class CallViewController: UIViewController, CallObserver {
             // The buttons have built-in 10% margins, so to appear centered
             // the avatar's bottom spacing should be a bit less.
             let avatarBottomSpacing = ScaleFromIPhone5To7Plus(18, 41)

+            // Layout of the local video view is a bit unusual because
+            // although the view is square, it will be used
+            let videoPreviewHMargin = CGFloat(0)
+
             // Dark blurred background.
             blurView.autoPinEdgesToSuperviewEdges()

+            // TODO: Prevent overlap of localVideoView and contact views.
+            localVideoView.autoPinEdge(toSuperviewEdge:.right, withInset:videoPreviewHMargin)
+            localVideoView.autoPinEdge(toSuperviewEdge:.top, withInset:topMargin)
+            let localVideoSize = ScaleFromIPhone5To7Plus(80, 100)
+            localVideoView.autoSetDimension(.width, toSize:localVideoSize)
+            localVideoView.autoSetDimension(.height, toSize:localVideoSize)
+
             contactNameLabel.autoPinEdge(toSuperviewEdge:.top, withInset:topMargin)
             contactNameLabel.autoPinWidthToSuperview(withMargin:contactHMargin)
             contactNameLabel.setContentHuggingVerticalHigh()

@@ -342,9 +378,60 @@ class CallViewController: UIViewController, CallObserver {
             incomingCallView.setContentHuggingVerticalHigh()
         }

+        updateVideoViewLayout()
+
         super.updateViewConstraints()
     }

+    internal func updateVideoViewLayout() {
+        NSLayoutConstraint.deactivate(self.videoViewConstraints)
+
+        var constraints: [NSLayoutConstraint] = []
+
+        // We fill the screen with the remote video. The remote video's
+        // aspect ratio may not (and in fact will very rarely) match the
+        // aspect ratio of the current device, so parts of the remote
+        // video will be hidden offscreen.
+        //
+        // It's better to trim the remote video than to adopt a letterboxed
+        // layout.
+        if remoteVideoSize.width > 0 && remoteVideoSize.height > 0 &&
+            self.view.bounds.size.width > 0 && self.view.bounds.size.height > 0 {
+
+            var remoteVideoWidth = self.view.bounds.size.width
+            var remoteVideoHeight = self.view.bounds.size.height
+            if remoteVideoSize.width / self.view.bounds.size.width > remoteVideoSize.height / self.view.bounds.size.height {
+                remoteVideoWidth = round(self.view.bounds.size.height * remoteVideoSize.width / remoteVideoSize.height)
+            } else {
+                remoteVideoHeight = round(self.view.bounds.size.width * remoteVideoSize.height / remoteVideoSize.width)
+            }
+            constraints.append(remoteVideoView.autoSetDimension(.width, toSize:remoteVideoWidth))
+            constraints.append(remoteVideoView.autoSetDimension(.height, toSize:remoteVideoHeight))
+            constraints += remoteVideoView.autoCenterInSuperview()
+
+            remoteVideoView.frame = CGRect(origin:CGPoint.zero,
+                                           size:CGSize(width:remoteVideoWidth,
+                                                       height:remoteVideoHeight))
+            remoteVideoView.isHidden = false
+        } else {
+            constraints += remoteVideoView.autoPinEdgesToSuperviewEdges()
+            remoteVideoView.isHidden = true
+        }
+
+        self.videoViewConstraints = constraints
+    }
+
+    func traverseViewHierarchy(view: UIView!, visitor: (UIView) -> Void) {
+        visitor(view)
+
+        for subview in view.subviews {
+            traverseViewHierarchy(view:subview, visitor:visitor)
+        }
+    }
+
+    // MARK: - Methods
+
     // objc accessible way to set our swift enum.
     func setOutgoingCallDirection() {
         callDirection = .outgoing

@@ -360,6 +447,8 @@ class CallViewController: UIViewController, CallObserver {
         Logger.error("\(TAG) call failed with error: \(error)")
     }

+    // MARK: - View State
+
     func localizedTextForCallState(_ callState: CallState) -> String {
         assert(Thread.isMainThread)

@@ -541,27 +630,87 @@ class CallViewController: UIViewController, CallObserver {
     // MARK: - CallObserver

     internal func stateDidChange(call: SignalCall, state: CallState) {
-        DispatchQueue.main.async {
-            Logger.info("\(self.TAG) new call status: \(state)")
-            self.updateCallUI(callState: state)
-        }
+        AssertIsOnMainThread()
+        Logger.info("\(self.TAG) new call status: \(state)")
+        self.updateCallUI(callState: state)
     }

     internal func hasVideoDidChange(call: SignalCall, hasVideo: Bool) {
-        DispatchQueue.main.async {
-            self.updateCallUI(callState: call.state)
-        }
+        AssertIsOnMainThread()
+        self.updateCallUI(callState: call.state)
     }

     internal func muteDidChange(call: SignalCall, isMuted: Bool) {
-        DispatchQueue.main.async {
-            self.updateCallUI(callState: call.state)
-        }
+        AssertIsOnMainThread()
+        self.updateCallUI(callState: call.state)
     }

     internal func speakerphoneDidChange(call: SignalCall, isEnabled: Bool) {
-        DispatchQueue.main.async {
-            self.updateCallUI(callState: call.state)
-        }
+        AssertIsOnMainThread()
+        self.updateCallUI(callState: call.state)
     }

+    // MARK: - Video
+
+    internal func updateLocalVideoTrack(localVideoTrack: RTCVideoTrack?) {
+        AssertIsOnMainThread()
+        guard self.localVideoTrack == localVideoTrack else {
+            return
+        }
+
+        self.localVideoTrack = localVideoTrack
+
+        var source: RTCAVFoundationVideoSource?
+        if localVideoTrack?.source is RTCAVFoundationVideoSource {
+            source = localVideoTrack?.source as! RTCAVFoundationVideoSource
+        }
+
+        localVideoView.captureSession = source?.captureSession
+        let isHidden = source == nil
+        Logger.info("\(TAG) \(#function) isHidden: \(isHidden)")
+        localVideoView.isHidden = isHidden
+
+        updateVideoViewLayout()
+    }
+
+    internal func updateRemoteVideoTrack(remoteVideoTrack: RTCVideoTrack?) {
+        AssertIsOnMainThread()
+        guard self.remoteVideoTrack == remoteVideoTrack else {
+            return
+        }
+
+        self.remoteVideoTrack?.remove(remoteVideoView)
+        self.remoteVideoTrack = nil
+        remoteVideoView.renderFrame(nil)
+        self.remoteVideoTrack = remoteVideoTrack
+        self.remoteVideoTrack?.add(remoteVideoView)
+        // TODO: We need to figure out how to observe start/stop of remote video.
+
+        updateVideoViewLayout()
+    }
+
+    // MARK: - CallServiceObserver
+
+    internal func didUpdateVideoTracks(localVideoTrack: RTCVideoTrack?,
+                                       remoteVideoTrack: RTCVideoTrack?) {
+        AssertIsOnMainThread()
+
+        updateLocalVideoTrack(localVideoTrack:localVideoTrack)
+        updateRemoteVideoTrack(remoteVideoTrack:remoteVideoTrack)
+    }
+
+    // MARK: - RTCEAGLVideoViewDelegate
+
+    internal func videoView(_ videoView: RTCEAGLVideoView, didChangeVideoSize size: CGSize) {
+        AssertIsOnMainThread()
+
+        if videoView != remoteVideoView {
+            return
+        }
+
+        Logger.info("\(TAG) \(#function): \(size)")
+
+        remoteVideoSize = size
+        updateVideoViewLayout()
+    }
 }

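updateVideoViewLayout() above sizes the remote video by aspect-fill: scale the stream until it covers the whole screen and crop the overflow rather than letterbox. The core arithmetic, extracted into a hypothetical standalone helper:

import CoreGraphics

// Illustrative sketch of the aspect-fill sizing used in updateVideoViewLayout();
// names and structure here are not from the Signal codebase.
func aspectFillSize(for video: CGSize, in bounds: CGSize) -> CGSize {
    guard video.width > 0, video.height > 0, bounds.width > 0, bounds.height > 0 else {
        return bounds
    }

    var width = bounds.width
    var height = bounds.height
    if video.width / bounds.width > video.height / bounds.height {
        // Video is relatively wider than the screen: match heights, let width overflow.
        width = (bounds.height * video.width / video.height).rounded()
    } else {
        // Video is relatively taller than the screen: match widths, let height overflow.
        height = (bounds.width * video.height / video.width).rounded()
    }
    return CGSize(width: width, height: height)
}

// Example: a 1280x720 stream on a 375x667 screen becomes roughly 1186x667,
// so about 810pt of width ends up cropped offscreen (roughly 405pt per side) once centered.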