Finish voice message UI

pull/347/head
nielsandriesse 5 years ago
parent 6504996c34
commit a6f2d9e975

@@ -37,7 +37,6 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
// TODO: Attachments
let text = snInputView.text.trimmingCharacters(in: .whitespacesAndNewlines)
let thread = self.thread
// TODO: Blocking
guard !text.isEmpty else { return }
let message = VisibleMessage()
message.sentTimestamp = NSDate.millisecondTimestamp()
@@ -56,7 +55,6 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
Storage.shared.write { transaction in
MessageSender.send(message, with: [], in: thread, using: transaction as! YapDatabaseReadWriteTransaction)
}
// TODO: Sent handling
guard let self = self else { return }
self.snInputView.text = ""
self.snInputView.quoteDraftInfo = nil
@@ -263,6 +261,7 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
func startVoiceMessageRecording() {
// Request permission if needed
requestMicrophonePermissionIfNeeded()
guard AVAudioSession.sharedInstance().recordPermission == .granted else { return }
// Cancel any current audio playback
audioPlayer?.stop()
audioPlayer = nil
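
The guard above means recording only begins once microphone access has already been granted; on first use the long press triggers the permission prompt and the user records on their next attempt. A minimal sketch of what a requestMicrophonePermissionIfNeeded() helper could look like, assuming it simply wraps AVAudioSession (the helper that actually ships in the codebase may differ):

import AVFoundation

// Hypothetical helper, for illustration only.
func requestMicrophonePermissionIfNeeded() {
    let session = AVAudioSession.sharedInstance()
    // Only prompt if the user hasn't been asked yet.
    guard session.recordPermission == .undetermined else { return }
    session.requestRecordPermission { granted in
        // Nothing to do here; the `recordPermission == .granted` guard
        // is re-evaluated the next time recording is started.
        _ = granted
    }
}
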
@@ -323,8 +322,9 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
// Check for user misunderstanding
guard duration > 1 else {
self.audioRecorder = nil
// TODO: Show modal explaining what's up
return
let title = NSLocalizedString("VOICE_MESSAGE_TOO_SHORT_ALERT_TITLE", comment: "")
let message = NSLocalizedString("VOICE_MESSAGE_TOO_SHORT_ALERT_MESSAGE", comment: "")
return OWSAlerts.showAlert(title: title, message: message)
}
// Get data
let dataSourceOrNil = DataSourcePath.dataSource(with: audioRecorder.url, shouldDeleteOnDeallocation: true)
@@ -335,8 +335,9 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
dataSource.sourceFilename = fileName
let attachment = SignalAttachment.voiceMessageAttachment(dataSource: dataSource, dataUTI: kUTTypeMPEG4Audio as String)
guard !attachment.hasError else {
// TODO: Show error UI
return
let alert = UIAlertController(title: "Session", message: "An error occurred.", preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
return present(alert, animated: true, completion: nil)
}
// Send attachment
// TODO: Send the attachment
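
One plausible way to resolve the TODO above, mirroring the text-message path at the top of this diff; whether MessageSender.send accepts the SignalAttachment directly in its `with:` parameter is an assumption, not something this diff confirms:

// Sketch only: send the voice message the same way text messages are sent above.
let thread = self.thread
let message = VisibleMessage()
message.sentTimestamp = NSDate.millisecondTimestamp()
Storage.shared.write { transaction in
    MessageSender.send(message, with: [ attachment ], in: thread, using: transaction as! YapDatabaseReadWriteTransaction)
}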

@@ -183,14 +183,19 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate,
}
func handleInputViewButtonLongPressBegan(_ inputViewButton: InputViewButton) {
if inputViewButton == voiceMessageButton {
delegate.startVoiceMessageRecording()
showVoiceMessageUI()
}
guard inputViewButton == voiceMessageButton else { return }
delegate.startVoiceMessageRecording()
showVoiceMessageUI()
}
func handleInputViewButtonLongPressMoved(_ inputViewButton: InputViewButton, with touch: UITouch) {
guard let voiceMessageRecordingView = voiceMessageRecordingView, inputViewButton == voiceMessageButton else { return }
let location = touch.location(in: voiceMessageRecordingView)
voiceMessageRecordingView.handleLongPressMoved(to: location)
}
func handleInputViewButtonLongPressEnded(_ inputViewButton: InputViewButton, with touch: UITouch) {
guard let voiceMessageRecordingView = voiceMessageRecordingView else { return }
guard let voiceMessageRecordingView = voiceMessageRecordingView, inputViewButton == voiceMessageButton else { return }
let location = touch.location(in: voiceMessageRecordingView)
voiceMessageRecordingView.handleLongPressEnded(at: location)
}

@@ -83,6 +83,12 @@ final class InputViewButton : UIView {
})
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
if isLongPress {
delegate.handleInputViewButtonLongPressMoved(self, with: touches.first!)
}
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
collapse()
if !isLongPress {
@@ -109,5 +115,6 @@ protocol InputViewButtonDelegate {
func handleInputViewButtonTapped(_ inputViewButton: InputViewButton)
func handleInputViewButtonLongPressBegan(_ inputViewButton: InputViewButton)
func handleInputViewButtonLongPressMoved(_ inputViewButton: InputViewButton, with touch: UITouch)
func handleInputViewButtonLongPressEnded(_ inputViewButton: InputViewButton, with touch: UITouch)
}
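
The moved/ended callbacks above only fire once a long press has been recognised; how InputViewButton decides that a touch is a long press is not part of this diff. A hypothetical sketch of that recognition step, purely for orientation (names such as expand() and longPressTimer are assumptions, not code from this commit):

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    expand()
    isLongPress = false
    // Treat the touch as a long press if it is still down after ~0.4 s.
    longPressTimer = Timer.scheduledTimer(withTimeInterval: 0.4, repeats: false) { [weak self] _ in
        guard let self = self else { return }
        self.isLongPress = true
        self.delegate.handleInputViewButtonLongPressBegan(self)
    }
}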

@@ -11,6 +11,27 @@ final class VoiceMessageRecordingView : UIView {
private var recordingTimer: Timer?
// MARK: UI Components
private lazy var iconImageView: UIImageView = {
let result = UIImageView()
result.image = UIImage(named: "Microphone")!.withTint(.white)
result.contentMode = .scaleAspectFit
let size = VoiceMessageRecordingView.iconSize
result.set(.width, to: size)
result.set(.height, to: size)
return result
}()
private lazy var circleView: UIView = {
let result = UIView()
result.backgroundColor = Colors.destructive
let size = VoiceMessageRecordingView.circleSize
result.set(.width, to: size)
result.set(.height, to: size)
result.layer.cornerRadius = size / 2
result.layer.masksToBounds = true
return result
}()
private lazy var pulseView: UIView = {
let result = UIView()
result.backgroundColor = Colors.destructive
@@ -28,6 +49,16 @@ final class VoiceMessageRecordingView : UIView {
return result
}()
private lazy var chevronImageView: UIImageView = {
let chevronSize = VoiceMessageRecordingView.chevronSize
let chevronColor = (isLightMode ? UIColor.black : UIColor.white).withAlphaComponent(Values.mediumOpacity)
let result = UIImageView(image: UIImage(named: "small_chevron_left")!.withTint(chevronColor))
result.contentMode = .scaleAspectFit
result.set(.width, to: chevronSize)
result.set(.height, to: chevronSize)
return result
}()
private lazy var slideToCancelLabel: UILabel = {
let result = UILabel()
result.text = "Slide to cancel"
@@ -36,6 +67,16 @@ final class VoiceMessageRecordingView : UIView {
return result
}()
private lazy var cancelButton: UIButton = {
let result = UIButton()
result.setTitle("Cancel", for: UIControl.State.normal)
result.titleLabel!.font = .boldSystemFont(ofSize: Values.smallFontSize)
result.setTitleColor(Colors.text, for: UIControl.State.normal)
result.addTarget(self, action: #selector(handleCancelButtonTapped), for: UIControl.Event.touchUpInside)
result.alpha = 0
return result
}()
private lazy var durationStackView: UIStackView = {
let result = UIStackView()
result.axis = .horizontal
@@ -68,9 +109,10 @@ final class VoiceMessageRecordingView : UIView {
// MARK: Settings
private static let circleSize: CGFloat = 96
private static let pulseSize: CGFloat = 24
private static let microPhoneIconSize: CGFloat = 28
private static let iconSize: CGFloat = 28
private static let chevronSize: CGFloat = 16
private static let dotSize: CGFloat = 16
private static let lockViewHitMargin: CGFloat = 40
// MARK: Lifecycle
init(voiceMessageButtonFrame: CGRect, delegate: VoiceMessageRecordingViewDelegate) {
@@ -78,7 +120,7 @@ final class VoiceMessageRecordingView : UIView {
self.delegate = delegate
super.init(frame: CGRect.zero)
setUpViewHierarchy()
recordingTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] _ in
recordingTimer = Timer.scheduledTimer(withTimeInterval: 0.5, repeats: true) { [weak self] _ in
self?.updateDurationLabel()
}
}
@@ -97,41 +139,27 @@ final class VoiceMessageRecordingView : UIView {
private func setUpViewHierarchy() {
// Icon
let iconSize = VoiceMessageRecordingView.microPhoneIconSize
let iconImageView = UIImageView()
iconImageView.image = UIImage(named: "Microphone")!.withTint(.white)
iconImageView.contentMode = .scaleAspectFit
iconImageView.set(.width, to: iconSize)
iconImageView.set(.height, to: iconSize)
let iconSize = VoiceMessageRecordingView.iconSize
addSubview(iconImageView)
let voiceMessageButtonCenter = voiceMessageButtonFrame.center
iconImageView.pin(.left, to: .left, of: self, withInset: voiceMessageButtonCenter.x - iconSize / 2)
iconImageView.pin(.top, to: .top, of: self, withInset: voiceMessageButtonCenter.y - iconSize / 2)
// Circle
let circleView = UIView()
circleView.backgroundColor = Colors.destructive
let circleSize = VoiceMessageRecordingView.circleSize
circleView.set(.width, to: circleSize)
circleView.set(.height, to: circleSize)
circleView.layer.cornerRadius = circleSize / 2
circleView.layer.masksToBounds = true
insertSubview(circleView, at: 0)
circleView.center(in: iconImageView)
// Pulse
insertSubview(pulseView, at: 0)
pulseView.center(in: circleView)
// Slide to cancel stack view
let chevronSize = VoiceMessageRecordingView.chevronSize
let chevronColor = (isLightMode ? UIColor.black : UIColor.white).withAlphaComponent(Values.mediumOpacity)
let chevronImageView = UIImageView(image: UIImage(named: "small_chevron_left")!.withTint(chevronColor))
chevronImageView.contentMode = .scaleAspectFit
chevronImageView.set(.width, to: chevronSize)
chevronImageView.set(.height, to: chevronSize)
slideToCancelStackView.addArrangedSubview(chevronImageView)
slideToCancelStackView.addArrangedSubview(slideToCancelLabel)
addSubview(slideToCancelStackView)
slideToCancelStackViewRightConstraint.isActive = true
slideToCancelStackView.center(.vertical, in: iconImageView)
// Cancel button
addSubview(cancelButton)
cancelButton.center(.horizontal, in: self)
cancelButton.center(.vertical, in: iconImageView)
// Duration stack view
durationStackView.addArrangedSubview(dotView)
durationStackView.addArrangedSubview(durationLabel)
@@ -208,13 +236,49 @@ final class VoiceMessageRecordingView : UIView {
}
// MARK: Interaction
func handleLongPressMoved(to location: CGPoint) {
if location.x < bounds.center.x {
let translationX = location.x - bounds.center.x
let sign: CGFloat = -1
let chevronDamping: CGFloat = 4
let labelDamping: CGFloat = 3
let chevronX = (chevronDamping * (sqrt(abs(translationX)) / sqrt(chevronDamping))) * sign
let labelX = (labelDamping * (sqrt(abs(translationX)) / sqrt(labelDamping))) * sign
chevronImageView.transform = CGAffineTransform(translationX: chevronX, y: 0)
slideToCancelLabel.transform = CGAffineTransform(translationX: labelX, y: 0)
} else {
chevronImageView.transform = .identity
slideToCancelLabel.transform = .identity
}
}
func handleLongPressEnded(at location: CGPoint) {
let lockViewHitMargin = VoiceMessageRecordingView.lockViewHitMargin
if pulseView.frame.contains(location) {
delegate.endVoiceMessageRecording()
} else if lockView.frame.contains(location) {
print("[Test] Lock view")
} else if location.y < 0 && location.x > (lockView.frame.minX - lockViewHitMargin) && location.x < (lockView.frame.maxX + lockViewHitMargin) {
let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleCircleViewTap))
circleView.addGestureRecognizer(tapGestureRecognizer)
UIView.animate(withDuration: 0.25, delay: 0, options: .transitionCrossDissolve, animations: {
self.lockView.alpha = 0
self.iconImageView.image = UIImage(named: "ArrowUp")!.withTint(.white)
self.slideToCancelStackView.alpha = 0
self.cancelButton.alpha = 1
}, completion: { _ in
// Do nothing
})
} else {
delegate.cancelVoiceMessageRecording()
}
}
@objc private func handleCircleViewTap() {
delegate.endVoiceMessageRecording()
}
@objc private func handleCancelButtonTapped() {
delegate.cancelVoiceMessageRecording()
}
}
// MARK: Lock View
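
The slide-to-cancel drag in handleLongPressMoved above is deliberately damped: each offset expression reduces to sign * sqrt(damping * |translationX|), so the chevron and label trail the finger and resist more the further it travels. A self-contained check of that arithmetic (illustration only, not part of the commit):

import Foundation

// Same expression as handleLongPressMoved, extracted so the numbers are easy to verify.
func dampedOffset(translationX: Double, damping: Double) -> Double {
    let sign = -1.0
    return (damping * (sqrt(abs(translationX)) / sqrt(damping))) * sign
}

// A 100 pt drag to the left offsets the chevron by -20 pt (damping 4)
// and the label by roughly -17.3 pt (damping 3), so both lag behind the finger.
print(dampedOffset(translationX: -100, damping: 4)) // -20.0
print(dampedOffset(translationX: -100, damping: 3)) // ≈ -17.3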
