From a6f2d9e975b9516914d40288843b3b7ee1ff6cc1 Mon Sep 17 00:00:00 2001 From: nielsandriesse Date: Tue, 16 Feb 2021 19:28:32 +1100 Subject: [PATCH] Finish voice message UI --- .../ConversationVC+Interaction.swift | 13 ++- .../Input View/InputView.swift | 15 ++- .../Input View/InputViewButton.swift | 7 ++ .../VoiceMessageRecordingView.swift | 110 ++++++++++++++---- 4 files changed, 111 insertions(+), 34 deletions(-) diff --git a/Session/Conversations V2/ConversationVC+Interaction.swift b/Session/Conversations V2/ConversationVC+Interaction.swift index bf8e66d08..95fa1c19c 100644 --- a/Session/Conversations V2/ConversationVC+Interaction.swift +++ b/Session/Conversations V2/ConversationVC+Interaction.swift @@ -37,7 +37,6 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc // TODO: Attachments let text = snInputView.text.trimmingCharacters(in: .whitespacesAndNewlines) let thread = self.thread - // TODO: Blocking guard !text.isEmpty else { return } let message = VisibleMessage() message.sentTimestamp = NSDate.millisecondTimestamp() @@ -56,7 +55,6 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc Storage.shared.write { transaction in MessageSender.send(message, with: [], in: thread, using: transaction as! 
YapDatabaseReadWriteTransaction) } - // TODO: Sent handling guard let self = self else { return } self.snInputView.text = "" self.snInputView.quoteDraftInfo = nil @@ -263,6 +261,7 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc func startVoiceMessageRecording() { // Request permission if needed requestMicrophonePermissionIfNeeded() + guard AVAudioSession.sharedInstance().recordPermission == .granted else { return } // Cancel any current audio playback audioPlayer?.stop() audioPlayer = nil @@ -323,8 +322,9 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc // Check for user misunderstanding guard duration > 1 else { self.audioRecorder = nil - // TODO: Show modal explaining what's up - return + let title = NSLocalizedString("VOICE_MESSAGE_TOO_SHORT_ALERT_TITLE", comment: "") + let message = NSLocalizedString("VOICE_MESSAGE_TOO_SHORT_ALERT_MESSAGE", comment: "") + return OWSAlerts.showAlert(title: title, message: message) } // Get data let dataSourceOrNil = DataSourcePath.dataSource(with: audioRecorder.url, shouldDeleteOnDeallocation: true) @@ -335,8 +335,9 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc dataSource.sourceFilename = fileName let attachment = SignalAttachment.voiceMessageAttachment(dataSource: dataSource, dataUTI: kUTTypeMPEG4Audio as String) guard !attachment.hasError else { - // TODO: Show error UI - return + let alert = UIAlertController(title: "Session", message: "An error occurred.", preferredStyle: .alert) + alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil)) + return present(alert, animated: true, completion: nil) } // Send attachment // TODO: Send the attachment diff --git a/Session/Conversations V2/Input View/InputView.swift b/Session/Conversations V2/Input View/InputView.swift index 3f520c08e..e51c87caa 100644 --- a/Session/Conversations V2/Input View/InputView.swift +++ b/Session/Conversations V2/Input 
View/InputView.swift @@ -183,14 +183,19 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate, } func handleInputViewButtonLongPressBegan(_ inputViewButton: InputViewButton) { - if inputViewButton == voiceMessageButton { - delegate.startVoiceMessageRecording() - showVoiceMessageUI() - } + guard inputViewButton == voiceMessageButton else { return } + delegate.startVoiceMessageRecording() + showVoiceMessageUI() + } + + func handleInputViewButtonLongPressMoved(_ inputViewButton: InputViewButton, with touch: UITouch) { + guard let voiceMessageRecordingView = voiceMessageRecordingView, inputViewButton == voiceMessageButton else { return } + let location = touch.location(in: voiceMessageRecordingView) + voiceMessageRecordingView.handleLongPressMoved(to: location) } func handleInputViewButtonLongPressEnded(_ inputViewButton: InputViewButton, with touch: UITouch) { - guard let voiceMessageRecordingView = voiceMessageRecordingView else { return } + guard let voiceMessageRecordingView = voiceMessageRecordingView, inputViewButton == voiceMessageButton else { return } let location = touch.location(in: voiceMessageRecordingView) voiceMessageRecordingView.handleLongPressEnded(at: location) } diff --git a/Session/Conversations V2/Input View/InputViewButton.swift b/Session/Conversations V2/Input View/InputViewButton.swift index 24ee3ee8d..98200420f 100644 --- a/Session/Conversations V2/Input View/InputViewButton.swift +++ b/Session/Conversations V2/Input View/InputViewButton.swift @@ -83,6 +83,12 @@ final class InputViewButton : UIView { }) } + override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) { + if isLongPress { + delegate.handleInputViewButtonLongPressMoved(self, with: touches.first!) + } + } + override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?)
{ collapse() if !isLongPress { @@ -109,5 +115,6 @@ protocol InputViewButtonDelegate { func handleInputViewButtonTapped(_ inputViewButton: InputViewButton) func handleInputViewButtonLongPressBegan(_ inputViewButton: InputViewButton) + func handleInputViewButtonLongPressMoved(_ inputViewButton: InputViewButton, with touch: UITouch) func handleInputViewButtonLongPressEnded(_ inputViewButton: InputViewButton, with touch: UITouch) } diff --git a/Session/Conversations V2/Input View/VoiceMessageRecordingView.swift b/Session/Conversations V2/Input View/VoiceMessageRecordingView.swift index 373fcbcd0..a56b3b27d 100644 --- a/Session/Conversations V2/Input View/VoiceMessageRecordingView.swift +++ b/Session/Conversations V2/Input View/VoiceMessageRecordingView.swift @@ -11,6 +11,27 @@ final class VoiceMessageRecordingView : UIView { private var recordingTimer: Timer? // MARK: UI Components + private lazy var iconImageView: UIImageView = { + let result = UIImageView() + result.image = UIImage(named: "Microphone")!.withTint(.white) + result.contentMode = .scaleAspectFit + let size = VoiceMessageRecordingView.iconSize + result.set(.width, to: size) + result.set(.height, to: size) + return result + }() + + private lazy var circleView: UIView = { + let result = UIView() + result.backgroundColor = Colors.destructive + let size = VoiceMessageRecordingView.circleSize + result.set(.width, to: size) + result.set(.height, to: size) + result.layer.cornerRadius = size / 2 + result.layer.masksToBounds = true + return result + }() + private lazy var pulseView: UIView = { let result = UIView() result.backgroundColor = Colors.destructive @@ -28,6 +49,16 @@ final class VoiceMessageRecordingView : UIView { return result }() + private lazy var chevronImageView: UIImageView = { + let chevronSize = VoiceMessageRecordingView.chevronSize + let chevronColor = (isLightMode ? 
UIColor.black : UIColor.white).withAlphaComponent(Values.mediumOpacity) + let result = UIImageView(image: UIImage(named: "small_chevron_left")!.withTint(chevronColor)) + result.contentMode = .scaleAspectFit + result.set(.width, to: chevronSize) + result.set(.height, to: chevronSize) + return result + }() + private lazy var slideToCancelLabel: UILabel = { let result = UILabel() result.text = "Slide to cancel" @@ -36,6 +67,16 @@ final class VoiceMessageRecordingView : UIView { return result }() + private lazy var cancelButton: UIButton = { + let result = UIButton() + result.setTitle("Cancel", for: UIControl.State.normal) + result.titleLabel!.font = .boldSystemFont(ofSize: Values.smallFontSize) + result.setTitleColor(Colors.text, for: UIControl.State.normal) + result.addTarget(self, action: #selector(handleCancelButtonTapped), for: UIControl.Event.touchUpInside) + result.alpha = 0 + return result + }() + private lazy var durationStackView: UIStackView = { let result = UIStackView() result.axis = .horizontal @@ -68,9 +109,10 @@ final class VoiceMessageRecordingView : UIView { // MARK: Settings private static let circleSize: CGFloat = 96 private static let pulseSize: CGFloat = 24 - private static let microPhoneIconSize: CGFloat = 28 + private static let iconSize: CGFloat = 28 private static let chevronSize: CGFloat = 16 private static let dotSize: CGFloat = 16 + private static let lockViewHitMargin: CGFloat = 40 // MARK: Lifecycle init(voiceMessageButtonFrame: CGRect, delegate: VoiceMessageRecordingViewDelegate) { @@ -78,7 +120,7 @@ final class VoiceMessageRecordingView : UIView { self.delegate = delegate super.init(frame: CGRect.zero) setUpViewHierarchy() - recordingTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] _ in + recordingTimer = Timer.scheduledTimer(withTimeInterval: 0.5, repeats: true) { [weak self] _ in self?.updateDurationLabel() } } @@ -97,41 +139,27 @@ final class VoiceMessageRecordingView : UIView { private func 
setUpViewHierarchy() { // Icon - let iconSize = VoiceMessageRecordingView.microPhoneIconSize - let iconImageView = UIImageView() - iconImageView.image = UIImage(named: "Microphone")!.withTint(.white) - iconImageView.contentMode = .scaleAspectFit - iconImageView.set(.width, to: iconSize) - iconImageView.set(.height, to: iconSize) + let iconSize = VoiceMessageRecordingView.iconSize addSubview(iconImageView) let voiceMessageButtonCenter = voiceMessageButtonFrame.center iconImageView.pin(.left, to: .left, of: self, withInset: voiceMessageButtonCenter.x - iconSize / 2) iconImageView.pin(.top, to: .top, of: self, withInset: voiceMessageButtonCenter.y - iconSize / 2) // Circle - let circleView = UIView() - circleView.backgroundColor = Colors.destructive - let circleSize = VoiceMessageRecordingView.circleSize - circleView.set(.width, to: circleSize) - circleView.set(.height, to: circleSize) - circleView.layer.cornerRadius = circleSize / 2 - circleView.layer.masksToBounds = true insertSubview(circleView, at: 0) circleView.center(in: iconImageView) // Pulse insertSubview(pulseView, at: 0) pulseView.center(in: circleView) // Slide to cancel stack view - let chevronSize = VoiceMessageRecordingView.chevronSize - let chevronColor = (isLightMode ? 
UIColor.black : UIColor.white).withAlphaComponent(Values.mediumOpacity) - let chevronImageView = UIImageView(image: UIImage(named: "small_chevron_left")!.withTint(chevronColor)) - chevronImageView.contentMode = .scaleAspectFit - chevronImageView.set(.width, to: chevronSize) - chevronImageView.set(.height, to: chevronSize) slideToCancelStackView.addArrangedSubview(chevronImageView) slideToCancelStackView.addArrangedSubview(slideToCancelLabel) addSubview(slideToCancelStackView) slideToCancelStackViewRightConstraint.isActive = true slideToCancelStackView.center(.vertical, in: iconImageView) + // Cancel button + addSubview(cancelButton) + cancelButton.center(.horizontal, in: self) + cancelButton.center(.vertical, in: iconImageView) // Duration stack view durationStackView.addArrangedSubview(dotView) durationStackView.addArrangedSubview(durationLabel) @@ -208,13 +236,49 @@ final class VoiceMessageRecordingView : UIView { } // MARK: Interaction + func handleLongPressMoved(to location: CGPoint) { + if location.x < bounds.center.x { + let translationX = location.x - bounds.center.x + let sign: CGFloat = -1 + let chevronDamping: CGFloat = 4 + let labelDamping: CGFloat = 3 + let chevronX = (chevronDamping * (sqrt(abs(translationX)) / sqrt(chevronDamping))) * sign + let labelX = (labelDamping * (sqrt(abs(translationX)) / sqrt(labelDamping))) * sign + chevronImageView.transform = CGAffineTransform(translationX: chevronX, y: 0) + slideToCancelLabel.transform = CGAffineTransform(translationX: labelX, y: 0) + } else { + chevronImageView.transform = .identity + slideToCancelLabel.transform = .identity + } + } + func handleLongPressEnded(at location: CGPoint) { + let lockViewHitMargin = VoiceMessageRecordingView.lockViewHitMargin if pulseView.frame.contains(location) { delegate.endVoiceMessageRecording() - } else if lockView.frame.contains(location) { - print("[Test] Lock view") + } else if location.y < 0 && location.x > (lockView.frame.minX - lockViewHitMargin) && location.x < 
(lockView.frame.maxX + lockViewHitMargin) { + let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleCircleViewTap)) + circleView.addGestureRecognizer(tapGestureRecognizer) + UIView.animate(withDuration: 0.25, delay: 0, options: .transitionCrossDissolve, animations: { + self.lockView.alpha = 0 + self.iconImageView.image = UIImage(named: "ArrowUp")!.withTint(.white) + self.slideToCancelStackView.alpha = 0 + self.cancelButton.alpha = 1 + }, completion: { _ in + // Do nothing + }) + } else { + delegate.cancelVoiceMessageRecording() } } + + @objc private func handleCircleViewTap() { + delegate.endVoiceMessageRecording() + } + + @objc private func handleCancelButtonTapped() { + delegate.cancelVoiceMessageRecording() + } } // MARK: Lock View