Implement new voice message design

pull/288/head
nielsandriesse 4 years ago
parent be4ef51416
commit 6ff0834065

@ -261,7 +261,6 @@
34D1F0B01F867BFC0066283D /* OWSSystemMessageCell.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0A61F867BFC0066283D /* OWSSystemMessageCell.m */; };
34D1F0B41F86D31D0066283D /* ConversationCollectionView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0B31F86D31D0066283D /* ConversationCollectionView.m */; };
34D1F0B71F87F8850066283D /* OWSGenericAttachmentView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0B61F87F8850066283D /* OWSGenericAttachmentView.m */; };
34D1F0BA1F8800D90066283D /* OWSAudioMessageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0B91F8800D90066283D /* OWSAudioMessageView.m */; };
34D1F0BD1F8D108C0066283D /* AttachmentUploadView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0BC1F8D108C0066283D /* AttachmentUploadView.m */; };
34D1F0C01F8EC1760066283D /* MessageRecipientStatusUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0BF1F8EC1760066283D /* MessageRecipientStatusUtils.swift */; };
34D2CCDA2062E7D000CB1A14 /* OWSScreenLockUI.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D2CCD92062E7D000CB1A14 /* OWSScreenLockUI.m */; };
@ -583,6 +582,8 @@
C35E8AA92485C85800ACB629 /* GeoLite2-Country-Blocks-IPv4.csv in Resources */ = {isa = PBXBuildFile; fileRef = C35E8AA62485C85600ACB629 /* GeoLite2-Country-Blocks-IPv4.csv */; };
C35E8AAE2485E51D00ACB629 /* IP2Country.swift in Sources */ = {isa = PBXBuildFile; fileRef = C35E8AAD2485E51D00ACB629 /* IP2Country.swift */; };
C3638C0524C7F0B500AF29BC /* LK002RemoveFriendRequests.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3638C0424C7F0B500AF29BC /* LK002RemoveFriendRequests.swift */; };
C3645350252449260045C478 /* VoiceMessageView2.swift in Sources */ = {isa = PBXBuildFile; fileRef = C364534F252449260045C478 /* VoiceMessageView2.swift */; };
C364535C252467900045C478 /* AudioUtilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = C364535B252467900045C478 /* AudioUtilities.swift */; };
C369549D24D27A3500CEB4E3 /* MultiDeviceRemovalSheet.swift in Sources */ = {isa = PBXBuildFile; fileRef = C369549C24D27A3500CEB4E3 /* MultiDeviceRemovalSheet.swift */; };
C36B8707243C50C60049991D /* SignalMessaging.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 453518921FC63DBF00210559 /* SignalMessaging.framework */; };
C396DAEF2518408B00FF6DC5 /* ParsingState.swift in Sources */ = {isa = PBXBuildFile; fileRef = C396DAE82518408900FF6DC5 /* ParsingState.swift */; };
@ -1032,8 +1033,6 @@
34D1F0B31F86D31D0066283D /* ConversationCollectionView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ConversationCollectionView.m; sourceTree = "<group>"; };
34D1F0B51F87F8850066283D /* OWSGenericAttachmentView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSGenericAttachmentView.h; sourceTree = "<group>"; };
34D1F0B61F87F8850066283D /* OWSGenericAttachmentView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSGenericAttachmentView.m; sourceTree = "<group>"; };
34D1F0B81F8800D90066283D /* OWSAudioMessageView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSAudioMessageView.h; sourceTree = "<group>"; };
34D1F0B91F8800D90066283D /* OWSAudioMessageView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSAudioMessageView.m; sourceTree = "<group>"; };
34D1F0BB1F8D108C0066283D /* AttachmentUploadView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AttachmentUploadView.h; sourceTree = "<group>"; };
34D1F0BC1F8D108C0066283D /* AttachmentUploadView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AttachmentUploadView.m; sourceTree = "<group>"; };
34D1F0BF1F8EC1760066283D /* MessageRecipientStatusUtils.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MessageRecipientStatusUtils.swift; sourceTree = "<group>"; };
@ -1383,6 +1382,8 @@
C35E8AA62485C85600ACB629 /* GeoLite2-Country-Blocks-IPv4.csv */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = "GeoLite2-Country-Blocks-IPv4.csv"; sourceTree = "<group>"; };
C35E8AAD2485E51D00ACB629 /* IP2Country.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IP2Country.swift; sourceTree = "<group>"; };
C3638C0424C7F0B500AF29BC /* LK002RemoveFriendRequests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LK002RemoveFriendRequests.swift; sourceTree = "<group>"; };
C364534F252449260045C478 /* VoiceMessageView2.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceMessageView2.swift; sourceTree = "<group>"; };
C364535B252467900045C478 /* AudioUtilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioUtilities.swift; sourceTree = "<group>"; };
C369549C24D27A3500CEB4E3 /* MultiDeviceRemovalSheet.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultiDeviceRemovalSheet.swift; sourceTree = "<group>"; };
C396469C2509D3ED00B0B9F5 /* pl */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = pl; path = translations/pl.lproj/Localizable.strings; sourceTree = "<group>"; };
C396469D2509D3F400B0B9F5 /* ja */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = ja; path = translations/ja.lproj/Localizable.strings; sourceTree = "<group>"; };
@ -2046,8 +2047,6 @@
34A8B3502190A40E00218A25 /* MediaAlbumCellView.swift */,
34EA693F2194933900702471 /* MediaDownloadView.swift */,
34EA69412194DE7F00702471 /* MediaUploadView.swift */,
34D1F0B81F8800D90066283D /* OWSAudioMessageView.h */,
34D1F0B91F8800D90066283D /* OWSAudioMessageView.m */,
34DBF005206C3CB100025978 /* OWSBubbleShapeView.h */,
34DBF006206C3CB200025978 /* OWSBubbleShapeView.m */,
34DBF002206BD5A500025978 /* OWSBubbleView.h */,
@ -2690,6 +2689,7 @@
B8BB82B423947F2D00BA5194 /* TextField.swift */,
C3C3CF8824D8EED300E1CCE7 /* TextView.swift */,
C31D1DDC25217014005D4DA8 /* UserCell.swift */,
C364534F252449260045C478 /* VoiceMessageView2.swift */,
);
path = Components;
sourceTree = "<group>";
@ -2698,6 +2698,7 @@
isa = PBXGroup;
children = (
B8544E3223D50E4900299F14 /* AppearanceUtilities.swift */,
C364535B252467900045C478 /* AudioUtilities.swift */,
C3D0972A2510499C00F6E3E4 /* BackgroundPoller.swift */,
C31A6C5B247F2CF3001123EF /* CGRect+Utilities.swift */,
C31D1DE8252172D4005D4DA8 /* ContactUtilities.swift */,
@ -3779,7 +3780,6 @@
34DC9BD921543E0C00FDDCEC /* DebugContactsUtils.m in Sources */,
34DBF007206C3CB200025978 /* OWSBubbleShapeView.m in Sources */,
4C04392A220A9EC800BAEA63 /* VoiceNoteLock.swift in Sources */,
34D1F0BA1F8800D90066283D /* OWSAudioMessageView.m in Sources */,
34D8C02B1ED3685800188D7C /* DebugUIContacts.m in Sources */,
3496956E21A301A100DCFE74 /* OWSBackupExportJob.m in Sources */,
4C1885D2218F8E1C00B67051 /* PhotoGridViewCell.swift in Sources */,
@ -3797,6 +3797,7 @@
B879D449247E1BE300DB3608 /* PathVC.swift in Sources */,
34E3EF0D1EFC235B007F6822 /* DebugUIDiskUsage.m in Sources */,
454A84042059C787008B8C75 /* MediaTileViewController.swift in Sources */,
C364535C252467900045C478 /* AudioUtilities.swift in Sources */,
340FC8B4204DAC8D007AEB0F /* OWSBackupSettingsViewController.m in Sources */,
34D1F0871F8678AA0066283D /* ConversationViewItem.m in Sources */,
451A13B11E13DED2000A50FD /* AppNotifications.swift in Sources */,
@ -4014,6 +4015,7 @@
3427C64320F500E000EEC730 /* OWSMessageTimerView.m in Sources */,
B90418E6183E9DD40038554A /* DateUtil.m in Sources */,
340FC8BD204DAC8D007AEB0F /* ShowGroupMembersViewController.m in Sources */,
C3645350252449260045C478 /* VoiceMessageView2.swift in Sources */,
3496956F21A301A100DCFE74 /* OWSBackupLazyRestore.swift in Sources */,
459311FC1D75C948008DD4F0 /* OWSDeviceTableViewCell.m in Sources */,
);

@ -0,0 +1,136 @@
import AVFoundation
import UIKit
@objc(LKVoiceMessageView2)
final class VoiceMessageView2 : UIView {
private let audioFileURL: URL
private let player: AVAudioPlayer
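// Non-zero placeholder so the progress division below is safe before the real duration loads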
private var duration: Double = 1
private var isAnimating = false
private var volumeSamples: [Float] = [] { didSet { updateShapeLayer() } }
// MARK: Components
private lazy var loader: UIView = {
let result = UIView()
result.backgroundColor = Colors.text.withAlphaComponent(0.2)
result.layer.cornerRadius = Values.messageBubbleCornerRadius
return result
}()
private lazy var backgroundShapeLayer: CAShapeLayer = {
let result = CAShapeLayer()
result.fillColor = Colors.text.cgColor
return result
}()
private lazy var foregroundShapeLayer: CAShapeLayer = {
let result = CAShapeLayer()
result.fillColor = Colors.accent.cgColor
return result
}()
// MARK: Settings
private let margin = Values.smallSpacing
private let sampleSpacing: CGFloat = 1
// MARK: Initialization
init(audioFileURL: URL) {
self.audioFileURL = audioFileURL
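// Force-try: the caller is expected to pass a valid, locally available audio file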
player = try! AVAudioPlayer(contentsOf: audioFileURL)
super.init(frame: CGRect.zero)
initialize()
}
override init(frame: CGRect) {
preconditionFailure("Use init(audioFileURL:) instead.")
}
required init?(coder: NSCoder) {
preconditionFailure("Use init(audioFileURL:) instead.")
}
private func initialize() {
setUpViewHierarchy()
AudioUtilities.getVolumeSamples(for: audioFileURL).done(on: DispatchQueue.main) { [weak self] duration, volumeSamples in
guard let self = self else { return }
self.duration = duration
self.volumeSamples = volumeSamples
self.stopAnimating()
}.catch(on: DispatchQueue.main) { error in
print("[Loki] Couldn't sample audio file due to error: \(error).")
}
}
private func setUpViewHierarchy() {
set(.width, to: 200)
set(.height, to: 40)
addSubview(loader)
loader.pin(to: self)
backgroundColor = Colors.sentMessageBackground
layer.cornerRadius = Values.messageBubbleCornerRadius
layer.insertSublayer(backgroundShapeLayer, at: 0)
layer.insertSublayer(foregroundShapeLayer, at: 1)
let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(togglePlayback))
addGestureRecognizer(tapGestureRecognizer)
showLoader()
}
// MARK: User Interface
private func showLoader() {
isAnimating = true
loader.alpha = 1
animateLoader()
}
private func animateLoader() {
loader.frame = CGRect(x: 0, y: 0, width: 0, height: 40)
UIView.animate(withDuration: 2) { [weak self] in
self?.loader.frame = CGRect(x: 0, y: 0, width: 200, height: 40)
} completion: { [weak self] _ in
guard let self = self else { return }
if self.isAnimating { self.animateLoader() }
}
}
private func stopAnimating() {
isAnimating = false
loader.alpha = 0
}
override func layoutSubviews() {
super.layoutSubviews()
updateShapeLayer()
}
private func updateShapeLayer() {
guard !volumeSamples.isEmpty else { return }
let max = CGFloat(volumeSamples.max()!)
let min = CGFloat(volumeSamples.min()!)
let w = width() - 2 * margin
let h = height() - 2 * margin
let sW = (w - sampleSpacing * CGFloat(volumeSamples.count)) / CGFloat(volumeSamples.count)
let backgroundPath = UIBezierPath()
let foregroundPath = UIBezierPath()
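// Draw each volume sample as a rounded vertical bar; bars before the current playback
// position are also added to the accent-colored foreground path to indicate progress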
for (i, value) in volumeSamples.enumerated() {
let x = margin + CGFloat(i) * (sW + sampleSpacing)
// Guard against division by zero when every sample has the same volume
let fraction = max > min ? (CGFloat(value) - min) / (max - min) : 0
let sH = h * fraction
let y = margin + (h - sH) / 2
let subPath = UIBezierPath(roundedRect: CGRect(x: x, y: y, width: sW, height: sH), cornerRadius: sW / 2)
backgroundPath.append(subPath)
if player.currentTime / duration > Double(i) / Double(volumeSamples.count) { foregroundPath.append(subPath) }
}
backgroundPath.close()
foregroundPath.close()
backgroundShapeLayer.path = backgroundPath.cgPath
foregroundShapeLayer.path = foregroundPath.cgPath
}
@objc private func togglePlayback() {
if player.isPlaying { player.pause(); return }
player.play()
// Poll every 0.5 s to redraw the progress overlay while playing
Timer.scheduledTimer(withTimeInterval: 0.5, repeats: true) { [weak self] timer in
guard let self = self else { return timer.invalidate() }
self.updateShapeLayer()
if !self.player.isPlaying { timer.invalidate() }
}
}
}
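// Usage sketch (hypothetical file URL):
//
// let url = URL(fileURLWithPath: "/path/to/voice-note.m4a")
// let voiceMessageView = VoiceMessageView2(audioFileURL: url)
// // The view samples the file asynchronously, animates the loader until the
// // volume samples arrive, and toggles playback when tapped.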

@ -0,0 +1,173 @@
import Accelerate
import AVFoundation
import PromiseKit
enum AudioUtilities {
private static let noiseFloor: Float = -80
private struct FileInfo {
let url: URL
let sampleCount: Int
let asset: AVAsset
let track: AVAssetTrack
}
enum Error : LocalizedError {
case noAudioTrack
case noAudioFormatDescription
case loadingFailed
case parsingFailed
var errorDescription: String? {
switch self {
case .noAudioTrack: return "No audio track."
case .noAudioFormatDescription: return "No audio format description."
case .loadingFailed: return "Couldn't load asset."
case .parsingFailed: return "Couldn't parse asset."
}
}
}
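/// Returns the audio file's duration in seconds together with `targetSampleCount`
/// averaged volume levels, expressed in dB and clipped to [noiseFloor, 0].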
static func getVolumeSamples(for audioFileURL: URL, targetSampleCount: Int = 32) -> Promise<(duration: Double, volumeSamples: [Float])> {
return loadFile(audioFileURL).then { fileInfo in
AudioUtilities.parseSamples(from: fileInfo, with: targetSampleCount)
}
}
private static func loadFile(_ audioFileURL: URL) -> Promise<FileInfo> {
let asset = AVURLAsset(url: audioFileURL)
guard let track = asset.tracks(withMediaType: AVMediaType.audio).first else {
return Promise(error: Error.noAudioTrack)
}
let (promise, seal) = Promise<FileInfo>.pending()
asset.loadValuesAsynchronously(forKeys: [ "duration" ]) {
var nsError: NSError?
let status = asset.statusOfValue(forKey: "duration", error: &nsError)
switch status {
case .loaded:
guard let formatDescriptions = track.formatDescriptions as? [CMAudioFormatDescription],
let audioFormatDescription = formatDescriptions.first,
let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDescription)
else { return seal.reject(Error.noAudioFormatDescription) }
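// Total sample (frame) count = sample rate × duration in seconds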
let sampleCount = Int((asbd.pointee.mSampleRate) * Float64(asset.duration.value) / Float64(asset.duration.timescale))
let fileInfo = FileInfo(url: audioFileURL, sampleCount: sampleCount, asset: asset, track: track)
seal.fulfill(fileInfo)
default:
print("Couldn't load asset due to error: \(nsError?.localizedDescription ?? "no description provided").")
seal.reject(Error.loadingFailed)
}
}
return promise
}
private static func parseSamples(from fileInfo: FileInfo, with targetSampleCount: Int) -> Promise<(duration: Double, volumeSamples: [Float])> {
// Prepare the reader
guard let reader = try? AVAssetReader(asset: fileInfo.asset) else { return Promise(error: Error.parsingFailed) }
let range = 0..<fileInfo.sampleCount
reader.timeRange = CMTimeRange(start: CMTime(value: Int64(range.lowerBound), timescale: fileInfo.asset.duration.timescale),
duration: CMTime(value: Int64(range.count), timescale: fileInfo.asset.duration.timescale))
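// Decode to 16-bit interleaved integer PCM so each pair of bytes maps to one Int16 sample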
let outputSettings: [String:Any] = [
AVFormatIDKey : Int(kAudioFormatLinearPCM),
AVLinearPCMBitDepthKey : 16,
AVLinearPCMIsBigEndianKey : false,
AVLinearPCMIsFloatKey : false,
AVLinearPCMIsNonInterleaved : false
]
let output = AVAssetReaderTrackOutput(track: fileInfo.track, outputSettings: outputSettings)
output.alwaysCopiesSampleData = false
reader.add(output)
var channelCount = 1
let formatDescriptions = fileInfo.track.formatDescriptions as! [CMAudioFormatDescription]
for audioFormatDescription in formatDescriptions {
guard let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDescription) else {
return Promise(error: Error.parsingFailed)
}
channelCount = Int(asbd.pointee.mChannelsPerFrame)
}
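// Average samplesPerPixel consecutive samples into one output value (box filter for vDSP_desamp)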
let samplesPerPixel = max(1, channelCount * range.count / targetSampleCount)
let filter = [Float](repeating: 1 / Float(samplesPerPixel), count: samplesPerPixel)
var result = [Float]()
var sampleBuffer = Data()
// Read the file
reader.startReading()
defer { reader.cancelReading() }
while reader.status == .reading {
guard let readSampleBuffer = output.copyNextSampleBuffer(),
let readBuffer = CMSampleBufferGetDataBuffer(readSampleBuffer) else { break }
var readBufferLength = 0
var readBufferPointer: UnsafeMutablePointer<Int8>?
CMBlockBufferGetDataPointer(readBuffer,
atOffset: 0,
lengthAtOffsetOut: &readBufferLength,
totalLengthOut: nil,
dataPointerOut: &readBufferPointer)
sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength))
CMSampleBufferInvalidate(readSampleBuffer)
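// Process the buffered data in whole multiples of samplesPerPixel; any remainder
// stays in sampleBuffer for the next iteration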
let sampleCount = sampleBuffer.count / MemoryLayout<Int16>.size
let downSampledLength = sampleCount / samplesPerPixel
let samplesToProcess = downSampledLength * samplesPerPixel
guard samplesToProcess > 0 else { continue }
processSamples(from: &sampleBuffer,
outputSamples: &result,
samplesToProcess: samplesToProcess,
downSampledLength: downSampledLength,
samplesPerPixel: samplesPerPixel,
filter: filter)
}
// Process any remaining samples
let samplesToProcess = sampleBuffer.count / MemoryLayout<Int16>.size
if samplesToProcess > 0 {
let downSampledLength = 1
let samplesPerPixel = samplesToProcess
let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel)
processSamples(from: &sampleBuffer,
outputSamples: &result,
samplesToProcess: samplesToProcess,
downSampledLength: downSampledLength,
samplesPerPixel: samplesPerPixel,
filter: filter)
}
guard reader.status == .completed else { return Promise(error: Error.parsingFailed) }
// Return
let duration = fileInfo.asset.duration.seconds
return Promise { $0.fulfill((duration, result)) }
}
private static func processSamples(from sampleBuffer: inout Data, outputSamples: inout [Float], samplesToProcess: Int,
downSampledLength: Int, samplesPerPixel: Int, filter: [Float]) {
sampleBuffer.withUnsafeBytes { (samples: UnsafeRawBufferPointer) in
var processingBuffer = [Float](repeating: 0, count: samplesToProcess)
let sampleCount = vDSP_Length(samplesToProcess)
// Create an UnsafePointer<Int16> from the samples
let unsafeBufferPointer = samples.bindMemory(to: Int16.self)
let unsafePointer = unsafeBufferPointer.baseAddress!
// Convert 16 bit int samples to floats
vDSP_vflt16(unsafePointer, 1, &processingBuffer, 1, sampleCount)
// Take the absolute values to get the amplitude
vDSP_vabs(processingBuffer, 1, &processingBuffer, 1, sampleCount)
// Get the corresponding dB values and clip the results
getdB(from: &processingBuffer)
// Downsample and average
var downSampledData = [Float](repeating: 0, count: downSampledLength)
vDSP_desamp(processingBuffer,
vDSP_Stride(samplesPerPixel),
filter,
&downSampledData,
vDSP_Length(downSampledLength),
vDSP_Length(samplesPerPixel))
// Remove the processed samples
sampleBuffer.removeFirst(samplesToProcess * MemoryLayout<Int16>.size)
// Update the output samples
outputSamples += downSampledData
}
}
static func getdB(from normalizedSamples: inout [Float]) {
// Convert samples to a log scale
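// 32768 (Int16 full scale) is the 0 dB reference amplitude passed to vDSP_vdbcon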
var zero: Float = 32768.0
vDSP_vdbcon(normalizedSamples, 1, &zero, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count), 1)
// Clip to [noiseFloor, 0]
var ceil: Float = 0.0
var noiseFloorMutable = AudioUtilities.noiseFloor
vDSP_vclip(normalizedSamples, 1, &noiseFloorMutable, &ceil, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count))
}
}

@ -22,7 +22,6 @@
#import "NotificationSettingsViewController.h"
#import "OWSAddToContactViewController.h"
#import "OWSAnyTouchGestureRecognizer.h"
#import "OWSAudioMessageView.h"
#import "OWSAudioPlayer.h"
#import "OWSBackup.h"
#import "OWSBackupIO.h"

@ -228,6 +228,7 @@ class ColorPickerView: UIView, ColorViewDelegate {
}
private func updateMockConversationView() {
/*
conversationStyle.viewWidth = max(bounds.size.width, kMinimumConversationWidth)
mockConversationView.subviews.forEach { $0.removeFromSuperview() }
@ -275,6 +276,7 @@ class ColorPickerView: UIView, ColorViewDelegate {
mockConversationView.addSubview(messagesStackView)
messagesStackView.autoPinEdgesToSuperviewMargins()
*/
}
private func buildPaletteView(colorViews: [ColorView]) -> UIView {
@ -303,6 +305,7 @@ class ColorPickerView: UIView, ColorViewDelegate {
// MARK: Mock Classes for rendering demo conversation
/*
@objc
private class MockConversationViewItem: NSObject, ConversationViewItem {
var userCanDeleteGroupMessage: Bool = false
@ -444,6 +447,7 @@ private class MockConversationViewItem: NSObject, ConversationViewItem {
return false
}
}
*/
private class MockIncomingMessage: TSIncomingMessage {
init(messageBody: String) {

@ -1,27 +0,0 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
NS_ASSUME_NONNULL_BEGIN
@class ConversationStyle;
@class TSAttachment;
@protocol ConversationViewItem;
@interface OWSAudioMessageView : UIStackView
- (instancetype)initWithAttachment:(TSAttachment *)attachment
isIncoming:(BOOL)isIncoming
viewItem:(id<ConversationViewItem>)viewItem
conversationStyle:(ConversationStyle *)conversationStyle;
- (void)createContents;
+ (CGFloat)bubbleHeight;
- (void)updateContents;
@end
NS_ASSUME_NONNULL_END

@ -1,305 +0,0 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
#import "OWSAudioMessageView.h"
#import "ConversationViewItem.h"
#import "Session-Swift.h"
#import "UIColor+OWS.h"
#import "ViewControllerUtils.h"
#import <SignalMessaging/OWSFormat.h>
#import <SignalMessaging/UIColor+OWS.h>
#import <SessionServiceKit/MIMETypeUtil.h>
NS_ASSUME_NONNULL_BEGIN
@interface OWSAudioMessageView ()
@property (nonatomic) TSAttachment *attachment;
@property (nonatomic, nullable) TSAttachmentStream *attachmentStream;
@property (nonatomic) BOOL isIncoming;
@property (nonatomic, weak) id<ConversationViewItem> viewItem;
@property (nonatomic, readonly) ConversationStyle *conversationStyle;
@property (nonatomic, nullable) UIButton *audioPlayPauseButton;
@property (nonatomic, nullable) UILabel *audioBottomLabel;
@property (nonatomic, nullable) AudioProgressView *audioProgressView;
@end
#pragma mark -
@implementation OWSAudioMessageView
- (instancetype)initWithAttachment:(TSAttachment *)attachment
isIncoming:(BOOL)isIncoming
viewItem:(id<ConversationViewItem>)viewItem
conversationStyle:(ConversationStyle *)conversationStyle
{
self = [super init];
if (self) {
_attachment = attachment;
if ([attachment isKindOfClass:[TSAttachmentStream class]]) {
_attachmentStream = (TSAttachmentStream *)attachment;
}
_isIncoming = isIncoming;
_viewItem = viewItem;
_conversationStyle = conversationStyle;
}
return self;
}
- (void)updateContents
{
[self updateAudioProgressView];
[self updateAudioBottomLabel];
if (self.audioPlaybackState == AudioPlaybackState_Playing) {
[self setAudioIconToPause];
} else {
[self setAudioIconToPlay];
}
}
- (CGFloat)audioProgressSeconds
{
return [self.viewItem audioProgressSeconds];
}
- (CGFloat)audioDurationSeconds
{
return self.viewItem.audioDurationSeconds;
}
- (AudioPlaybackState)audioPlaybackState
{
return [self.viewItem audioPlaybackState];
}
- (BOOL)isAudioPlaying
{
return self.audioPlaybackState == AudioPlaybackState_Playing;
}
- (void)updateAudioBottomLabel
{
if (self.isAudioPlaying && self.audioProgressSeconds > 0 && self.audioDurationSeconds > 0) {
self.audioBottomLabel.text =
[NSString stringWithFormat:@"%@ / %@",
[OWSFormat formatDurationSeconds:(long)round(self.audioProgressSeconds)],
[OWSFormat formatDurationSeconds:(long)round(self.audioDurationSeconds)]];
} else {
self.audioBottomLabel.text =
[NSString stringWithFormat:@"%@", [OWSFormat formatDurationSeconds:(long)round(self.audioDurationSeconds)]];
}
}
- (void)setAudioIcon:(UIImage *)icon
{
icon = [icon resizedImageToSize:CGSizeMake(self.iconSize, self.iconSize)];
[_audioPlayPauseButton setImage:icon forState:UIControlStateNormal];
[_audioPlayPauseButton setImage:icon forState:UIControlStateDisabled];
}
- (void)setAudioIconToPlay
{
[self setAudioIcon:[UIImage imageNamed:@"CirclePlay"]];
}
- (void)setAudioIconToPause
{
[self setAudioIcon:[UIImage imageNamed:@"CirclePause"]];
}
- (void)updateAudioProgressView
{
[self.audioProgressView
setProgress:(self.audioDurationSeconds > 0 ? self.audioProgressSeconds / self.audioDurationSeconds : 0.f)];
UIColor *progressColor = [self.conversationStyle bubbleSecondaryTextColorWithIsIncoming:self.isIncoming];
self.audioProgressView.horizontalBarColor = progressColor;
self.audioProgressView.progressColor = progressColor;
}
- (void)replaceIconWithDownloadProgressIfNecessary:(UIView *)iconView
{
if (!self.viewItem.attachmentPointer) {
return;
}
switch (self.viewItem.attachmentPointer.state) {
case TSAttachmentPointerStateFailed:
// We don't need to handle the "tap to retry" state here,
// only download progress.
return;
case TSAttachmentPointerStateEnqueued:
case TSAttachmentPointerStateDownloading:
break;
}
switch (self.viewItem.attachmentPointer.pointerType) {
case TSAttachmentPointerTypeRestoring:
// TODO: Show "restoring" indicator and possibly progress.
return;
case TSAttachmentPointerTypeUnknown:
case TSAttachmentPointerTypeIncoming:
break;
}
NSString *_Nullable uniqueId = self.viewItem.attachmentPointer.uniqueId;
if (uniqueId.length < 1) {
OWSFailDebug(@"Missing uniqueId.");
return;
}
CGFloat downloadViewSize = self.iconSize;
MediaDownloadView *downloadView =
[[MediaDownloadView alloc] initWithAttachmentId:uniqueId radius:downloadViewSize * 0.5f];
iconView.layer.opacity = 0.01f;
[self addSubview:downloadView];
[downloadView autoSetDimensionsToSize:CGSizeMake(downloadViewSize, downloadViewSize)];
[downloadView autoAlignAxis:ALAxisHorizontal toSameAxisOfView:iconView];
[downloadView autoAlignAxis:ALAxisVertical toSameAxisOfView:iconView];
}
#pragma mark -
- (CGFloat)hMargin
{
return 0.f;
}
- (CGFloat)hSpacing
{
return 8.f;
}
+ (CGFloat)vMargin
{
return 0.f;
}
- (CGFloat)vMargin
{
return [OWSAudioMessageView vMargin];
}
+ (CGFloat)bubbleHeight
{
CGFloat iconHeight = self.iconSize;
CGFloat labelsHeight = ([OWSAudioMessageView labelFont].lineHeight * 2 +
[OWSAudioMessageView audioProgressViewHeight] + [OWSAudioMessageView labelVSpacing] * 2);
CGFloat contentHeight = MAX(iconHeight, labelsHeight);
return contentHeight + self.vMargin * 2;
}
- (CGFloat)bubbleHeight
{
return [OWSAudioMessageView bubbleHeight];
}
+ (CGFloat)iconSize
{
return 72.f;
}
- (CGFloat)iconSize
{
return [OWSAudioMessageView iconSize];
}
- (BOOL)isVoiceMessage
{
return self.attachment.isVoiceMessage;
}
- (void)createContents
{
self.axis = UILayoutConstraintAxisHorizontal;
self.alignment = UIStackViewAlignmentCenter;
self.spacing = self.hSpacing;
self.layoutMarginsRelativeArrangement = YES;
self.layoutMargins = UIEdgeInsetsMake(self.vMargin, 0, self.vMargin, 0);
_audioPlayPauseButton = [UIButton buttonWithType:UIButtonTypeCustom];
self.audioPlayPauseButton.enabled = NO;
[self addArrangedSubview:self.audioPlayPauseButton];
self.audioPlayPauseButton.imageView.contentMode = UIViewContentModeCenter;
[self.audioPlayPauseButton autoSetDimension:ALDimensionWidth toSize:56.f];
[self.audioPlayPauseButton autoSetDimension:ALDimensionHeight toSize:56.f];
self.audioPlayPauseButton.imageView.clipsToBounds = NO;
self.audioPlayPauseButton.clipsToBounds = NO;
self.clipsToBounds = NO;
[self replaceIconWithDownloadProgressIfNecessary:self.audioPlayPauseButton];
NSString *_Nullable filename = self.attachment.sourceFilename;
if (filename.length < 1) {
filename = [self.attachmentStream.originalFilePath lastPathComponent];
}
NSString *topText = [[filename stringByDeletingPathExtension] ows_stripped];
if (topText.length < 1) {
topText = [MIMETypeUtil fileExtensionForMIMEType:self.attachment.contentType].localizedUppercaseString;
}
if (topText.length < 1) {
topText = NSLocalizedString(@"GENERIC_ATTACHMENT_LABEL", @"A label for generic attachments.");
}
if (self.isVoiceMessage) {
topText = nil;
}
UILabel *topLabel = [UILabel new];
topLabel.text = topText;
topLabel.textColor = [self.conversationStyle bubbleTextColorWithIsIncoming:self.isIncoming];
topLabel.lineBreakMode = NSLineBreakByTruncatingMiddle;
topLabel.font = [OWSAudioMessageView labelFont];
AudioProgressView *audioProgressView = [AudioProgressView new];
self.audioProgressView = audioProgressView;
[self updateAudioProgressView];
[audioProgressView autoSetDimension:ALDimensionHeight toSize:[OWSAudioMessageView audioProgressViewHeight]];
UILabel *bottomLabel = [UILabel new];
self.audioBottomLabel = bottomLabel;
[self updateAudioBottomLabel];
bottomLabel.textColor = [self.conversationStyle bubbleSecondaryTextColorWithIsIncoming:self.isIncoming];
bottomLabel.lineBreakMode = NSLineBreakByTruncatingMiddle;
bottomLabel.font = [OWSAudioMessageView labelFont];
UIStackView *labelsView = [UIStackView new];
labelsView.axis = UILayoutConstraintAxisVertical;
labelsView.spacing = [OWSAudioMessageView labelVSpacing];
[labelsView addArrangedSubview:topLabel];
[labelsView addArrangedSubview:audioProgressView];
[labelsView addArrangedSubview:bottomLabel];
// Ensure the "audio progress" and "play button" are v-center-aligned using a container.
UIView *labelsContainerView = [UIView containerView];
[self addArrangedSubview:labelsContainerView];
[labelsContainerView addSubview:labelsView];
[labelsView autoPinWidthToSuperview];
[labelsView autoPinEdgeToSuperviewMargin:ALEdgeTop relation:NSLayoutRelationGreaterThanOrEqual];
[labelsView autoPinEdgeToSuperviewMargin:ALEdgeBottom relation:NSLayoutRelationGreaterThanOrEqual];
[audioProgressView autoAlignAxis:ALAxisHorizontal toSameAxisOfView:self.audioPlayPauseButton];
[self updateContents];
}
+ (CGFloat)audioProgressViewHeight
{
return 12.f;
}
+ (UIFont *)labelFont
{
return [UIFont ows_dynamicTypeCaption2Font];
}
+ (CGFloat)labelVSpacing
{
return 2.f;
}
@end
NS_ASSUME_NONNULL_END

@ -5,7 +5,6 @@
#import "OWSMessageBubbleView.h"
#import "AttachmentUploadView.h"
#import "ConversationViewItem.h"
#import "OWSAudioMessageView.h"
#import "OWSBubbleShapeView.h"
#import "OWSBubbleView.h"
#import "OWSContactShareButtonsView.h"
@ -840,13 +839,10 @@ NS_ASSUME_NONNULL_BEGIN
OWSAssertDebug(attachment);
OWSAssertDebug([attachment isAudio]);
OWSAudioMessageView *audioMessageView = [[OWSAudioMessageView alloc] initWithAttachment:attachment
isIncoming:self.isIncoming
viewItem:self.viewItem
conversationStyle:self.conversationStyle];
self.viewItem.lastAudioMessageView = audioMessageView;
[audioMessageView createContents];
[self addProgressViewsIfNecessary:audioMessageView shouldShowDownloadProgress:NO];
LKVoiceMessageView *voiceMessageView = [[LKVoiceMessageView alloc] initWithVoiceMessage:attachment viewItem:self.viewItem];
self.viewItem.lastAudioMessageView = voiceMessageView;
[voiceMessageView update];
self.loadCellContentBlock = ^{
// Do nothing.
@ -855,7 +851,7 @@ NS_ASSUME_NONNULL_BEGIN
// Do nothing.
};
return audioMessageView;
return voiceMessageView;
}
- (UIView *)loadViewForGenericAttachment
@ -1068,7 +1064,7 @@ NS_ASSUME_NONNULL_BEGIN
return nil;
}
case OWSMessageCellType_Audio:
result = CGSizeMake(maxMessageWidth, OWSAudioMessageView.bubbleHeight);
result = CGSizeMake(maxMessageWidth, [LKVoiceMessageView getHeightFor:self.viewItem]);
break;
case OWSMessageCellType_GenericAttachment: {
TSAttachment *attachment = (self.viewItem.attachmentStream ?: self.viewItem.attachmentPointer);

@ -2430,7 +2430,7 @@ typedef enum : NSUInteger {
NSFileManager *fileManager = [NSFileManager defaultManager];
if (![fileManager fileExistsAtPath:attachmentStream.originalFilePath]) {
OWSFailDebug(@"Missing video file: %@", attachmentStream.originalMediaURL);
OWSFailDebug(@"Missing audio file: %@", attachmentStream.originalMediaURL);
}
[self dismissKeyBoard];

@ -24,7 +24,7 @@ NSString *NSStringForOWSMessageCellType(OWSMessageCellType cellType);
@class ContactShareViewModel;
@class ConversationViewCell;
@class DisplayableText;
@class OWSAudioMessageView;
@class LKVoiceMessageView;
@class OWSLinkPreview;
@class OWSQuotedReplyModel;
@class OWSUnreadIndicator;
@ -99,7 +99,7 @@ NSString *NSStringForOWSMessageCellType(OWSMessageCellType cellType);
#pragma mark - Audio Playback
@property (nonatomic, weak) OWSAudioMessageView *lastAudioMessageView;
@property (nonatomic, weak) LKVoiceMessageView *lastAudioMessageView;
@property (nonatomic, readonly) CGFloat audioDurationSeconds;
@property (nonatomic, readonly) CGFloat audioProgressSeconds;

@ -3,7 +3,6 @@
//
#import "ConversationViewItem.h"
#import "OWSAudioMessageView.h"
#import "OWSContactOffersCell.h"
#import "OWSMessageCell.h"
#import "OWSMessageHeaderView.h"
@ -476,7 +475,7 @@ NSString *NSStringForOWSMessageCellType(OWSMessageCellType cellType)
{
_audioPlaybackState = audioPlaybackState;
[self.lastAudioMessageView updateContents];
[self.lastAudioMessageView update];
}
- (void)setAudioProgress:(CGFloat)progress duration:(CGFloat)duration
@ -485,7 +484,7 @@ NSString *NSStringForOWSMessageCellType(OWSMessageCellType cellType)
self.audioProgressSeconds = progress;
[self.lastAudioMessageView updateContents];
[self.lastAudioMessageView update];
}
#pragma mark - Displayable Text

@ -5,7 +5,7 @@ public final class LokiPushNotificationManager : NSObject {
// MARK: Settings
#if DEBUG
private static let server = "https://dev.apns.getsession.org"
private static let server = "https://live.apns.getsession.org"
#else
private static let server = "https://live.apns.getsession.org"
#endif
