mirror of https://github.com/oxen-io/session-ios
Minor refactoring
parent
8738829358
commit
54fc54e7b9
@ -1,156 +0,0 @@
|
||||
|
||||
/// Read-only list of the members of a closed group.
/// Adding members is not supported yet; an explanation label says so.
@objc(LKGroupMembersVC)
final class GroupMembersVC : BaseVC, UITableViewDataSource {
    private let thread: TSGroupThread

    /// The members' public keys, sorted alphabetically by display name
    /// (falling back to the public key itself when no contact record exists).
    private lazy var members: [String] = {
        func displayName(for publicKey: String) -> String {
            return Storage.shared.getContact(with: publicKey)?.displayName(for: .regular) ?? publicKey
        }
        return GroupUtilities.getClosedGroupMembers(thread).sorted { displayName(for: $0) < displayName(for: $1) }
    }()

    // MARK: Components
    @objc private lazy var tableView: UITableView = {
        let tableView = UITableView()
        tableView.dataSource = self
        tableView.register(Cell.self, forCellReuseIdentifier: "Cell")
        tableView.separatorStyle = .none
        tableView.backgroundColor = .clear
        tableView.showsVerticalScrollIndicator = false
        tableView.alwaysBounceVertical = false
        return tableView
    }()

    // MARK: Lifecycle
    @objc init(thread: TSGroupThread) {
        self.thread = thread
        super.init(nibName: nil, bundle: nil)
    }

    required init?(coder: NSCoder) { fatalError("Using GroupMembersVC.init(nibName:bundle:) isn't allowed. Use GroupMembersVC.init(thread:) instead.") }
    override init(nibName: String?, bundle: Bundle?) { fatalError("Using GroupMembersVC.init(nibName:bundle:) isn't allowed. Use GroupMembersVC.init(thread:) instead.") }

    override func viewDidLoad() {
        super.viewDidLoad()
        setUpGradientBackground()
        setUpNavBarStyle()
        setNavBarTitle("Group Members")
        // Explanation label at the top of the screen
        let explanationLabel = UILabel()
        explanationLabel.textColor = Colors.text.withAlphaComponent(Values.mediumOpacity)
        explanationLabel.font = .systemFont(ofSize: Values.smallFontSize)
        explanationLabel.text = "The ability to add members to a closed group is coming soon."
        explanationLabel.numberOfLines = 0
        explanationLabel.textAlignment = .center
        explanationLabel.lineBreakMode = .byWordWrapping
        view.addSubview(explanationLabel)
        explanationLabel.pin(.leading, to: .leading, of: view, withInset: Values.largeSpacing)
        explanationLabel.pin(.top, to: .top, of: view, withInset: Values.mediumSpacing)
        explanationLabel.pin(.trailing, to: .trailing, of: view, withInset: -Values.largeSpacing)
        // Separator between the explanation label and the member list
        let separator = UIView()
        separator.backgroundColor = Colors.separator
        separator.set(.height, to: Values.separatorThickness)
        view.addSubview(separator)
        separator.pin(.leading, to: .leading, of: view)
        separator.pin(.top, to: .bottom, of: explanationLabel, withInset: Values.mediumSpacing)
        separator.pin(.trailing, to: .trailing, of: view)
        // Member list fills the remaining space
        view.addSubview(tableView)
        tableView.pin(.leading, to: .leading, of: view)
        tableView.pin(.top, to: .bottom, of: separator)
        tableView.pin(.trailing, to: .trailing, of: view)
        tableView.pin(.bottom, to: .bottom, of: view)
    }

    // MARK: Data
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return members.count
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: "Cell") as! Cell
        cell.hexEncodedPublicKey = members[indexPath.row]
        return cell
    }

    // MARK: Interaction
    // NOTE(review): no visible caller in this chunk — presumably wired up as a
    // nav bar button target elsewhere; confirm before removing.
    @objc private func close() {
        dismiss(animated: true, completion: nil)
    }
}
|
||||
|
||||
// MARK: - Cell

private extension GroupMembersVC {

    /// A table view cell showing a member's profile picture and display name.
    final class Cell : UITableViewCell {
        /// The public key of the member this cell represents. Setting it refreshes the cell's contents.
        var hexEncodedPublicKey = "" { didSet { update() } }

        // MARK: Components
        private lazy var profilePictureView = ProfilePictureView()

        private lazy var displayNameLabel: UILabel = {
            let label = UILabel()
            label.textColor = Colors.text
            label.font = .boldSystemFont(ofSize: Values.mediumFontSize)
            label.lineBreakMode = .byTruncatingTail
            return label
        }()

        private lazy var separator: UIView = {
            let separator = UIView()
            separator.backgroundColor = Colors.separator
            separator.set(.height, to: Values.separatorThickness)
            return separator
        }()

        // MARK: Initialization
        override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
            super.init(style: style, reuseIdentifier: reuseIdentifier)
            setUpViewHierarchy()
        }

        required init?(coder: NSCoder) {
            super.init(coder: coder)
            setUpViewHierarchy()
        }

        private func setUpViewHierarchy() {
            // Set the cell background color
            backgroundColor = Colors.cellBackground
            // Set up the highlight color
            let highlightView = UIView()
            highlightView.backgroundColor = .clear // Disabled for now
            self.selectedBackgroundView = highlightView
            // Set up the profile picture image view
            let profilePictureViewSize = Values.smallProfilePictureSize
            profilePictureView.set(.width, to: profilePictureViewSize)
            profilePictureView.set(.height, to: profilePictureViewSize)
            profilePictureView.size = profilePictureViewSize
            // Set up the main stack view (picture + name, horizontally)
            let stackView = UIStackView(arrangedSubviews: [ profilePictureView, displayNameLabel ])
            stackView.axis = .horizontal
            stackView.alignment = .center
            stackView.spacing = Values.mediumSpacing
            stackView.set(.height, to: profilePictureViewSize)
            contentView.addSubview(stackView)
            stackView.pin(.leading, to: .leading, of: contentView, withInset: Values.mediumSpacing)
            stackView.pin(.top, to: .top, of: contentView, withInset: Values.mediumSpacing)
            contentView.pin(.bottom, to: .bottom, of: stackView, withInset: Values.mediumSpacing)
            stackView.set(.width, to: UIScreen.main.bounds.width - 2 * Values.mediumSpacing)
            // Set up the separator along the bottom edge
            addSubview(separator)
            separator.pin(.leading, to: .leading, of: self)
            separator.pin(.bottom, to: .bottom, of: self)
            separator.set(.width, to: UIScreen.main.bounds.width)
        }

        // MARK: Updating
        /// Refreshes the profile picture and display name from `hexEncodedPublicKey`,
        /// falling back to showing the public key when no contact record exists.
        private func update() {
            profilePictureView.publicKey = hexEncodedPublicKey
            profilePictureView.update()
            displayNameLabel.text = Storage.shared.getContact(with: hexEncodedPublicKey)?.displayName(for: .regular) ?? hexEncodedPublicKey
        }
    }
}
|
@ -1,17 +0,0 @@
|
||||
|
||||
extension Storage {

    /// YapDatabase collection used to cache precomputed waveform volume samples, keyed by attachment.
    private static let volumeSamplesCollection = "LokiVolumeSamplesCollection"

    /// Returns the cached volume samples for the given attachment, or `nil` if none were stored.
    public func getVolumeSamples(for attachment: String) -> [Float]? {
        var samples: [Float]?
        Storage.read { transaction in
            samples = transaction.object(forKey: attachment, inCollection: Storage.volumeSamplesCollection) as? [Float]
        }
        return samples
    }

    /// Caches the given volume samples for the given attachment.
    /// - Note: `transaction` must be a `YapDatabaseReadWriteTransaction`;
    ///   passing anything else is a programmer error and crashes (force cast).
    public func setVolumeSamples(for attachment: String, to volumeSamples: [Float], using transaction: Any) {
        let rwTransaction = transaction as! YapDatabaseReadWriteTransaction
        rwTransaction.setObject(volumeSamples, forKey: attachment, inCollection: Storage.volumeSamplesCollection)
    }
}
|
@ -1,190 +0,0 @@
|
||||
import Accelerate
|
||||
import PromiseKit
|
||||
|
||||
/// Helpers for deriving a fixed-size list of waveform volume samples (in dB,
/// clipped to [`noiseFloor`, 0]) from an audio file, e.g. for waveform rendering.
enum AudioUtilities {
    // Lower bound (in dB) for clipped samples; anything quieter is treated as silence.
    private static let noiseFloor: Float = -80

    // Everything parseSamples(from:with:) needs to read an asset's audio track.
    private struct FileInfo {
        let sampleCount: Int
        let asset: AVAsset
        let track: AVAssetTrack
    }

    enum Error : LocalizedError {
        case noAudioTrack
        case noAudioFormatDescription
        case loadingFailed
        case parsingFailed

        var errorDescription: String? {
            switch self {
            case .noAudioTrack: return "No audio track."
            case .noAudioFormatDescription: return "No audio format description."
            case .loadingFailed: return "Couldn't load asset."
            case .parsingFailed: return "Couldn't parse asset."
            }
        }
    }

    /// Asynchronously loads the audio file at `audioFileURL` and resolves to
    /// roughly `targetSampleCount` downsampled dB volume samples.
    static func getVolumeSamples(for audioFileURL: URL, targetSampleCount: Int) -> Promise<[Float]> {
        return loadFile(audioFileURL).then { fileInfo in
            AudioUtilities.parseSamples(from: fileInfo, with: targetSampleCount)
        }
    }

    /// Loads the asset, finds its first audio track and computes the total raw
    /// sample count. On a missing audio track it retries once with an `.mp3`
    /// extension (see workaround comment below); `isRetry` guards against looping.
    private static func loadFile(_ audioFileURL: URL, isRetry: Bool = false) -> Promise<FileInfo> {
        let asset = AVURLAsset(url: audioFileURL)
        guard let track = asset.tracks(withMediaType: AVMediaType.audio).first else {
            if isRetry {
                return Promise(error: Error.loadingFailed)
            } else {
                // Workaround for issue where MP3 files sent by Android get saved as M4A:
                // copy the file to a sibling path with an .mp3 extension and retry once.
                var newAudioFileURL = audioFileURL.deletingPathExtension()
                let fileName = newAudioFileURL.lastPathComponent
                newAudioFileURL = newAudioFileURL.deletingLastPathComponent()
                newAudioFileURL = newAudioFileURL.appendingPathComponent("\(fileName).mp3")
                let fileManager = FileManager.default
                if fileManager.fileExists(atPath: newAudioFileURL.path) {
                    return loadFile(newAudioFileURL, isRetry: true)
                } else {
                    do {
                        try FileManager.default.copyItem(at: audioFileURL, to: newAudioFileURL)
                    } catch {
                        return Promise(error: Error.loadingFailed)
                    }
                    return loadFile(newAudioFileURL, isRetry: true)
                }
            }
        }
        let (promise, seal) = Promise<FileInfo>.pending()
        // Load the duration asynchronously; everything below runs in the completion handler.
        asset.loadValuesAsynchronously(forKeys: [ #keyPath(AVAsset.duration) ]) {
            var nsError: NSError?
            let status = asset.statusOfValue(forKey: #keyPath(AVAsset.duration), error: &nsError)
            switch status {
            case .loaded:
                guard let formatDescriptions = track.formatDescriptions as? [CMAudioFormatDescription],
                    let audioFormatDescription = formatDescriptions.first,
                    let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDescription)
                    else { return seal.reject(Error.noAudioFormatDescription) }
                // Total raw sample count = sample rate × duration in seconds.
                let sampleCount = Int((asbd.pointee.mSampleRate) * Float64(asset.duration.value) / Float64(asset.duration.timescale))
                let fileInfo = FileInfo(sampleCount: sampleCount, asset: asset, track: track)
                seal.fulfill(fileInfo)
            default:
                print("Couldn't load asset due to error: \(nsError?.localizedDescription ?? "no description provided").")
                seal.reject(Error.loadingFailed)
            }
        }
        return promise
    }

    /// Reads the asset's audio track as 16-bit linear PCM and reduces it to
    /// roughly `targetSampleCount` averaged dB values via `processSamples`.
    private static func parseSamples(from fileInfo: FileInfo, with targetSampleCount: Int) -> Promise<[Float]> {
        // Prepare the reader
        guard let reader = try? AVAssetReader(asset: fileInfo.asset) else { return Promise(error: Error.parsingFailed) }
        let range = 0..<fileInfo.sampleCount
        reader.timeRange = CMTimeRange(start: CMTime(value: Int64(range.lowerBound), timescale: fileInfo.asset.duration.timescale),
            duration: CMTime(value: Int64(range.count), timescale: fileInfo.asset.duration.timescale))
        // Decode to interleaved 16-bit little-endian integer PCM.
        let outputSettings: [String:Any] = [
            AVFormatIDKey : Int(kAudioFormatLinearPCM),
            AVLinearPCMBitDepthKey : 16,
            AVLinearPCMIsBigEndianKey : false,
            AVLinearPCMIsFloatKey : false,
            AVLinearPCMIsNonInterleaved : false
        ]
        let output = AVAssetReaderTrackOutput(track: fileInfo.track, outputSettings: outputSettings)
        output.alwaysCopiesSampleData = false
        reader.add(output)
        // Take the channel count from the (last) format description; defaults to mono.
        var channelCount = 1
        let formatDescriptions = fileInfo.track.formatDescriptions as! [CMAudioFormatDescription]
        for audioFormatDescription in formatDescriptions {
            guard let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDescription) else {
                return Promise(error: Error.parsingFailed)
            }
            channelCount = Int(asbd.pointee.mChannelsPerFrame)
        }
        // Number of raw samples averaged into each output value, and the
        // matching averaging filter for vDSP_desamp.
        let samplesPerPixel = max(1, channelCount * range.count / targetSampleCount)
        let filter = [Float](repeating: 1 / Float(samplesPerPixel), count: samplesPerPixel)
        var result = [Float]()
        var sampleBuffer = Data()
        // Read the file
        reader.startReading()
        defer { reader.cancelReading() }
        while reader.status == .reading {
            guard let readSampleBuffer = output.copyNextSampleBuffer(),
                let readBuffer = CMSampleBufferGetDataBuffer(readSampleBuffer) else { break }
            // Append the raw PCM bytes of this chunk to the running buffer.
            var readBufferLength = 0
            var readBufferPointer: UnsafeMutablePointer<Int8>?
            CMBlockBufferGetDataPointer(readBuffer,
                atOffset: 0,
                lengthAtOffsetOut: &readBufferLength,
                totalLengthOut: nil,
                dataPointerOut: &readBufferPointer)
            sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength))
            CMSampleBufferInvalidate(readSampleBuffer)
            // Process as many whole samplesPerPixel groups as the buffer holds;
            // the remainder stays in sampleBuffer for the next iteration.
            let sampleCount = sampleBuffer.count / MemoryLayout<Int16>.size
            let downSampledLength = sampleCount / samplesPerPixel
            let samplesToProcess = downSampledLength * samplesPerPixel
            guard samplesToProcess > 0 else { continue }
            processSamples(from: &sampleBuffer,
                outputSamples: &result,
                samplesToProcess: samplesToProcess,
                downSampledLength: downSampledLength,
                samplesPerPixel: samplesPerPixel,
                filter: filter)
        }
        // Process any remaining samples (a final partial group averaged into one value)
        let samplesToProcess = sampleBuffer.count / MemoryLayout<Int16>.size
        if samplesToProcess > 0 {
            let downSampledLength = 1
            let samplesPerPixel = samplesToProcess
            let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel)
            processSamples(from: &sampleBuffer,
                outputSamples: &result,
                samplesToProcess: samplesToProcess,
                downSampledLength: downSampledLength,
                samplesPerPixel: samplesPerPixel,
                filter: filter)
        }
        guard reader.status == .completed else { return Promise(error: Error.parsingFailed) }
        // Return
        return Promise { $0.fulfill(result) }
    }

    /// Converts `samplesToProcess` Int16 samples from the front of `sampleBuffer`
    /// to clipped dB values, averages them down to `downSampledLength` values,
    /// appends those to `outputSamples` and removes the consumed bytes.
    private static func processSamples(from sampleBuffer: inout Data, outputSamples: inout [Float], samplesToProcess: Int,
        downSampledLength: Int, samplesPerPixel: Int, filter: [Float]) {
        sampleBuffer.withUnsafeBytes { (samples: UnsafeRawBufferPointer) in
            var processingBuffer = [Float](repeating: 0, count: samplesToProcess)
            let sampleCount = vDSP_Length(samplesToProcess)
            // Create an UnsafePointer<Int16> from the samples
            let unsafeBufferPointer = samples.bindMemory(to: Int16.self)
            let unsafePointer = unsafeBufferPointer.baseAddress!
            // Convert 16 bit int samples to floats
            vDSP_vflt16(unsafePointer, 1, &processingBuffer, 1, sampleCount)
            // Take the absolute values to get the amplitude
            vDSP_vabs(processingBuffer, 1, &processingBuffer, 1, sampleCount)
            // Get the corresponding dB values and clip the results
            getdB(from: &processingBuffer)
            // Downsample and average: each output value is the filter-weighted
            // mean of samplesPerPixel consecutive inputs.
            var downSampledData = [Float](repeating: 0, count: downSampledLength)
            vDSP_desamp(processingBuffer,
                vDSP_Stride(samplesPerPixel),
                filter,
                &downSampledData,
                vDSP_Length(downSampledLength),
                vDSP_Length(samplesPerPixel))
            // Remove the processed samples
            sampleBuffer.removeFirst(samplesToProcess * MemoryLayout<Int16>.size)
            // Update the output samples
            outputSamples += downSampledData
        }
    }

    /// Converts amplitudes to dB relative to Int16 full scale (32768) in place,
    /// then clips the results to [`noiseFloor`, 0].
    static func getdB(from normalizedSamples: inout [Float]) {
        // Convert samples to a log scale
        var zero: Float = 32768.0
        vDSP_vdbcon(normalizedSamples, 1, &zero, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count), 1)
        // Clip to [noiseFloor, 0]
        var ceil: Float = 0.0
        var noiseFloorMutable = AudioUtilities.noiseFloor
        vDSP_vclip(normalizedSamples, 1, &noiseFloorMutable, &ceil, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count))
    }
}
|
Loading…
Reference in New Issue