Merge pull request #988 from mpretty-cyro/feature/swift-package-manager

Replaced Cocoapods with Swift Package Manager for dependency management
Morgan Pretty 8 months ago committed by GitHub
commit 57a76dc141

@ -1,5 +1,5 @@
// This build configuration requires the following to be installed:
// Git, Xcode, XCode Command-line Tools, Cocoapods, xcbeautify, xcresultparser, pip
// Git, Xcode, XCode Command-line Tools, xcbeautify, xcresultparser, pip
// Log a bunch of version information to make it easier for debugging
local version_info = {
@ -7,7 +7,6 @@ local version_info = {
environment: { LANG: 'en_US.UTF-8' },
commands: [
'git --version',
'pod --version',
'xcodebuild -version',
'xcbeautify --version',
'xcresultparser --version',
@ -24,75 +23,6 @@ local clone_submodules = {
// cmake options for static deps mirror
local ci_dep_mirror(want_mirror) = (if want_mirror then ' -DLOCAL_MIRROR=https://oxen.rocks/deps ' else '');
// Cocoapods
//
// Unfortunately Cocoapods has a dumb restriction which requires you to use UTF-8 for the
// 'LANG' env var so we need to work around this with https://github.com/CocoaPods/CocoaPods/issues/6333
local install_cocoapods = {
name: 'Install CocoaPods',
environment: { LANG: 'en_US.UTF-8' },
commands: [
'pod install || (rm -rf ./Pods && pod install)',
],
depends_on: [
'Load CocoaPods Cache',
],
};
// Load from the cached CocoaPods directory (to speed up the build)
local load_cocoapods_cache = {
name: 'Load CocoaPods Cache',
commands: [
|||
LOOP_BREAK=0
while test -e /Users/$USER/.cocoapods_cache.lock; do
sleep 1
LOOP_BREAK=$((LOOP_BREAK + 1))
if [[ $LOOP_BREAK -ge 600 ]]; then
rm -f /Users/$USER/.cocoapods_cache.lock
fi
done
|||,
'touch /Users/$USER/.cocoapods_cache.lock',
|||
if [[ -d /Users/$USER/.cocoapods_cache ]]; then
cp -r /Users/$USER/.cocoapods_cache ./Pods
fi
|||,
'rm -f /Users/$USER/.cocoapods_cache.lock',
],
depends_on: [
'Clone Submodules',
],
};
// Override the cached CocoaPods directory (to speed up the next build)
local update_cocoapods_cache(depends_on) = {
name: 'Update CocoaPods Cache',
commands: [
|||
LOOP_BREAK=0
while test -e /Users/$USER/.cocoapods_cache.lock; do
sleep 1
LOOP_BREAK=$((LOOP_BREAK + 1))
if [[ $LOOP_BREAK -ge 600 ]]; then
rm -f /Users/$USER/.cocoapods_cache.lock
fi
done
|||,
'touch /Users/$USER/.cocoapods_cache.lock',
|||
if [[ -d ./Pods ]]; then
rsync -a --delete ./Pods/ /Users/$USER/.cocoapods_cache
fi
|||,
'rm -f /Users/$USER/.cocoapods_cache.lock',
],
depends_on: depends_on,
};
local boot_simulator(device_type) = {
name: 'Boot Test Simulator',
commands: [
@ -128,19 +58,17 @@ local sim_delete_cmd = 'if [ -f build/artifacts/sim_uuid ]; then rm -f /Users/$U
steps: [
version_info,
clone_submodules,
load_cocoapods_cache,
install_cocoapods,
boot_simulator('com.apple.CoreSimulator.SimDeviceType.iPhone-15'),
sim_keepalive,
{
name: 'Build and Run Tests',
commands: [
'NSUnbufferedIO=YES set -o pipefail && xcodebuild test -workspace Session.xcworkspace -scheme Session -derivedDataPath ./build/derivedData -resultBundlePath ./build/artifacts/testResults.xcresult -parallelizeTargets -destination "platform=iOS Simulator,id=$(<./build/artifacts/sim_uuid)" -parallel-testing-enabled NO -test-timeouts-enabled YES -maximum-test-execution-time-allowance 10 -collect-test-diagnostics never 2>&1 | xcbeautify --is-ci',
'NSUnbufferedIO=YES set -o pipefail && xcodebuild test -project Session.xcodeproj -scheme Session -derivedDataPath ./build/derivedData -resultBundlePath ./build/artifacts/testResults.xcresult -parallelizeTargets -destination "platform=iOS Simulator,id=$(<./build/artifacts/sim_uuid)" -parallel-testing-enabled NO -test-timeouts-enabled YES -maximum-test-execution-time-allowance 10 -collect-test-diagnostics never 2>&1 | xcbeautify --is-ci',
],
depends_on: [
'Boot Test Simulator',
'Install CocoaPods',
'Clone Submodules',
'Boot Test Simulator'
],
},
{
@ -160,7 +88,6 @@ local sim_delete_cmd = 'if [ -f build/artifacts/sim_uuid ]; then rm -f /Users/$U
status: ['failure', 'success'],
},
},
update_cocoapods_cache(['Build and Run Tests']),
{
name: 'Install Codecov CLI',
commands: [
@ -221,19 +148,16 @@ local sim_delete_cmd = 'if [ -f build/artifacts/sim_uuid ]; then rm -f /Users/$U
steps: [
version_info,
clone_submodules,
load_cocoapods_cache,
install_cocoapods,
{
name: 'Build',
commands: [
'mkdir build',
'NSUnbufferedIO=YES set -o pipefail && xcodebuild archive -workspace Session.xcworkspace -scheme Session -derivedDataPath ./build/derivedData -parallelizeTargets -configuration "App_Store_Release" -sdk iphonesimulator -archivePath ./build/Session_sim.xcarchive -destination "generic/platform=iOS Simulator" | xcbeautify --is-ci',
'NSUnbufferedIO=YES set -o pipefail && xcodebuild archive -project Session.xcodeproj -scheme Session -derivedDataPath ./build/derivedData -parallelizeTargets -configuration "App_Store_Release" -sdk iphonesimulator -archivePath ./build/Session_sim.xcarchive -destination "generic/platform=iOS Simulator" | xcbeautify --is-ci',
],
depends_on: [
'Install CocoaPods',
'Clone Submodules',
],
},
update_cocoapods_cache(['Build']),
{
name: 'Upload artifacts',
environment: { SSH_KEY: { from_secret: 'SSH_KEY' } },

@ -38,15 +38,7 @@ Session requires a number of submodules to build, these can be retrieved by navi
git submodule update --init --recursive
```
## 3. Pods
To build and configure the libraries Session uses, just run:
```
pod install
```
## 4. libSession build dependencies
## 3. libSession build dependencies
The iOS project has a shared C++ library called `libSession` which is built as one of the project dependencies. In order for this to compile, the following dependencies need to be installed:
- cmake
@ -59,12 +51,12 @@ Additionally `xcode-select` needs to be setup correctly (depending on the order
`sudo xcode-select -s /Applications/Xcode.app/Contents/Developer`
## 5. Xcode
## 4. Xcode
Open the `Session.xcworkspace` in Xcode.
Open the `Session.xcodeproj` in Xcode.
```
open Session.xcworkspace
open Session.xcodeproj
```
In the TARGETS area of the General tab, change the Team dropdown to
@ -80,6 +72,15 @@ Build and Run and you are ready to go!
## Known issues
### Address & Undefined Behaviour Sanitizer Linker Errors
It seems that there is an open issue with Swift Package Manager (https://github.com/swiftlang/swift-package-manager/issues/4407) where some packages (in our case `libwebp`) run into issues when the Address Sanitizer or Undefined Behaviour Sanitizer are enabled within the scheme. If you see linker errors like the ones below when building, this is likely the issue and can be resolved by disabling these sanitisers.
In order to still benefit from these settings, they are explicitly set as `Other C Flags` for the `SessionUtil` target when building in debug mode to enable better debugging of `libSession` (a sketch of what such flags can look like follows the error output below).
```
```
Undefined symbol: ___asan_init
Undefined symbol: ___ubsan_handle_add_overflow
```
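As a rough, hypothetical sketch only (the exact values in the project's build settings may differ), debug-only sanitizer instrumentation of the kind described above can be expressed as conditional `Other C Flags` in an xcconfig:
```
// Hypothetical xcconfig sketch – not the project's actual configuration.
// Instrument the C/C++ sources with ASan/UBSan in Debug builds only.
OTHER_CFLAGS[config=Debug] = $(inherited) -fsanitize=address -fsanitize=undefined
```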
### Third-party Installation
The database for the app is stored within an `App Group` directory which is based on the app identifier; unfortunately the identifier cannot be retrieved at runtime, so it's currently hard-coded in the code. In order to be able to run Session on a device you will need to update the `UserDefaults.applicationGroup` variable in `SessionUtilitiesKit/General/SNUserDefaults` to match the value provided (you may also need to create the `App Group` on your Apple Developer account).
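A minimal sketch of that change, assuming a placeholder group identifier of your own (the actual declaration in `SessionUtilitiesKit/General/SNUserDefaults` may be structured differently):
```
import Foundation

public extension UserDefaults {
    // Hypothetical value – replace with the App Group identifier configured
    // on your own Apple Developer account so the app and its extensions can
    // share the same database directory.
    static let applicationGroup: String = "group.com.example.session"
}
```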

@ -1 +1 @@
Subproject commit f0016f512956c3f8fef83e3daf17e76fec272968
Subproject commit ba7919d304bfe44a75a77d418638bf82ac0b7f93

@ -1,120 +0,0 @@
platform :ios, '13.0'
use_frameworks!
inhibit_all_warnings!
install! 'cocoapods', :warn_for_unused_master_specs_repo => false
# Dependencies to be included in the app and all extensions/frameworks
abstract_target 'GlobalDependencies' do
# FIXME: If https://github.com/jedisct1/swift-sodium/pull/249 gets resolved then revert this back to the standard pod
pod 'Sodium', :git => 'https://github.com/oxen-io/session-ios-swift-sodium.git', commit: '310c343'
pod 'GRDB.swift/SQLCipher'
# FIXME: Would be nice to migrate from CocoaPods to SwiftPackageManager (should allow us to speed up build time), haven't gone through all of the dependencies but currently unfortunately SQLCipher doesn't support SPM (for more info see: https://github.com/sqlcipher/sqlcipher/issues/371)
pod 'SQLCipher', '~> 4.5.7'
pod 'WebRTC-lib'
target 'Session' do
pod 'PureLayout', '~> 3.1.8'
pod 'NVActivityIndicatorView'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
pod 'DifferenceKit'
target 'SessionTests' do
inherit! :complete
pod 'Quick'
pod 'Nimble'
end
end
# Dependencies to be included only in all extensions/frameworks
abstract_target 'FrameworkAndExtensionDependencies' do
pod 'Curve25519Kit', git: 'https://github.com/oxen-io/session-ios-curve-25519-kit.git', branch: 'session-version'
pod 'SignalCoreKit', git: 'https://github.com/oxen-io/session-ios-core-kit', :commit => '3acbfe5'
target 'SessionNotificationServiceExtension'
# Dependencies that are shared across a number of extensions/frameworks but not all
abstract_target 'ExtendedDependencies' do
pod 'PureLayout', '~> 3.1.8'
target 'SessionShareExtension' do
pod 'NVActivityIndicatorView'
pod 'DifferenceKit'
end
target 'SignalUtilitiesKit' do
pod 'NVActivityIndicatorView'
pod 'SAMKeychain'
pod 'SwiftProtobuf', '~> 1.5.0'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
pod 'DifferenceKit'
end
target 'SessionMessagingKit' do
pod 'SAMKeychain'
pod 'SwiftProtobuf', '~> 1.5.0'
pod 'DifferenceKit'
target 'SessionMessagingKitTests' do
inherit! :complete
pod 'Quick'
pod 'Nimble'
# Need to include this for the tests because otherwise it won't actually build
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
end
end
target 'SessionUtilitiesKit' do
pod 'SAMKeychain'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
pod 'DifferenceKit'
target 'SessionUtilitiesKitTests' do
inherit! :complete
pod 'Quick'
pod 'Nimble'
end
end
end
target 'SessionSnodeKit' do
target 'SessionSnodeKitTests' do
inherit! :complete
pod 'Quick'
pod 'Nimble'
# Need to include these for the tests because otherwise it won't actually build
pod 'SAMKeychain'
pod 'PureLayout', '~> 3.1.8'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
pod 'DifferenceKit'
end
end
end
target 'SessionUIKit' do
pod 'GRDB.swift/SQLCipher'
pod 'DifferenceKit'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
end
end
# Actions to perform post-install
post_install do |installer|
set_minimum_deployment_target(installer)
end
def set_minimum_deployment_target(installer)
installer.pods_project.targets.each do |target|
target.build_configurations.each do |build_configuration|
build_configuration.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = '13.0'
end
end
end

@ -1,132 +0,0 @@
PODS:
- CocoaLumberjack (3.8.0):
- CocoaLumberjack/Core (= 3.8.0)
- CocoaLumberjack/Core (3.8.0)
- Curve25519Kit (2.1.0):
- CocoaLumberjack
- SignalCoreKit
- DifferenceKit (1.3.0):
- DifferenceKit/Core (= 1.3.0)
- DifferenceKit/UIKitExtension (= 1.3.0)
- DifferenceKit/Core (1.3.0)
- DifferenceKit/UIKitExtension (1.3.0):
- DifferenceKit/Core
- GRDB.swift/SQLCipher (6.24.1):
- SQLCipher (>= 3.4.2)
- libwebp (1.3.2):
- libwebp/demux (= 1.3.2)
- libwebp/mux (= 1.3.2)
- libwebp/sharpyuv (= 1.3.2)
- libwebp/webp (= 1.3.2)
- libwebp/demux (1.3.2):
- libwebp/webp
- libwebp/mux (1.3.2):
- libwebp/demux
- libwebp/sharpyuv (1.3.2)
- libwebp/webp (1.3.2):
- libwebp/sharpyuv
- Nimble (12.3.0)
- NVActivityIndicatorView (5.1.1):
- NVActivityIndicatorView/Base (= 5.1.1)
- NVActivityIndicatorView/Base (5.1.1)
- OpenSSL-Universal (1.1.2200)
- PureLayout (3.1.9)
- Quick (7.3.0)
- SAMKeychain (1.5.3)
- SignalCoreKit (1.0.0):
- CocoaLumberjack
- OpenSSL-Universal
- Sodium (0.9.1)
- SQLCipher (4.5.7):
- SQLCipher/standard (= 4.5.7)
- SQLCipher/common (4.5.7)
- SQLCipher/standard (4.5.7):
- SQLCipher/common
- SwiftProtobuf (1.5.0)
- WebRTC-lib (114.0.0)
- YYImage/Core (1.0.4)
- YYImage/libwebp (1.0.4):
- libwebp
- YYImage/Core
DEPENDENCIES:
- Curve25519Kit (from `https://github.com/oxen-io/session-ios-curve-25519-kit.git`, branch `session-version`)
- DifferenceKit
- GRDB.swift/SQLCipher
- Nimble
- NVActivityIndicatorView
- PureLayout (~> 3.1.8)
- Quick
- SAMKeychain
- SignalCoreKit (from `https://github.com/oxen-io/session-ios-core-kit`, commit `3acbfe5`)
- Sodium (from `https://github.com/oxen-io/session-ios-swift-sodium.git`, commit `310c343`)
- SQLCipher (~> 4.5.7)
- SwiftProtobuf (~> 1.5.0)
- WebRTC-lib
- YYImage/libwebp (from `https://github.com/signalapp/YYImage`)
SPEC REPOS:
trunk:
- CocoaLumberjack
- DifferenceKit
- GRDB.swift
- libwebp
- Nimble
- NVActivityIndicatorView
- OpenSSL-Universal
- PureLayout
- Quick
- SAMKeychain
- SQLCipher
- SwiftProtobuf
- WebRTC-lib
EXTERNAL SOURCES:
Curve25519Kit:
:branch: session-version
:git: https://github.com/oxen-io/session-ios-curve-25519-kit.git
SignalCoreKit:
:commit: 3acbfe5
:git: https://github.com/oxen-io/session-ios-core-kit
Sodium:
:commit: 310c343
:git: https://github.com/oxen-io/session-ios-swift-sodium.git
YYImage:
:git: https://github.com/signalapp/YYImage
CHECKOUT OPTIONS:
Curve25519Kit:
:commit: ee1bc83e61d9d672105eed85a4b8fbaec3d376f5
:git: https://github.com/oxen-io/session-ios-curve-25519-kit.git
SignalCoreKit:
:commit: 3acbfe5
:git: https://github.com/oxen-io/session-ios-core-kit
Sodium:
:commit: 310c343
:git: https://github.com/oxen-io/session-ios-swift-sodium.git
YYImage:
:commit: 62a4cede20bcf31da73d18163408e46a92f171c6
:git: https://github.com/signalapp/YYImage
SPEC CHECKSUMS:
CocoaLumberjack: 78abfb691154e2a9df8ded4350d504ee19d90732
Curve25519Kit: e63f9859ede02438ae3defc5e1a87e09d1ec7ee6
DifferenceKit: ab185c4d7f9cef8af3fcf593e5b387fb81e999ca
GRDB.swift: 136dcb5d8dddca50aae3ba7d77475f79e7232cd8
libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009
Nimble: f8a8219d16f176429b951e8f7e72df5c23ceddc0
NVActivityIndicatorView: 1f6c5687f1171810aa27a3296814dc2d7dec3667
OpenSSL-Universal: 6e1ae0555546e604dbc632a2b9a24a9c46c41ef6
PureLayout: 5fb5e5429519627d60d079ccb1eaa7265ce7cf88
Quick: d32871931c05547cb4e0bc9009d66a18b50d8558
SAMKeychain: 483e1c9f32984d50ca961e26818a534283b4cd5c
SignalCoreKit: 1fbd8732163ef76de16cd1107d1fa3684b607e5d
Sodium: a7d42cb46e789d2630fa552d35870b416ed055ae
SQLCipher: 5e6bfb47323635c8b657b1b27d25c5f1baf63bf5
SwiftProtobuf: 241400280f912735c1e1b9fe675fdd2c6c4d42e2
WebRTC-lib: d83df8976fa608b980f1d85796b3de66d60a1953
YYImage: f1ddd15ac032a58b78bbed1e012b50302d318331
PODFILE CHECKSUM: 6d85dee189f35e1e9a49cf8e95799a7087cfbdd5
COCOAPODS: 1.15.2

@ -572,7 +572,7 @@ public func serializedData() throws -> Data {
# if self.can_field_be_optional(field):
writer.add('guard proto.%s else {' % field.has_accessor_name() )
writer.push_indent()
writer.add('throw %s.invalidProtobuf(description: "\(String(describing: logTag)) missing required field: %s")' % ( writer.invalid_protobuf_error_name, field.name_swift, ) )
writer.add('throw %s.invalidProtobuf(description: "\\(NSStringFromClass(self)) missing required field: %s")' % ( writer.invalid_protobuf_error_name, field.name_swift, ) )
writer.pop_indent()
writer.add('}')
@ -648,7 +648,7 @@ public func serializedData() throws -> Data {
# description
writer.add('@objc public override var debugDescription: String {')
writer.push_indent()
writer.add('return "\(proto)"')
writer.add('return "\\(proto)"')
writer.pop_indent()
writer.add('}')
writer.newline()
@ -842,7 +842,7 @@ public func serializedData() throws -> Data {
writer.push_indent()
writer.add('var fields = [String]()')
for field in self.fields():
writer.add('fields.append("%s: \(proto.%s)")' % ( field.name_swift, field.name_swift, ) )
writer.add('fields.append("%s: \\(proto.%s)")' % ( field.name_swift, field.name_swift, ) )
writer.add('return "[" + fields.joined(separator: ", ") + "]"')
writer.pop_indent()
writer.add('}')

@ -222,7 +222,8 @@ for i in "${!TARGET_ARCHS[@]}"; do
-DBUILD_STATIC_DEPS=ON \
-DENABLE_VISIBILITY=ON \
-DSUBMODULE_CHECK=$submodule_check \
-DCMAKE_BUILD_TYPE=$build_type
-DCMAKE_BUILD_TYPE=$build_type \
-DLOCAL_MIRROR=https://oxen.rocks/deps
# Capture the exit status of the ./utils/static-bundle.sh command
EXIT_STATUS=$?

File diff suppressed because it is too large

@ -2,6 +2,6 @@
<Workspace
version = "1.0">
<FileRef
location = "self:Signal.xcodeproj">
location = "self:">
</FileRef>
</Workspace>

@ -0,0 +1,132 @@
{
"originHash" : "b77f342bc30b5d1971118f9ad70bf3af24442c4edb9a716222ad6bc0fb9b3f8e",
"pins" : [
{
"identity" : "cocoalumberjack",
"kind" : "remoteSourceControl",
"location" : "https://github.com/CocoaLumberjack/CocoaLumberjack.git",
"state" : {
"revision" : "4b8714a7fb84d42393314ce897127b3939885ec3",
"version" : "3.8.5"
}
},
{
"identity" : "cwlcatchexception",
"kind" : "remoteSourceControl",
"location" : "https://github.com/mattgallagher/CwlCatchException.git",
"state" : {
"revision" : "3ef6999c73b6938cc0da422f2c912d0158abb0a0",
"version" : "2.2.0"
}
},
{
"identity" : "cwlpreconditiontesting",
"kind" : "remoteSourceControl",
"location" : "https://github.com/mattgallagher/CwlPreconditionTesting.git",
"state" : {
"revision" : "2ef56b2caf25f55fa7eef8784c30d5a767550f54",
"version" : "2.2.1"
}
},
{
"identity" : "differencekit",
"kind" : "remoteSourceControl",
"location" : "https://github.com/ra1028/DifferenceKit.git",
"state" : {
"revision" : "073b9671ce2b9b5b96398611427a1f929927e428",
"version" : "1.3.0"
}
},
{
"identity" : "keychain-swift",
"kind" : "remoteSourceControl",
"location" : "https://github.com/evgenyneu/keychain-swift.git",
"state" : {
"revision" : "5e1b02b6a9dac2a759a1d5dbc175c86bd192a608",
"version" : "24.0.0"
}
},
{
"identity" : "libwebp-xcode",
"kind" : "remoteSourceControl",
"location" : "https://github.com/SDWebImage/libwebp-Xcode.git",
"state" : {
"revision" : "b2b1d20a90b14d11f6ef4241da6b81c1d3f171e4",
"version" : "1.3.2"
}
},
{
"identity" : "nimble",
"kind" : "remoteSourceControl",
"location" : "https://github.com/Quick/Nimble.git",
"state" : {
"revision" : "1c49fc1243018f81a7ea99cb5e0985b00096e9f4",
"version" : "13.3.0"
}
},
{
"identity" : "nvactivityindicatorview",
"kind" : "remoteSourceControl",
"location" : "https://github.com/ninjaprox/NVActivityIndicatorView.git",
"state" : {
"revision" : "121455c4e630fcb95aaefd7e4257b0c2e3cfe6d5",
"version" : "5.2.0"
}
},
{
"identity" : "quick",
"kind" : "remoteSourceControl",
"location" : "https://github.com/Quick/Quick.git",
"state" : {
"revision" : "26529ff2209c40ae50fd642b031f930d9d68ea02",
"version" : "7.5.0"
}
},
{
"identity" : "session-grdb-swift",
"kind" : "remoteSourceControl",
"location" : "https://github.com/oxen-io/session-grdb-swift",
"state" : {
"revision" : "52043c998154b39ecd8e069ba22244bf36464c61",
"version" : "106.27.1"
}
},
{
"identity" : "session-ios-yyimage",
"kind" : "remoteSourceControl",
"location" : "https://github.com/oxen-io/session-ios-yyimage.git",
"state" : {
"revision" : "14786afd2523f80be304b377f9dbab6b7904bf02",
"version" : "1.1.0"
}
},
{
"identity" : "swift-log",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-log",
"state" : {
"revision" : "9cb486020ebf03bfa5b5df985387a14a98744537",
"version" : "1.6.1"
}
},
{
"identity" : "swift-protobuf",
"kind" : "remoteSourceControl",
"location" : "https://github.com/apple/swift-protobuf.git",
"state" : {
"revision" : "7bf52ab1f5ee87aeb89f2a6b9bfc6369408476f7",
"version" : "1.5.0"
}
},
{
"identity" : "webrtc",
"kind" : "remoteSourceControl",
"location" : "https://github.com/stasel/WebRTC.git",
"state" : {
"revision" : "c487cd3e567ca0b58c59496475f1ca6f0572aec6",
"version" : "116.0.0"
}
}
],
"version" : 3
}

@ -186,7 +186,6 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
enableUBSanitizer = "YES"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"

@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Session.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>

@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>FILEHEADER</key>
<string>
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// </string>
</dict>
</plist>

@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PreviewsEnabled</key>
<false/>
</dict>
</plist>

@ -70,10 +70,10 @@ extension SessionCallManager {
private func requestTransaction(_ transaction: CXTransaction, completion: ((Error?) -> Void)? = nil) {
callController?.request(transaction) { error in
if let error = error {
SNLog("Error requesting transaction: \(error)")
Log.error("[SessionCallManager] Error requesting transaction: \(error)")
}
else {
SNLog("Requested transaction successfully")
Log.info("[SessionCallManager] Requested transaction successfully")
}
completion?(error)

@ -4,16 +4,15 @@ import Foundation
import AVFAudio
import CallKit
import SessionUtilitiesKit
import SignalCoreKit
extension SessionCallManager: CXProviderDelegate {
public func providerDidReset(_ provider: CXProvider) {
AssertIsOnMainThread()
Log.assertOnMainThread()
(currentCall as? SessionCall)?.endSessionCall()
}
public func provider(_ provider: CXProvider, perform action: CXStartCallAction) {
AssertIsOnMainThread()
Log.assertOnMainThread()
if startCallAction() {
action.fulfill()
}
@ -23,8 +22,8 @@ extension SessionCallManager: CXProviderDelegate {
}
public func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
AssertIsOnMainThread()
print("[CallKit] Perform CXAnswerCallAction")
Log.assertOnMainThread()
Log.debug("[CallKit] Perform CXAnswerCallAction")
guard let call: SessionCall = (self.currentCall as? SessionCall) else { return action.fail() }
@ -42,8 +41,8 @@ extension SessionCallManager: CXProviderDelegate {
}
public func provider(_ provider: CXProvider, perform action: CXEndCallAction) {
print("[CallKit] Perform CXEndCallAction")
AssertIsOnMainThread()
Log.debug("[CallKit] Perform CXEndCallAction")
Log.assertOnMainThread()
if endCallAction() {
action.fulfill()
@ -54,8 +53,8 @@ extension SessionCallManager: CXProviderDelegate {
}
public func provider(_ provider: CXProvider, perform action: CXSetMutedCallAction) {
print("[CallKit] Perform CXSetMutedCallAction, isMuted: \(action.isMuted)")
AssertIsOnMainThread()
Log.debug("[CallKit] Perform CXSetMutedCallAction, isMuted: \(action.isMuted)")
Log.assertOnMainThread()
if setMutedCallAction(isMuted: action.isMuted) {
action.fulfill()
@ -74,8 +73,8 @@ extension SessionCallManager: CXProviderDelegate {
}
public func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
print("[CallKit] Audio session did activate.")
AssertIsOnMainThread()
Log.debug("[CallKit] Audio session did activate.")
Log.assertOnMainThread()
guard let call: SessionCall = (self.currentCall as? SessionCall) else { return }
call.webRTCSession.audioSessionDidActivate(audioSession)
@ -83,8 +82,8 @@ extension SessionCallManager: CXProviderDelegate {
}
public func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
print("[CallKit] Audio session did deactivate.")
AssertIsOnMainThread()
Log.debug("[CallKit] Audio session did deactivate.")
Log.assertOnMainThread()
guard let call: SessionCall = (self.currentCall as? SessionCall) else { return }
call.webRTCSession.audioSessionDidDeactivate(audioSession)

@ -3,9 +3,9 @@
import UIKit
import CallKit
import GRDB
import WebRTC
import SessionUIKit
import SessionMessagingKit
import SignalCoreKit
import SignalUtilitiesKit
import SessionUtilitiesKit
@ -93,7 +93,7 @@ public final class SessionCallManager: NSObject, CallManagerProtocol {
}
public func reportOutgoingCall(_ call: SessionCall) {
AssertIsOnMainThread()
Log.assertOnMainThread()
UserDefaults.sharedLokiProject?[.isCallOngoing] = true
UserDefaults.sharedLokiProject?[.lastCallPreOffer] = Date()
@ -170,11 +170,18 @@ public final class SessionCallManager: NSObject, CallManagerProtocol {
call.updateCallMessage(mode: .local)
}
call.webRTCSession.dropConnection()
(call as? SessionCall)?.webRTCSession.dropConnection()
self.currentCall = nil
handleCallEnded()
}
public func currentWebRTCSessionMatches(callId: String) -> Bool {
return (
WebRTCSession.current != nil &&
WebRTCSession.current?.uuid == callId
)
}
// MARK: - Util
private func disableUnsupportedFeatures(callUpdate: CXCallUpdate) {
@ -243,6 +250,24 @@ public final class SessionCallManager: NSObject, CallManagerProtocol {
}
}
public func handleICECandidates(message: CallMessage, sdpMLineIndexes: [UInt32], sdpMids: [String]) {
guard
let currentWebRTCSession = WebRTCSession.current,
currentWebRTCSession.uuid == message.uuid
else { return }
var candidates: [RTCIceCandidate] = []
let sdps = message.sdps
for i in 0..<sdps.count {
let sdp = sdps[i]
let sdpMLineIndex = sdpMLineIndexes[i]
let sdpMid = sdpMids[i]
let candidate = RTCIceCandidate(sdp: sdp, sdpMLineIndex: Int32(sdpMLineIndex), sdpMid: sdpMid)
candidates.append(candidate)
}
currentWebRTCSession.handleICECandidates(candidates)
}
public func handleAnswerMessage(_ message: CallMessage) {
guard Singleton.hasAppContext else { return }
guard Thread.isMainThread else {

@ -520,13 +520,14 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
}
private func addFloatingVideoView() {
guard Singleton.hasAppContext else { return }
let safeAreaInsets = UIApplication.shared.keyWindow?.safeAreaInsets
Singleton.appContext.mainWindow?.addSubview(floatingViewContainer)
floatingViewContainer.autoPinEdge(toSuperviewEdge: .right, withInset: Values.smallSpacing)
let topMargin = (safeAreaInsets?.top ?? 0) + Values.veryLargeSpacing
floatingViewContainer.autoPinEdge(toSuperviewEdge: .top, withInset: topMargin)
guard
Singleton.hasAppContext,
let window: UIWindow = Singleton.appContext.mainWindow
else { return }
window.addSubview(floatingViewContainer)
floatingViewContainer.pin(.top, to: .top, of: window, withInset: (window.safeAreaInsets.top + Values.veryLargeSpacing))
floatingViewContainer.pin(.right, to: .right, of: window, withInset: -Values.smallSpacing)
}
override func viewDidAppear(_ animated: Bool) {
@ -574,8 +575,8 @@ final class CallVC: UIViewController, VideoPreviewDelegate {
switch UIDevice.current.orientation {
case .portrait: rotateAllButtons(rotationAngle: 0)
case .portraitUpsideDown: rotateAllButtons(rotationAngle: .pi)
case .landscapeLeft: rotateAllButtons(rotationAngle: .halfPi)
case .landscapeRight: rotateAllButtons(rotationAngle: .pi + .halfPi)
case .landscapeLeft: rotateAllButtons(rotationAngle: .pi * 0.5)
case .landscapeRight: rotateAllButtons(rotationAngle: .pi * 1.5)
default: break
}
}

@ -21,7 +21,7 @@ final class CameraManager : NSObject {
private var videoInput: AVCaptureDeviceInput?
func prepare() {
print("[Calls] Preparing camera.")
Log.debug("[CameraManager] Preparing camera.")
addNewVideoIO(position: .front)
}
@ -41,7 +41,7 @@ final class CameraManager : NSObject {
connection.automaticallyAdjustsVideoMirroring = false
connection.isVideoMirrored = (position == .front)
} else {
SNLog("Couldn't add video data output to capture session.")
Log.info("[CameraManager] Couldn't add video data output to capture session.")
}
}
@ -50,7 +50,7 @@ final class CameraManager : NSObject {
// Note: The 'startRunning' task is blocking so we want to do it on a non-main thread
DispatchQueue.global(qos: .userInitiated).async { [weak self] in
print("[Calls] Starting camera.")
Log.debug("[CameraManager] Starting camera.")
self?.isCapturing = true
self?.captureSession.startRunning()
}
@ -61,7 +61,7 @@ final class CameraManager : NSObject {
// Note: The 'stopRunning' task is blocking so we want to do it on a non-main thread
DispatchQueue.global(qos: .userInitiated).async { [weak self] in
print("[Calls] Stopping camera.")
Log.debug("[CameraManager] Stopping camera.")
self?.isCapturing = false
self?.captureSession.stopRunning()
}
@ -91,6 +91,6 @@ extension CameraManager : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptur
}
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
print("[Calls] Frame dropped.")
Log.debug("[CameraManager] Frame dropped.")
}
}

@ -3,7 +3,6 @@
import WebRTC
import Foundation
import SessionUtilitiesKit
import SignalCoreKit
#if targetEnvironment(simulator)
// Note: 'RTCMTLVideoView' doesn't seem to work on the simulator so use 'RTCEAGLVideoView' instead
@ -20,7 +19,7 @@ class RemoteVideoView: TargetView {
super.renderFrame(frame)
guard let frame = frame else { return }
if UIDevice.current.isIPad {
DispatchMainThreadSafe {
Threading.dispatchMainThreadSafe {
#if targetEnvironment(simulator)
self.contentMode = .scaleAspectFit
#else

@ -36,7 +36,7 @@ final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
let result = UIButton(type: .custom)
result.setImage(
UIImage(named: "AnswerCall")?
.resizedImage(to: CGSize(width: 24.8, height: 24.8))?
.resized(to: CGSize(width: 24.8, height: 24.8))?
.withRenderingMode(.alwaysTemplate),
for: .normal
)
@ -54,7 +54,7 @@ final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
let result = UIButton(type: .custom)
result.setImage(
UIImage(named: "EndCall")?
.resizedImage(to: CGSize(width: 29.6, height: 11.2))?
.resized(to: CGSize(width: 29.6, height: 11.2))?
.withRenderingMode(.alwaysTemplate),
for: .normal
)
@ -126,7 +126,7 @@ final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
self.addSubview(stackView)
stackView.center(.vertical, in: self)
stackView.autoPinWidthToSuperview(withMargin: Values.mediumSpacing)
stackView.set(.width, to: .width, of: self, withOffset: Values.mediumSpacing)
}
private func setUpGestureRecognizers() {
@ -222,8 +222,8 @@ final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
window.addSubview(self)
let topMargin = window.safeAreaInsets.top - Values.smallSpacing
self.autoPinWidthToSuperview(withMargin: Values.smallSpacing)
self.autoPinEdge(toSuperviewEdge: .top, withInset: topMargin)
self.set(.width, to: .width, of: window, withOffset: Values.smallSpacing)
self.pin(.top, to: .top, of: window, withInset: topMargin)
UIView.animate(withDuration: 0.5, delay: 0, options: [], animations: {
self.alpha = 1.0

@ -167,11 +167,11 @@ final class MiniCallView: UIView, RTCVideoViewDelegate {
else { return }
window.addSubview(self)
left = self.autoPinEdge(toSuperviewEdge: .left)
left = self.pin(.left, to: .left, of: window)
left?.isActive = false
right = self.autoPinEdge(toSuperviewEdge: .right, withInset: Values.smallSpacing)
top = self.autoPinEdge(toSuperviewEdge: .top, withInset: topMargin)
bottom = self.autoPinEdge(toSuperviewEdge: .bottom, withInset: bottomMargin)
right = self.pin(.right, to: .right, of: window, withInset: -Values.smallSpacing)
top = self.pin(.top, to: .top, of: window, withInset: topMargin)
bottom = self.pin(.bottom, to: .bottom, of: window, withInset: -bottomMargin)
bottom?.isActive = false
UIView.animate(withDuration: 0.5, delay: 0, options: [], animations: {
@ -208,7 +208,7 @@ final class MiniCallView: UIView, RTCVideoViewDelegate {
func persistCurrentPosition(newSize: CGSize) {
let currentCenter = self.center
if currentCenter.x < ((self.superview?.width() ?? 0) / 2) {
if currentCenter.x < ((self.superview?.bounds.width ?? 0) / 2) {
left?.isActive = true
right?.isActive = false
}
@ -218,7 +218,7 @@ final class MiniCallView: UIView, RTCVideoViewDelegate {
}
let willTouchTop: Bool = (currentCenter.y < ((newSize.height / 2) + topMargin))
let willTouchBottom: Bool = ((currentCenter.y + (newSize.height / 2)) >= (self.superview?.height() ?? 0))
let willTouchBottom: Bool = ((currentCenter.y + (newSize.height / 2)) >= (self.superview?.bounds.height ?? 0))
if willTouchBottom {
top?.isActive = false
@ -244,11 +244,11 @@ final class MiniCallView: UIView, RTCVideoViewDelegate {
}
if self.right?.isActive == true {
self.right?.constant = (self.frame.maxX - (self.superview?.width() ?? 0))
self.right?.constant = (self.frame.maxX - (self.superview?.bounds.width ?? 0))
}
if self.bottom?.isActive == true {
self.bottom?.constant = (self.frame.maxY - (self.superview?.height() ?? 0))
self.bottom?.constant = (self.frame.maxY - (self.superview?.bounds.height ?? 0))
}
self.window?.bringSubviewToFront(self)

@ -449,8 +449,8 @@ extension WebRTCSession {
let audioSession = RTCAudioSession.sharedInstance()
audioSession.lockForConfiguration()
do {
try audioSession.setCategory(AVAudioSession.Category.playAndRecord.rawValue)
try audioSession.setMode(AVAudioSession.Mode.voiceChat.rawValue)
try audioSession.setCategory(AVAudioSession.Category.playAndRecord)
try audioSession.setMode(AVAudioSession.Mode.voiceChat)
try audioSession.overrideOutputAudioPort(outputAudioPort)
try audioSession.setActive(true)
} catch let error {

@ -79,7 +79,7 @@ extension ContextMenuVC {
themeBackgroundColor = .clear
iconImageView.image = action.icon?
.resizedImage(to: CGSize(width: ActionView.iconSize, height: ActionView.iconSize))?
.resized(to: CGSize(width: ActionView.iconSize, height: ActionView.iconSize))?
.withRenderingMode(.alwaysTemplate)
titleLabel.text = action.title
setUpSubtitle()

@ -3,7 +3,6 @@
import UIKit
import GRDB
import SignalUtilitiesKit
import SignalCoreKit
import SessionUIKit
import SessionUtilitiesKit
@ -72,7 +71,7 @@ extension ConversationSearchController: UISearchControllerDelegate {
extension ConversationSearchController: UISearchResultsUpdating {
public func updateSearchResults(for searchController: UISearchController) {
Logger.verbose("searchBar.text: \( searchController.searchBar.text ?? "<blank>")")
Log.verbose("searchBar.text: \( searchController.searchBar.text ?? "<blank>")")
guard
let searchText: String = searchController.searchBar.text?.stripped,
@ -262,7 +261,6 @@ public final class SearchResultsBar: UIView {
}
@objc public func handleDownButtonTapped() {
Logger.debug("")
guard let results: [Interaction.TimestampInfo] = results.wrappedValue else { return }
guard let currentIndex: Int = currentIndex, currentIndex > 0 else { return }

@ -7,7 +7,6 @@ import Combine
import CoreServices
import Photos
import PhotosUI
import Sodium
import GRDB
import SessionUIKit
import SessionMessagingKit
@ -191,10 +190,9 @@ extension ConversationVC:
didApproveAttachments attachments: [SignalAttachment],
forThreadId threadId: String,
threadVariant: SessionThread.Variant,
messageText: String?,
using dependencies: Dependencies
messageText: String?
) {
sendMessage(text: (messageText ?? ""), attachments: attachments, using: dependencies)
sendMessage(text: (messageText ?? ""), attachments: attachments, using: viewModel.dependencies)
resetMentions()
dismiss(animated: true) { [weak self] in
@ -222,10 +220,9 @@ extension ConversationVC:
didApproveAttachments attachments: [SignalAttachment],
forThreadId threadId: String,
threadVariant: SessionThread.Variant,
messageText: String?,
using dependencies: Dependencies
messageText: String?
) {
sendMessage(text: (messageText ?? ""), attachments: attachments, using: dependencies)
sendMessage(text: (messageText ?? ""), attachments: attachments, using: viewModel.dependencies)
resetMentions()
dismiss(animated: true) { [weak self] in
@ -301,11 +298,12 @@ extension ConversationVC:
let threadId: String = self.viewModel.threadData.threadId
let threadVariant: SessionThread.Variant = self.viewModel.threadData.threadVariant
Permissions.requestLibraryPermissionIfNeeded { [weak self] in
Permissions.requestLibraryPermissionIfNeeded { [weak self, dependencies = viewModel.dependencies] in
DispatchQueue.main.async {
let sendMediaNavController = SendMediaNavigationController.showingMediaLibraryFirst(
threadId: threadId,
threadVariant: threadVariant
threadVariant: threadVariant,
using: dependencies
)
sendMediaNavController.sendMediaNavDelegate = self
sendMediaNavController.modalPresentationStyle = .fullScreen
@ -325,7 +323,8 @@ extension ConversationVC:
let sendMediaNavController = SendMediaNavigationController.showingCameraFirst(
threadId: self.viewModel.threadData.threadId,
threadVariant: self.viewModel.threadData.threadVariant
threadVariant: self.viewModel.threadData.threadVariant,
using: self.viewModel.dependencies
)
sendMediaNavController.sendMediaNavDelegate = self
sendMediaNavController.modalPresentationStyle = .fullScreen
@ -382,7 +381,7 @@ extension ConversationVC:
}
let fileName = urlResourceValues.name ?? NSLocalizedString("ATTACHMENT_DEFAULT_FILENAME", comment: "")
guard let dataSource = DataSourcePath.dataSource(with: url, shouldDeleteOnDeallocation: false) else {
guard let dataSource = DataSourcePath(fileUrl: url, shouldDeleteOnDeinit: false) else {
DispatchQueue.main.async { [weak self] in
let modal: ConfirmationModal = ConfirmationModal(
targetView: self?.view,
@ -410,26 +409,28 @@ extension ConversationVC:
}
func showAttachmentApprovalDialog(for attachments: [SignalAttachment]) {
let navController = AttachmentApprovalViewController.wrappedInNavController(
guard let navController = AttachmentApprovalViewController.wrappedInNavController(
threadId: self.viewModel.threadData.threadId,
threadVariant: self.viewModel.threadData.threadVariant,
attachments: attachments,
approvalDelegate: self
)
approvalDelegate: self,
using: self.viewModel.dependencies
) else { return }
navController.modalPresentationStyle = .fullScreen
present(navController, animated: true, completion: nil)
}
func showAttachmentApprovalDialogAfterProcessingVideo(at url: URL, with fileName: String) {
ModalActivityIndicatorViewController.present(fromViewController: self, canCancel: true, message: nil) { [weak self] modalActivityIndicator in
let dataSource = DataSourcePath.dataSource(with: url, shouldDeleteOnDeallocation: false)!
ModalActivityIndicatorViewController.present(fromViewController: self, canCancel: true, message: nil) { [weak self, dependencies = viewModel.dependencies] modalActivityIndicator in
let dataSource = DataSourcePath(fileUrl: url, shouldDeleteOnDeinit: false)!
dataSource.sourceFilename = fileName
SignalAttachment
.compressVideoAsMp4(
dataSource: dataSource,
dataUTI: kUTTypeMPEG4 as String
dataUTI: kUTTypeMPEG4 as String,
using: dependencies
)
.attachmentPublisher
.sinkUntilComplete(
@ -714,15 +715,16 @@ extension ConversationVC:
func didPasteImageFromPasteboard(_ image: UIImage) {
guard let imageData = image.jpegData(compressionQuality: 1.0) else { return }
let dataSource = DataSourceValue.dataSource(with: imageData, utiType: kUTTypeJPEG as String)
let dataSource = DataSourceValue(data: imageData, utiType: kUTTypeJPEG as String)
let attachment = SignalAttachment.attachment(dataSource: dataSource, dataUTI: kUTTypeJPEG as String, imageQuality: .medium)
let approvalVC = AttachmentApprovalViewController.wrappedInNavController(
guard let approvalVC = AttachmentApprovalViewController.wrappedInNavController(
threadId: self.viewModel.threadData.threadId,
threadVariant: self.viewModel.threadData.threadVariant,
attachments: [ attachment ],
approvalDelegate: self
)
approvalDelegate: self,
using: self.viewModel.dependencies
) else { return }
approvalVC.modalPresentationStyle = .fullScreen
self.present(approvalVC, animated: true, completion: nil)
@ -1127,7 +1129,7 @@ extension ConversationVC:
if
attachment.isText ||
attachment.isMicrosoftDoc ||
attachment.contentType == OWSMimeTypeApplicationPdf
attachment.contentType == MimeTypeUtil.MimeType.applicationPdf
{
// FIXME: If given an invalid text file (eg with binary data) this hangs forever
// Note: I tried dispatching after a short delay, detecting that the new UI is invalid and dismissing it
@ -1253,14 +1255,18 @@ extension ConversationVC:
func startThread(with sessionId: String, openGroupServer: String?, openGroupPublicKey: String?) {
guard viewModel.threadData.canWrite else { return }
// FIXME: Add in support for starting a thread with a 'blinded25' id
guard SessionId.Prefix(from: sessionId) != .blinded25 else { return }
guard SessionId.Prefix(from: sessionId) == .blinded15 else {
guard (try? SessionId.Prefix(from: sessionId)) != .blinded25 else { return }
guard (try? SessionId.Prefix(from: sessionId)) == .blinded15 else {
Storage.shared.write { db in
try SessionThread
.fetchOrCreate(db, id: sessionId, variant: .contact, shouldBeVisible: nil)
}
let conversationVC: ConversationVC = ConversationVC(threadId: sessionId, threadVariant: .contact)
let conversationVC: ConversationVC = ConversationVC(
threadId: sessionId,
threadVariant: .contact,
using: viewModel.dependencies
)
self.navigationController?.pushViewController(conversationVC, animated: true)
return
@ -1294,7 +1300,11 @@ extension ConversationVC:
guard let threadId: String = targetThreadId else { return }
let conversationVC: ConversationVC = ConversationVC(threadId: threadId, threadVariant: .contact)
let conversationVC: ConversationVC = ConversationVC(
threadId: threadId,
threadVariant: .contact,
using: viewModel.dependencies
)
self.navigationController?.pushViewController(conversationVC, animated: true)
}
@ -1976,7 +1986,7 @@ extension ConversationVC:
attachment.state == .downloaded ||
attachment.state == .uploaded
),
let utiType: String = MIMETypeUtil.utiType(forMIMEType: attachment.contentType),
let utiType: String = MimeTypeUtil.utiType(for: attachment.contentType),
let originalFilePath: String = attachment.originalFilePath,
let data: Data = try? Data(contentsOf: URL(fileURLWithPath: originalFilePath))
else { return }
@ -2216,7 +2226,6 @@ extension ConversationVC:
let actionSheet: UIAlertController = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
actionSheet.addAction(UIAlertAction(
title: "delete_message_for_me".localized(),
accessibilityIdentifier: "Delete for me",
style: .destructive
) { [weak self] _ in
Storage.shared.writeAsync { db in
@ -2251,7 +2260,6 @@ extension ConversationVC:
)
}
}(),
accessibilityIdentifier: "Delete for everyone",
style: .destructive
) { [weak self] _ in
let completeServerDeletion = { [weak self] in
@ -2572,7 +2580,7 @@ extension ConversationVC:
}
// Get data
let dataSourceOrNil = DataSourcePath.dataSource(with: audioRecorder.url, shouldDeleteOnDeallocation: true)
let dataSourceOrNil = DataSourcePath(fileUrl: audioRecorder.url, shouldDeleteOnDeinit: true)
self.audioRecorder = nil
guard let dataSource = dataSourceOrNil else { return SNLog("Couldn't load recorded data.") }

@ -331,8 +331,13 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
// MARK: - Initialization
init(threadId: String, threadVariant: SessionThread.Variant, focusedInteractionInfo: Interaction.TimestampInfo? = nil) {
self.viewModel = ConversationViewModel(threadId: threadId, threadVariant: threadVariant, focusedInteractionInfo: focusedInteractionInfo)
init(
threadId: String,
threadVariant: SessionThread.Variant,
focusedInteractionInfo: Interaction.TimestampInfo? = nil,
using dependencies: Dependencies
) {
self.viewModel = ConversationViewModel(threadId: threadId, threadVariant: threadVariant, focusedInteractionInfo: focusedInteractionInfo, using: dependencies)
Storage.shared.addObserver(viewModel.pagedDataObserver)
@ -543,7 +548,8 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
!LibSession.conversationInConfig(
threadId: threadId,
threadVariant: viewModel.threadData.threadVariant,
visibleOnly: false
visibleOnly: false,
using: viewModel.dependencies
)
{
Storage.shared.writeAsync { db in
@ -591,8 +597,8 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
guard
let sessionId: String = self?.viewModel.threadData.threadId,
(
SessionId.Prefix(from: sessionId) == .blinded15 ||
SessionId.Prefix(from: sessionId) == .blinded25
(try? SessionId.Prefix(from: sessionId)) == .blinded15 ||
(try? SessionId.Prefix(from: sessionId)) == .blinded25
),
let blindedLookup: BlindedIdLookup = Storage.shared.read({ db in
try BlindedIdLookup
@ -1788,12 +1794,14 @@ final class ConversationVC: BaseVC, LibSessionRespondingViewController, Conversa
ipadCancelButton.setThemeTitleColor(.textPrimary, for: .normal)
searchBarContainer.addSubview(ipadCancelButton)
ipadCancelButton.pin(.trailing, to: .trailing, of: searchBarContainer)
ipadCancelButton.autoVCenterInSuperview()
searchBar.autoPinEdgesToSuperviewEdges(with: UIEdgeInsets.zero, excludingEdge: .trailing)
ipadCancelButton.center(.vertical, in: searchBarContainer)
searchBar.pin(.top, to: .top, of: searchBar)
searchBar.pin(.leading, to: .leading, of: searchBar)
searchBar.pin(.trailing, to: .leading, of: ipadCancelButton, withInset: -Values.smallSpacing)
searchBar.pin(.bottom, to: .bottom, of: searchBar)
}
else {
searchBar.autoPinEdgesToSuperviewMargins()
searchBar.pin(toMarginsOf: searchBarContainer)
}
// Nav bar buttons

@ -55,6 +55,7 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
private let initialUnreadInteractionId: Int64?
private let markAsReadTrigger: PassthroughSubject<(SessionThreadViewModel.ReadTarget, Int64?), Never> = PassthroughSubject()
private var markAsReadPublisher: AnyPublisher<Void, Never>?
public let dependencies: Dependencies
public lazy var blockedBannerMessage: String = {
let threadData: SessionThreadViewModel = self._threadData.wrappedValue
@ -74,7 +75,12 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
// MARK: - Initialization
init(threadId: String, threadVariant: SessionThread.Variant, focusedInteractionInfo: Interaction.TimestampInfo?) {
init(
threadId: String,
threadVariant: SessionThread.Variant,
focusedInteractionInfo: Interaction.TimestampInfo?,
using dependencies: Dependencies
) {
typealias InitialData = (
currentUserPublicKey: String,
initialUnreadInteractionInfo: Interaction.TimestampInfo?,
@ -177,6 +183,7 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
)
)
self.pagedDataObserver = nil
self.dependencies = dependencies
// Note: Since this references self we need to finish initializing before setting it, we
// also want to skip the initial query and trigger it async so that the push animation
@ -257,7 +264,7 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
private var lastInteractionIdMarkedAsRead: Int64? = nil
private var lastInteractionTimestampMsMarkedAsRead: Int64 = 0
public private(set) var unobservedInteractionDataChanges: ([SectionModel], StagedChangeset<[SectionModel]>)?
public private(set) var unobservedInteractionDataChanges: [SectionModel]?
public private(set) var interactionData: [SectionModel] = []
public private(set) var reactionExpandedInteractionIds: Set<Int64> = []
public private(set) var pagedDataObserver: PagedDatabaseObserver<Interaction, MessageViewModel>?
@ -266,14 +273,15 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
didSet {
// When starting to observe interaction changes we want to trigger a UI update just in case the
// data was changed while we weren't observing
if let changes: ([SectionModel], StagedChangeset<[SectionModel]>) = self.unobservedInteractionDataChanges {
let performChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ())? = onInteractionChange
switch Thread.isMainThread {
case true: performChange?(changes.0, changes.1)
case false: DispatchQueue.main.async { performChange?(changes.0, changes.1) }
}
if let changes: [SectionModel] = self.unobservedInteractionDataChanges {
PagedData.processAndTriggerUpdates(
updatedData: changes,
currentDataRetriever: { [weak self] in self?.interactionData },
onDataChangeRetriever: { [weak self] in self?.onInteractionChange },
onUnobservedDataChange: { [weak self] updatedData in
self?.unobservedInteractionDataChanges = updatedData
}
)
self.unobservedInteractionDataChanges = nil
}
}
@ -417,11 +425,8 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
),
currentDataRetriever: { self?.interactionData },
onDataChangeRetriever: { self?.onInteractionChange },
onUnobservedDataChange: { updatedData, changeset in
self?.unobservedInteractionDataChanges = (changeset.isEmpty ?
nil :
(updatedData, changeset)
)
onUnobservedDataChange: { updatedData in
self?.unobservedInteractionDataChanges = updatedData
}
)
}
@ -566,7 +571,7 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
let linkPreviewAttachment: Attachment? = linkPreviewDraft.map { draft in
try? LinkPreview.generateAttachmentIfPossible(
imageData: draft.jpegImageData,
mimeType: OWSMimeTypeImageJpeg
mimeType: MimeTypeUtil.MimeType.imageJpeg
)
}
@ -681,7 +686,7 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
guard let currentPageInfo: PagedData.PageInfo = self.pagedDataObserver?.pageInfo.wrappedValue else { return }
/// **MUST** have the same logic as in the 'PagedDataObserver.onChangeUnsorted' above
let currentData: [SectionModel] = (unobservedInteractionDataChanges?.0 ?? interactionData)
let currentData: [SectionModel] = (unobservedInteractionDataChanges ?? interactionData)
PagedData.processAndTriggerUpdates(
updatedData: process(
@ -692,11 +697,8 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
),
currentDataRetriever: { [weak self] in self?.interactionData },
onDataChangeRetriever: { [weak self] in self?.onInteractionChange },
onUnobservedDataChange: { [weak self] updatedData, changeset in
self?.unobservedInteractionDataChanges = (changeset.isEmpty ?
nil :
(updatedData, changeset)
)
onUnobservedDataChange: { [weak self] updatedData in
self?.unobservedInteractionDataChanges = updatedData
}
)
}

@ -3,7 +3,6 @@
import UIKit
import SessionUIKit
import SessionUtilitiesKit
import SignalCoreKit
protocol EmojiPickerCollectionViewDelegate: AnyObject {
func emojiPicker(_ emojiPicker: EmojiPickerCollectionView?, didSelectEmoji emoji: EmojiWithSkinTones)
@ -108,7 +107,7 @@ class EmojiPickerCollectionView: UICollectionView {
// This is not an exact calculation, but is simple and works for our purposes.
var numberOfColumns: Int {
Int((self.width()) / (EmojiPickerCollectionView.emojiWidth + EmojiPickerCollectionView.minimumSpacing))
Int(self.bounds.width / (EmojiPickerCollectionView.emojiWidth + EmojiPickerCollectionView.minimumSpacing))
}
// At max, we show 3 rows of recent emoji
@ -119,12 +118,12 @@ class EmojiPickerCollectionView: UICollectionView {
guard section > 0 || !hasRecentEmoji else { return Array(recentEmoji[0..<min(maxRecentEmoji, recentEmoji.count)]) }
guard let category = Emoji.Category.allCases[safe: section - categoryIndexOffset] else {
owsFailDebug("Unexpectedly missing category for section \(section)")
Log.error("[EmojiPickerCollectionView] Unexpectedly missing category for section \(section)")
return []
}
guard let categoryEmoji = allSendableEmojiByCategory[category] else {
owsFailDebug("Unexpectedly missing emoji for category \(category)")
Log.error("[EmojiPickerCollectionView] Unexpectedly missing emoji for category \(category)")
return []
}
@ -141,7 +140,7 @@ class EmojiPickerCollectionView: UICollectionView {
}
guard let category = Emoji.Category.allCases[safe: section - categoryIndexOffset] else {
owsFailDebug("Unexpectedly missing category for section \(section)")
Log.error("[EmojiPickerCollectionView] Unexpectedly missing category for section \(section)")
return nil
}
@ -229,7 +228,7 @@ extension EmojiPickerCollectionView: UIGestureRecognizerDelegate {
extension EmojiPickerCollectionView: UICollectionViewDelegate {
func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
guard let emoji = emojiForIndexPath(indexPath) else {
return owsFailDebug("Missing emoji for indexPath \(indexPath)")
return Log.error("[EmojiPickerCollectionView] Missing emoji for indexPath \(indexPath)")
}
pickerDelegate?.emojiPicker(self, didSelectEmoji: emoji)
@ -249,12 +248,12 @@ extension EmojiPickerCollectionView: UICollectionViewDataSource {
let cell = dequeueReusableCell(withReuseIdentifier: EmojiCell.reuseIdentifier, for: indexPath)
guard let emojiCell = cell as? EmojiCell else {
owsFailDebug("unexpected cell type")
Log.error("[EmojiPickerCollectionView] unexpected cell type")
return cell
}
guard let emoji = emojiForIndexPath(indexPath) else {
owsFailDebug("unexpected indexPath")
Log.error("[EmojiPickerCollectionView] unexpected indexPath")
return cell
}
@ -272,7 +271,7 @@ extension EmojiPickerCollectionView: UICollectionViewDataSource {
)
guard let sectionHeader = supplementaryView as? EmojiSectionHeader else {
owsFailDebug("unexpected supplementary view type")
Log.error("[EmojiPickerCollectionView] unexpected supplementary view type")
return supplementaryView
}
@ -292,7 +291,7 @@ extension EmojiPickerCollectionView: UICollectionViewDelegateFlowLayout {
let measureCell = EmojiSectionHeader()
measureCell.label.text = nameForSection(section)
return measureCell.sizeThatFits(CGSize(width: self.width(), height: .greatestFiniteMagnitude))
return measureCell.sizeThatFits(CGSize(width: self.bounds.width, height: .greatestFiniteMagnitude))
}
}
@ -308,7 +307,7 @@ private class EmojiCell: UICollectionViewCell {
emojiLabel.font = .boldSystemFont(ofSize: 32)
contentView.addSubview(emojiLabel)
emojiLabel.autoPinEdgesToSuperviewEdges()
emojiLabel.pin(to: contentView)
// For whatever reason, some emoji glyphs occasionally have different typographic widths on certain devices
// e.g. 👩🦰: 36x38.19, 👱: 40x38. (See: commit message for more info)
@ -344,8 +343,8 @@ private class EmojiSectionHeader: UICollectionReusableView {
label.font = .systemFont(ofSize: Values.smallFontSize)
label.themeTextColor = .textPrimary
addSubview(label)
label.autoPinEdgesToSuperviewMargins()
label.setCompressionResistanceHigh()
label.pin(to: self)
label.setCompressionResistance(to: .required)
}
required init?(coder: NSCoder) {

@ -2,8 +2,8 @@
import UIKit
import SessionUIKit
import SignalCoreKit
import SignalUtilitiesKit
import SessionUtilitiesKit
class EmojiSkinTonePicker: UIView {
let emoji: Emoji
@ -25,42 +25,42 @@ class EmojiSkinTonePicker: UIView {
let picker = EmojiSkinTonePicker(emoji: emoji, completion: completion)
guard let superview = referenceView.superview else {
owsFailDebug("reference is missing superview")
Log.error("[EmojiSkinTonePicker] Reference is missing superview")
return nil
}
superview.addSubview(picker)
picker.referenceOverlay.autoMatch(.width, to: .width, of: referenceView)
picker.referenceOverlay.autoMatch(.height, to: .height, of: referenceView, withOffset: 30)
picker.referenceOverlay.autoPinEdge(.leading, to: .leading, of: referenceView)
picker.referenceOverlay.set(.width, to: .width, of: referenceView)
picker.referenceOverlay.set(.height, to: .height, of: referenceView, withOffset: 30)
picker.referenceOverlay.pin(.leading, to: .leading, of: referenceView)
let leadingConstraint = picker.autoPinEdge(toSuperviewEdge: .leading)
let leadingConstraint = picker.pin(.leading, to: .leading, of: superview)
picker.layoutIfNeeded()
let halfWidth = picker.width() / 2
let halfWidth = picker.bounds.width / 2
let margin: CGFloat = 8
if (halfWidth + margin) > referenceView.center.x {
leadingConstraint.constant = margin
} else if (halfWidth + margin) > (superview.width() - referenceView.center.x) {
leadingConstraint.constant = superview.width() - picker.width() - margin
} else if (halfWidth + margin) > (superview.bounds.width - referenceView.center.x) {
leadingConstraint.constant = superview.bounds.width - picker.bounds.width - margin
} else {
leadingConstraint.constant = referenceView.center.x - halfWidth
}
let distanceFromTop = referenceView.frame.minY - superview.bounds.minY
if distanceFromTop > picker.containerView.height() {
picker.containerView.autoPinEdge(toSuperviewEdge: .top)
picker.referenceOverlay.autoPinEdge(.top, to: .bottom, of: picker.containerView, withOffset: -20)
picker.referenceOverlay.autoPinEdge(toSuperviewEdge: .bottom)
picker.autoPinEdge(.bottom, to: .bottom, of: referenceView)
if distanceFromTop > picker.containerView.bounds.height {
picker.containerView.pin(.top, to: .top, of: picker)
picker.referenceOverlay.pin(.top, to: .bottom, of: picker.containerView, withInset: -20)
picker.referenceOverlay.pin(.bottom, to: .bottom, of: picker)
picker.pin(.bottom, to: .bottom, of: referenceView)
} else {
picker.containerView.autoPinEdge(toSuperviewEdge: .bottom)
picker.referenceOverlay.autoPinEdge(.bottom, to: .top, of: picker.containerView, withOffset: 20)
picker.referenceOverlay.autoPinEdge(toSuperviewEdge: .top)
picker.autoPinEdge(.top, to: .top, of: referenceView)
picker.containerView.pin(.bottom, to: .bottom, of: picker)
picker.referenceOverlay.pin(.bottom, to: .top, of: picker.containerView, withInset: 20)
picker.referenceOverlay.pin(.top, to: .top, of: picker)
picker.pin(.top, to: .top, of: referenceView)
}
picker.alpha = 0
@ -108,7 +108,7 @@ class EmojiSkinTonePicker: UIView {
}
init(emoji: EmojiWithSkinTones, completion: @escaping (EmojiWithSkinTones?) -> Void) {
owsAssertDebug(emoji.baseEmoji!.hasSkinTones)
Log.assert(emoji.baseEmoji!.hasSkinTones)
self.emoji = emoji.baseEmoji!
self.preferredSkinTonePermutation = emoji.skinTones
@ -128,8 +128,8 @@ class EmojiSkinTonePicker: UIView {
containerView.themeBackgroundColor = .backgroundSecondary
containerView.layer.cornerRadius = 11
addSubview(containerView)
containerView.autoPinWidthToSuperview()
containerView.setCompressionResistanceHigh()
containerView.set(.width, to: .width, of: self)
containerView.setCompressionResistance(to: .required)
if emoji.baseEmoji!.allowsMultipleSkinTones {
prepareForMultipleSkinTones()
@ -156,14 +156,14 @@ class EmojiSkinTonePicker: UIView {
hStack.axis = .horizontal
hStack.spacing = 8
containerView.addSubview(hStack)
hStack.autoPinEdgesToSuperviewMargins()
hStack.pin(toMarginsOf: containerView)
hStack.addArrangedSubview(yellowButton)
hStack.addArrangedSubview(.spacer(withWidth: 2))
let divider = UIView()
divider.autoSetDimension(.width, toSize: 1)
divider.set(.width, to: 1)
divider.themeBackgroundColor = .borderSeparator
hStack.addArrangedSubview(divider)
@ -182,7 +182,7 @@ class EmojiSkinTonePicker: UIView {
private lazy var skinToneComponentEmoji: [Emoji] = {
guard let skinToneComponentEmoji = emoji.skinToneComponentEmoji else {
owsFailDebug("missing skin tone component emoji \(emoji)")
Log.error("[EmojiSkinTonePicker] Missing skin tone component emoji \(emoji)")
return []
}
return skinToneComponentEmoji
@ -246,7 +246,7 @@ class EmojiSkinTonePicker: UIView {
vStack.axis = .vertical
vStack.spacing = 6
containerView.addSubview(vStack)
vStack.autoPinEdgesToSuperviewMargins()
vStack.pin(toMarginsOf: containerView)
for (idx, emoji) in skinToneComponentEmoji.enumerated() {
let skinToneButtons = self.skinToneButtons(for: emoji) { [weak self] emojiWithSkinTone in
@ -270,7 +270,7 @@ class EmojiSkinTonePicker: UIView {
}
let divider = UIView()
divider.autoSetDimension(.height, toSize: 1)
divider.set(.height, to: 1)
divider.themeBackgroundColor = .borderSeparator
vStack.addArrangedSubview(divider)
@ -282,8 +282,8 @@ class EmojiSkinTonePicker: UIView {
hStack.axis = .horizontal
vStack.addArrangedSubview(hStack)
leftSpacer.autoMatch(.width, to: .width, of: rightSpacer)
middleSpacer.autoMatch(.width, to: .width, of: rightSpacer)
leftSpacer.set(.width, to: .width, of: rightSpacer)
middleSpacer.set(.width, to: .width, of: rightSpacer)
}
// MARK: - Button Helpers
@ -304,7 +304,8 @@ class EmojiSkinTonePicker: UIView {
button.setThemeBackgroundColor(.backgroundPrimary, for: .selected)
button.layer.cornerRadius = 6
button.clipsToBounds = true
button.autoSetDimensions(to: CGSize(width: 38, height: 38))
button.set(.width, to: 38)
button.set(.height, to: 38)
return button
}
}

@ -75,13 +75,13 @@ final class CallMessageCell: MessageCell {
withInset: -((CallMessageCell.inset * 2) + infoImageView.bounds.size.width)
)
label.pin(.bottom, to: .bottom, of: result, withInset: -CallMessageCell.inset)
result.addSubview(iconImageView)
iconImageView.autoVCenterInSuperview()
result.addSubview(iconImageView)
iconImageView.center(.vertical, in: result)
iconImageView.pin(.left, to: .left, of: result, withInset: CallMessageCell.inset)
result.addSubview(infoImageView)
infoImageView.autoVCenterInSuperview()
result.addSubview(infoImageView)
infoImageView.center(.vertical, in: result)
infoImageView.pin(.right, to: .right, of: result, withInset: -CallMessageCell.inset)
return result

@ -29,7 +29,7 @@ final class DeletedMessageView: UIView {
private func setUpViewHierarchy(textColor: ThemeValue) {
// Image view
let icon = UIImage(named: "ic_trash")?
.resizedImage(to: CGSize(
.resized(to: CGSize(
width: DeletedMessageView.iconSize,
height: DeletedMessageView.iconSize
))?

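// resizedImage(to:) becomes resized(to:) in several of the views touched by this commit. A hedged
// sketch of what such a helper typically does (UIGraphicsImageRenderer based; assumed, not taken
// from SessionUtilitiesKit):
import UIKit

extension UIImage {
    // Redraw the image into a bitmap context of the requested size.
    func resized(to size: CGSize) -> UIImage? {
        let renderer = UIGraphicsImageRenderer(size: size)
        return renderer.image { _ in
            draw(in: CGRect(origin: .zero, size: size))
        }
    }
}
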
@ -2,7 +2,7 @@
import UIKit
import SessionMessagingKit
import SignalCoreKit
import SessionUtilitiesKit
protocol LinkPreviewState {
var isLoaded: Bool { get }
@ -51,7 +51,7 @@ public extension LinkPreview {
var image: UIImage? {
guard let jpegImageData = linkPreviewDraft.jpegImageData else { return nil }
guard let image = UIImage(data: jpegImageData) else {
owsFailDebug("Could not load image: \(jpegImageData.count)")
Log.error("[LinkPreview] Could not load image: \(jpegImageData.count)")
return nil
}
@ -84,7 +84,7 @@ public extension LinkPreview {
var imageState: LinkPreview.ImageState {
guard linkPreview.attachmentId != nil else { return .none }
guard let imageAttachment: Attachment = imageAttachment else {
owsFailDebug("Missing imageAttachment.")
Log.error("[LinkPreview] Missing imageAttachment.")
return .none
}
@ -109,7 +109,7 @@ public extension LinkPreview {
return nil
}
guard let image = UIImage(data: imageData) else {
owsFailDebug("Could not load image: \(imageAttachment?.localRelativeFilePath ?? "unknown")")
Log.error("[LinkPreview] Could not load image: \(imageAttachment?.localRelativeFilePath ?? "unknown")")
return nil
}

@ -2,7 +2,6 @@
import UIKit
import SessionMessagingKit
import SignalCoreKit
import SessionUtilitiesKit
public class MediaAlbumView: UIStackView {
@ -17,7 +16,7 @@ public class MediaAlbumView: UIStackView {
@available(*, unavailable, message: "use other init() instead.")
required public init(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
public required init(
@ -56,21 +55,21 @@ public class MediaAlbumView: UIStackView {
backgroundView.themeBackgroundColor = .backgroundPrimary
addSubview(backgroundView)
backgroundView.setContentHuggingLow()
backgroundView.setCompressionResistanceLow()
backgroundView.setContentHugging(to: .defaultLow)
backgroundView.setCompressionResistance(to: .defaultLow)
backgroundView.pin(to: self)
switch itemViews.count {
case 0: return owsFailDebug("No item views.")
case 0: return Log.error("[MediaAlbumView] No item views.")
case 1:
// X
guard let itemView = itemViews.first else {
owsFailDebug("Missing item view.")
Log.error("[MediaAlbumView] Missing item view.")
return
}
addSubview(itemView)
itemView.autoPinEdgesToSuperviewEdges()
itemView.pin(to: self)
case 2:
// X X
@ -92,7 +91,7 @@ public class MediaAlbumView: UIStackView {
let bigImageSize = smallImageSize * 2 + MediaAlbumView.kSpacingPts
guard let leftItemView = itemViews.first else {
owsFailDebug("Missing view")
Log.error("[MediaAlbumView] Missing view")
return
}
autoSet(viewSize: bigImageSize, ofViews: [leftItemView])
@ -111,7 +110,7 @@ public class MediaAlbumView: UIStackView {
if items.count > MediaAlbumView.kMaxItems {
guard let lastView = rightViews.last else {
owsFailDebug("Missing lastView")
Log.error("[MediaAlbumView] Missing lastView")
return
}
@ -120,7 +119,7 @@ public class MediaAlbumView: UIStackView {
let tintView = UIView()
tintView.themeBackgroundColor = .messageBubble_overlay
lastView.addSubview(tintView)
tintView.autoPinEdgesToSuperviewEdges()
tintView.pin(to: lastView)
let moreCount = max(1, items.count - MediaAlbumView.kMaxItems)
let moreText = String(
@ -133,7 +132,7 @@ public class MediaAlbumView: UIStackView {
moreLabel.text = moreText
moreLabel.themeTextColor = .white
lastView.addSubview(moreLabel)
moreLabel.autoCenterInSuperview()
moreLabel.center(in: lastView)
}
}
@ -144,7 +143,7 @@ public class MediaAlbumView: UIStackView {
continue
}
guard let index = itemViews.firstIndex(of: itemView) else {
owsFailDebug("Couldn't determine index of item view.")
Log.error("[MediaAlbumView] Couldn't determine index of item view.")
continue
}
let item = items[index]
@ -155,14 +154,14 @@ public class MediaAlbumView: UIStackView {
continue
}
guard let icon = UIImage(named: "media_album_caption") else {
owsFailDebug("Couldn't load icon.")
Log.error("[MediaAlbumView] Couldn't load icon.")
continue
}
let iconView = UIImageView(image: icon)
itemView.addSubview(iconView)
itemView.layoutMargins = .zero
iconView.autoPinTopToSuperviewMargin(withInset: 6)
iconView.autoPinLeadingToSuperviewMargin(withInset: 6)
iconView.pin(.top, to: .top, of: itemView.layoutMarginsGuide, withInset: 6)
iconView.pin(.leading, to: .leading, of: itemView.layoutMarginsGuide, withInset: 6)
}
}
@ -171,7 +170,8 @@ public class MediaAlbumView: UIStackView {
ofViews views: [MediaView]
) {
for itemView in views {
itemView.autoSetDimensions(to: CGSize(width: viewSize, height: viewSize))
itemView.set(.width, to: viewSize)
itemView.set(.height, to: viewSize)
}
}
@ -250,7 +250,7 @@ public class MediaAlbumView: UIStackView {
var bestDistance: CGFloat = 0
for itemView in itemViews {
let itemCenter = convert(itemView.center, from: itemView.superview)
let distance = CGPointDistance(location, itemCenter)
let distance = location.distance(to: itemCenter)
if bestMediaView != nil && distance > bestDistance {
continue
}

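// CGPointDistance(location, itemCenter) above becomes location.distance(to: itemCenter). A
// minimal sketch of the assumed CGPoint extension (plain Euclidean distance):
import CoreGraphics

extension CGPoint {
    func distance(to other: CGPoint) -> CGFloat {
        let dx = other.x - x
        let dy = other.y - y
        return (dx * dx + dy * dy).squareRoot()
    }
}
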
@ -46,7 +46,7 @@ final class MediaPlaceholderView: UIView {
// Image view
let imageView = UIImageView(
image: UIImage(named: iconName)?
.resizedImage(
.resized(
to: CGSize(
width: MediaPlaceholderView.iconSize,
height: MediaPlaceholderView.iconSize

@ -4,7 +4,6 @@ import UIKit
import YYImage
import SessionUIKit
import SessionMessagingKit
import SignalCoreKit
import SignalUtilitiesKit
import SessionUtilitiesKit
@ -72,7 +71,7 @@ public class MediaView: UIView {
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
deinit {
@ -82,7 +81,7 @@ public class MediaView: UIView {
// MARK: -
private func createContents() {
AssertIsOnMainThread()
Log.assertOnMainThread()
guard attachment.state != .pendingDownload && attachment.state != .downloading else {
addDownloadProgressIfNecessary()
@ -107,7 +106,7 @@ public class MediaView: UIView {
configureForVideo(attachment: attachment)
}
else {
owsFailDebug("Attachment has unexpected type.")
Log.error("[MediaView] Attachment has unexpected type.")
configure(forError: .invalid)
}
}
@ -159,14 +158,14 @@ public class MediaView: UIView {
animatedImageView.themeBackgroundColor = .backgroundSecondary
animatedImageView.isHidden = !attachment.isValid
addSubview(animatedImageView)
animatedImageView.autoPinEdgesToSuperviewEdges()
animatedImageView.pin(to: self)
_ = addUploadProgressIfNecessary(animatedImageView)
loadBlock = { [weak self] in
AssertIsOnMainThread()
Log.assertOnMainThread()
if animatedImageView.image != nil {
owsFailDebug("Unexpectedly already loaded.")
Log.error("[MediaView] Unexpectedly already loaded.")
return
}
self?.tryToLoadMedia(
@ -176,7 +175,7 @@ public class MediaView: UIView {
return
}
guard let filePath: String = attachment.originalFilePath else {
owsFailDebug("Attachment stream missing original file path.")
Log.error("[MediaView] Attachment stream missing original file path.")
self?.configure(forError: .invalid)
return
}
@ -184,10 +183,10 @@ public class MediaView: UIView {
applyMediaBlock(YYImage(contentsOfFile: filePath))
},
applyMediaBlock: { media in
AssertIsOnMainThread()
Log.assertOnMainThread()
guard let image: YYImage = media as? YYImage else {
owsFailDebug("Media has unexpected type: \(type(of: media))")
Log.error("[MediaView] Media has unexpected type: \(type(of: media))")
self?.configure(forError: .invalid)
return
}
@ -198,7 +197,7 @@ public class MediaView: UIView {
)
}
unloadBlock = {
AssertIsOnMainThread()
Log.assertOnMainThread()
animatedImageView.image = nil
}
@ -216,14 +215,14 @@ public class MediaView: UIView {
stillImageView.themeBackgroundColor = .backgroundSecondary
stillImageView.isHidden = !attachment.isValid
addSubview(stillImageView)
stillImageView.autoPinEdgesToSuperviewEdges()
stillImageView.pin(to: self)
_ = addUploadProgressIfNecessary(stillImageView)
loadBlock = { [weak self] in
AssertIsOnMainThread()
Log.assertOnMainThread()
if stillImageView.image != nil {
owsFailDebug("Unexpectedly already loaded.")
Log.error("[MediaView] Unexpectedly already loaded.")
return
}
self?.tryToLoadMedia(
@ -237,16 +236,16 @@ public class MediaView: UIView {
size: .large,
success: { image, _ in applyMediaBlock(image) },
failure: {
Logger.error("Could not load thumbnail")
Log.error("[MediaView] Could not load thumbnail")
self?.configure(forError: .invalid)
}
)
},
applyMediaBlock: { media in
AssertIsOnMainThread()
Log.assertOnMainThread()
guard let image: UIImage = media as? UIImage else {
owsFailDebug("Media has unexpected type: \(type(of: media))")
Log.error("[MediaView] Media has unexpected type: \(type(of: media))")
self?.configure(forError: .invalid)
return
}
@ -257,7 +256,7 @@ public class MediaView: UIView {
)
}
unloadBlock = {
AssertIsOnMainThread()
Log.assertOnMainThread()
stillImageView.image = nil
}
@ -276,7 +275,7 @@ public class MediaView: UIView {
stillImageView.isHidden = !attachment.isValid
addSubview(stillImageView)
stillImageView.autoPinEdgesToSuperviewEdges()
stillImageView.pin(to: self)
if !addUploadProgressIfNecessary(stillImageView) && !shouldSupressControls {
if let duration: TimeInterval = attachment.duration {
@ -306,14 +305,14 @@ public class MediaView: UIView {
videoPlayButton.set(.width, to: 72)
videoPlayButton.set(.height, to: 72)
stillImageView.addSubview(videoPlayButton)
videoPlayButton.autoCenterInSuperview()
videoPlayButton.center(in: stillImageView)
}
loadBlock = { [weak self] in
AssertIsOnMainThread()
Log.assertOnMainThread()
if stillImageView.image != nil {
owsFailDebug("Unexpectedly already loaded.")
Log.error("[MediaView] Unexpectedly already loaded.")
return
}
self?.tryToLoadMedia(
@ -327,16 +326,16 @@ public class MediaView: UIView {
size: .medium,
success: { image, _ in applyMediaBlock(image) },
failure: {
Logger.error("Could not load thumbnail")
Log.error("[MediaView] Could not load thumbnail")
self?.configure(forError: .invalid)
}
)
},
applyMediaBlock: { media in
AssertIsOnMainThread()
Log.assertOnMainThread()
guard let image: UIImage = media as? UIImage else {
owsFailDebug("Media has unexpected type: \(type(of: media))")
Log.error("[MediaView] Media has unexpected type: \(type(of: media))")
self?.configure(forError: .invalid)
return
}
@ -347,7 +346,7 @@ public class MediaView: UIView {
)
}
unloadBlock = {
AssertIsOnMainThread()
Log.assertOnMainThread()
stillImageView.image = nil
}
@ -369,14 +368,14 @@ public class MediaView: UIView {
switch error {
case .failed:
guard let asset = UIImage(named: "media_retry") else {
owsFailDebug("Missing image")
Log.error("[MediaView] Missing image")
return
}
icon = asset
case .invalid:
guard let asset = UIImage(named: "media_invalid") else {
owsFailDebug("Missing image")
Log.error("[MediaView] Missing image")
return
}
icon = asset
@ -398,7 +397,7 @@ public class MediaView: UIView {
iconView.themeTintColor = .textPrimary
iconView.alpha = Values.mediumOpacity
addSubview(iconView)
iconView.autoCenterInSuperview()
iconView.center(in: self)
}
private func tryToLoadMedia(
@ -410,7 +409,7 @@ public class MediaView: UIView {
// our load attempt is complete.
let loadCompletion: (AnyObject?) -> Void = { [weak self] possibleMedia in
guard self?.loadState.wrappedValue == .loading else {
Logger.verbose("Skipping obsolete load.")
Log.verbose("[MediaView] Skipping obsolete load.")
return
}
guard let media: AnyObject = possibleMedia else {
@ -427,12 +426,12 @@ public class MediaView: UIView {
}
guard loadState.wrappedValue == .loading else {
owsFailDebug("Unexpected load state: \(loadState)")
Log.error("[MediaView] Unexpected load state: \(loadState)")
return
}
if let media: AnyObject = self.mediaCache?.object(forKey: cacheKey as NSString) {
Logger.verbose("media cache hit")
Log.verbose("[MediaView] media cache hit")
guard Thread.isMainThread else {
DispatchQueue.main.async {
@ -445,11 +444,11 @@ public class MediaView: UIView {
return
}
Logger.verbose("media cache miss")
Log.verbose("[MediaView] media cache miss")
MediaView.loadQueue.async { [weak self] in
guard self?.loadState.wrappedValue == .loading else {
Logger.verbose("Skipping obsolete load.")
Log.verbose("[MediaView] Skipping obsolete load.")
return
}

@ -68,7 +68,7 @@ final class OpenGroupInvitationView: UIView {
let iconImageViewSize = OpenGroupInvitationView.iconImageViewSize
let iconImageView = UIImageView(
image: UIImage(named: iconName)?
.resizedImage(to: CGSize(width: iconSize, height: iconSize))?
.resized(to: CGSize(width: iconSize, height: iconSize))?
.withRenderingMode(.alwaysTemplate)
)
iconImageView.themeTintColor = (isOutgoing ? .messageBubble_outgoingText : .textPrimary)

@ -100,11 +100,11 @@ final class QuoteView: UIView {
contentView.pin(to: self)
if let attachment: Attachment = attachment {
let isAudio: Bool = MIMETypeUtil.isAudio(attachment.contentType)
let isAudio: Bool = MimeTypeUtil.isAudio(attachment.contentType)
let fallbackImageName: String = (isAudio ? "attachment_audio" : "actionsheet_document_black")
let imageView: UIImageView = UIImageView(
image: UIImage(named: fallbackImageName)?
.resizedImage(to: CGSize(width: iconSize, height: iconSize))?
.resized(to: CGSize(width: iconSize, height: iconSize))?
.withRenderingMode(.alwaysTemplate)
)

@ -56,12 +56,11 @@ final class ReactionContainerView: UIView {
lazy var collapseButton: UIView = {
let arrow: UIImageView = UIImageView(
image: UIImage(named: "ic_chevron_up")?
.resizedImage(to: ReactionContainerView.arrowSize)?
.withRenderingMode(.alwaysTemplate)
image: UIImage(named: "ic_chevron_up")?.withRenderingMode(.alwaysTemplate)
)
arrow.themeTintColor = .textPrimary
arrow.setContentHuggingPriority(.required, for: .horizontal)
arrow.set(.width, to: ReactionContainerView.arrowSize.width)
arrow.set(.height, to: ReactionContainerView.arrowSize.height)
let textLabel: UILabel = UILabel()
textLabel.setContentHuggingPriority(.required, for: .vertical)

@ -39,7 +39,7 @@ struct OpenGroupInvitationView_SwiftUI: View {
// Icon
let iconName = (isOutgoing ? "Globe" : "Plus")
if let iconImage = UIImage(named: iconName)?
.resizedImage(to: CGSize(width: Self.iconSize, height: Self.iconSize))?
.resized(to: CGSize(width: Self.iconSize, height: Self.iconSize))?
.withRenderingMode(.alwaysTemplate)
{
Image(uiImage: iconImage)

@ -95,9 +95,9 @@ struct QuoteView_SwiftUI: View {
return thumbnail
}
let fallbackImageName: String = (MIMETypeUtil.isAudio(attachment.contentType) ? "attachment_audio" : "actionsheet_document_black")
let fallbackImageName: String = (MimeTypeUtil.isAudio(attachment.contentType) ? "attachment_audio" : "actionsheet_document_black")
return UIImage(named: fallbackImageName)?
.resizedImage(to: CGSize(width: Self.iconSize, height: Self.iconSize))?
.resized(to: CGSize(width: Self.iconSize, height: Self.iconSize))?
.withRenderingMode(.alwaysTemplate)
}() {
Image(uiImage: image)

@ -2,7 +2,6 @@
import UIKit
import SessionUIKit
import SignalCoreKit
import SessionUtilitiesKit
@objc class TypingIndicatorView: UIStackView {
@ -21,12 +20,12 @@ import SessionUtilitiesKit
@available(*, unavailable, message:"use other constructor instead.")
required init(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
@available(*, unavailable, message:"use other constructor instead.")
override init(frame: CGRect) {
notImplemented()
fatalError("init(frame:) has not been implemented")
}
@objc
@ -55,7 +54,7 @@ import SessionUtilitiesKit
// MARK: - Notifications
@objc func didBecomeActive() {
AssertIsOnMainThread()
Log.assertOnMainThread()
// CoreAnimation animations are stopped in the background, so ensure
// animations are restored if necessary.
@ -108,12 +107,12 @@ import SessionUtilitiesKit
@available(*, unavailable, message:"use other constructor instead.")
required init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
@available(*, unavailable, message:"use other constructor instead.")
override init(frame: CGRect) {
notImplemented()
fatalError("init(frame:) has not been implemented")
}
init(dotType: DotType) {
@ -121,8 +120,8 @@ import SessionUtilitiesKit
super.init(frame: .zero)
autoSetDimension(.width, toSize: kMaxRadiusPt)
autoSetDimension(.height, toSize: kMaxRadiusPt)
set(.width, to: kMaxRadiusPt)
set(.height, to: kMaxRadiusPt)
layer.addSublayer(shapeLayer)
@ -144,9 +143,9 @@ import SessionUtilitiesKit
var animationDuration: CFTimeInterval = 0
let addDotKeyFrame = { (keyFrameTime: CFTimeInterval, progress: CGFloat) in
let dotColor = baseColor.withAlphaComponent(CGFloatLerp(0.4, 1.0, CGFloatClamp01(progress)))
let dotColor = baseColor.withAlphaComponent(progress.clamp01().lerp(0.4, 1.0))
colorValues.append(dotColor.cgColor)
let radius = CGFloatLerp(TypingIndicatorView.kMinRadiusPt, TypingIndicatorView.kMaxRadiusPt, CGFloatClamp01(progress))
let radius = progress.clamp01().lerp(TypingIndicatorView.kMinRadiusPt, TypingIndicatorView.kMaxRadiusPt)
let margin = (TypingIndicatorView.kMaxRadiusPt - radius) * 0.5
let bezierPath = UIBezierPath(ovalIn: CGRect(x: margin, y: margin, width: radius, height: radius))
pathValues.append(bezierPath.cgPath)

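// The CGFloatLerp/CGFloatClamp01 free functions above are replaced with fluent CGFloat helpers.
// A sketch of the assumed semantics (clamp to the unit interval, then interpolate between the
// supplied bounds); the names follow the call sites in this diff, the implementation is illustrative:
import CoreGraphics

extension CGFloat {
    func clamp01() -> CGFloat {
        return Swift.min(1, Swift.max(0, self))
    }

    func lerp(_ minValue: CGFloat, _ maxValue: CGFloat) -> CGFloat {
        return minValue + (maxValue - minValue) * self
    }
}

// e.g. CGFloat(0.5).clamp01().lerp(0.4, 1.0) == 0.7, matching the dot colour interpolation above.
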
@ -874,7 +874,7 @@ final class VisibleMessageCell: MessageCell, TappableLabelDelegate {
// For open groups only attempt to start a conversation if the author has a blinded id
guard cellViewModel.threadVariant != .community else {
// FIXME: Add in support for opening a conversation with a 'blinded25' id
guard SessionId.Prefix(from: cellViewModel.authorId) == .blinded15 else { return }
guard (try? SessionId.Prefix(from: cellViewModel.authorId)) == .blinded15 else { return }
delegate?.startThread(
with: cellViewModel.authorId,

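// SessionId and SessionId.Prefix now have throwing initialisers rather than failable ones, hence
// the try? wrappers here and in HomeVC below. An illustrative sketch of the pattern (the error
// type, prefix values and parsing rules are assumptions, not taken from SessionUtilitiesKit):
enum ExamplePrefix: String {
    case standard = "05"
    case blinded15 = "15"
    case blinded25 = "25"

    enum ParseError: Error { case invalidPrefix }

    init(from id: String) throws {
        guard
            id.count >= 2,
            let prefix = ExamplePrefix(rawValue: String(id.prefix(2)))
        else { throw ParseError.invalidPrefix }

        self = prefix
    }
}

// A thrown error simply collapses to nil in the comparison:
// guard (try? ExamplePrefix(from: authorId)) == .blinded15 else { return }
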
@ -56,7 +56,7 @@ class MessageRequestFooterView: UIView {
private lazy var blockButton: UIButton = {
let result: UIButton = UIButton()
result.setCompressionResistanceHigh()
result.setCompressionResistance(to: .defaultHigh)
result.accessibilityLabel = "Block message request"
result.translatesAutoresizingMaskIntoConstraints = false
result.clipsToBounds = true

@ -3,19 +3,18 @@
// stringlint:disable
import Foundation
import SignalCoreKit
import SessionUtilitiesKit
extension Emoji {
private static let availableCache: Atomic<[Emoji:Bool]> = Atomic([:])
private static let iosVersionKey = "iosVersion"
private static let cacheUrl = URL(fileURLWithPath: OWSFileSystem.appSharedDataDirectoryPath())
private static let cacheUrl = URL(fileURLWithPath: FileManager.default.appSharedDataDirectoryPath)
.appendingPathComponent("Library")
.appendingPathComponent("Caches")
.appendingPathComponent("emoji.plist")
static func warmAvailableCache() {
owsAssertDebug(!Thread.isMainThread)
Log.assert(!Thread.isMainThread)
guard Singleton.hasAppContext && Singleton.appContext.isMainAppAndActive else { return }
@ -29,28 +28,28 @@ extension Emoji {
do {
availableMap = try NSMutableDictionary(contentsOf: Self.cacheUrl, error: ())
} catch {
Logger.info("Re-building emoji availability cache. Cache could not be loaded. \(error)")
Log.info("[Emoji] Re-building emoji availability cache. Cache could not be loaded. \(error)")
uncachedEmoji = Emoji.allCases
}
let lastIosVersion = availableMap[iosVersionKey] as? String
if lastIosVersion == iosVersion {
Logger.debug("Loading emoji availability cache (expect \(Emoji.allCases.count) items, found \(availableMap.count - 1)).")
Log.debug("[Emoji] Loading emoji availability cache (expect \(Emoji.allCases.count) items, found \(availableMap.count - 1)).")
for emoji in Emoji.allCases {
if let available = availableMap[emoji.rawValue] as? Bool {
availableCache[emoji] = available
} else {
Logger.warn("Emoji unexpectedly missing from cache: \(emoji).")
Log.warn("[Emoji] Emoji unexpectedly missing from cache: \(emoji).")
uncachedEmoji.append(emoji)
}
}
} else if uncachedEmoji.isEmpty {
Logger.info("Re-building emoji availability cache. iOS version upgraded from \(lastIosVersion ?? "(none)") -> \(iosVersion)")
Log.info("[Emoji] Re-building emoji availability cache. iOS version upgraded from \(lastIosVersion ?? "(none)") -> \(iosVersion)")
uncachedEmoji = Emoji.allCases
}
if !uncachedEmoji.isEmpty {
Logger.info("Checking emoji availability for \(uncachedEmoji.count) uncached emoji")
Log.info("[Emoji] Checking emoji availability for \(uncachedEmoji.count) uncached emoji")
uncachedEmoji.forEach {
let available = isEmojiAvailable($0)
availableMap[$0.rawValue] = available
@ -65,11 +64,11 @@ extension Emoji {
withIntermediateDirectories: true)
try availableMap.write(to: Self.cacheUrl)
} catch {
Logger.warn("Failed to save emoji availability cache; it will be recomputed next time! \(error)")
Log.warn("[Emoji] Failed to save emoji availability cache; it will be recomputed next time! \(error)")
}
}
Logger.info("Warmed emoji availability cache with \(availableCache.lazy.filter { $0.value }.count) available emoji for iOS \(iosVersion)")
Log.info("[Emoji] Warmed emoji availability cache with \(availableCache.lazy.filter { $0.value }.count) available emoji for iOS \(iosVersion)")
Self.availableCache.mutate { $0 = availableCache }
}

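// The plist above just memoises per-emoji availability for the current iOS version so the check
// only runs once per OS upgrade. One common way to implement the underlying isEmojiAvailable
// check (an assumption; the real implementation is not part of this diff) is to ask CoreText
// whether the emoji font has glyphs for the emoji's UTF-16 code units:
import CoreText
import UIKit

func isEmojiAvailable(_ emoji: String) -> Bool {
    let font = CTFontCreateWithName("AppleColorEmoji" as CFString, 0, nil)
    let codeUnits = Array(emoji.utf16)
    var glyphs = [CGGlyph](repeating: 0, count: codeUnits.count)

    // Returns true only if every code unit maps to a glyph in the font.
    return CTFontGetGlyphsForCharacters(font, codeUnits, &glyphs, codeUnits.count)
}
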
@ -1,11 +1,9 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import UIKit
import PureLayout
import SessionUIKit
import SessionUtilitiesKit
import NVActivityIndicatorView
import SignalCoreKit
class EmptySearchResultCell: UITableViewCell {
private lazy var messageLabel: UILabel = {
@ -44,22 +42,21 @@ class EmptySearchResultCell: UITableViewCell {
selectionStyle = .none
contentView.addSubview(messageLabel)
messageLabel.autoSetDimension(.height, toSize: 150)
messageLabel.autoPinEdge(toSuperviewMargin: .top, relation: .greaterThanOrEqual)
messageLabel.autoPinEdge(toSuperviewMargin: .leading, relation: .greaterThanOrEqual)
messageLabel.autoPinEdge(toSuperviewMargin: .bottom, relation: .greaterThanOrEqual)
messageLabel.autoPinEdge(toSuperviewMargin: .trailing, relation: .greaterThanOrEqual)
messageLabel.autoVCenterInSuperview()
messageLabel.autoHCenterInSuperview()
messageLabel.setContentHuggingHigh()
messageLabel.setCompressionResistanceHigh()
messageLabel.set(.height, to: 150)
messageLabel.pin(.top, greaterThanOrEqualTo: .top, of: contentView)
messageLabel.pin(.leading, greaterThanOrEqualTo: .leading, of: contentView)
messageLabel.pin(.bottom, lessThanOrEqualTo: .bottom, of: contentView)
messageLabel.pin(.trailing, lessThanOrEqualTo: .trailing, of: contentView)
messageLabel.center(in: contentView)
messageLabel.setContentHugging(to: .required)
messageLabel.setCompressionResistance(to: .required)
contentView.addSubview(spinner)
spinner.autoCenterInSuperview()
spinner.center(in: contentView)
}
required init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
public func configure(isLoading: Bool) {

@ -7,7 +7,6 @@ import SessionUIKit
import SessionMessagingKit
import SessionUtilitiesKit
import SignalUtilitiesKit
import SignalCoreKit
class GlobalSearchViewController: BaseVC, LibSessionRespondingViewController, UITableViewDelegate, UITableViewDataSource {
fileprivate typealias SectionModel = ArraySection<SearchSection, SessionThreadViewModel>
@ -41,6 +40,7 @@ class GlobalSearchViewController: BaseVC, LibSessionRespondingViewController, UI
// MARK: - Variables
private let dependencies: Dependencies
private lazy var defaultSearchResults: SearchResultData = {
let nonalphabeticNameTitle: String = "#" // stringlint:disable
let contacts: [SessionThreadViewModel] = Storage.shared.read { db -> [SessionThreadViewModel]? in
@ -116,11 +116,23 @@ class GlobalSearchViewController: BaseVC, LibSessionRespondingViewController, UI
@objc public var searchText = "" {
didSet {
AssertIsOnMainThread()
Log.assertOnMainThread()
// Use a slight delay to debounce updates.
refreshSearchResults()
}
}
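// "Use a slight delay to debounce updates" above: refreshSearchResults() is not shown in this
// diff, but a typical debounce for a search field looks like this sketch (illustrative only;
// the type and its names are hypothetical, not part of the commit):
private final class Debouncer {
    private var pending: DispatchWorkItem?
    private let delay: TimeInterval

    init(delay: TimeInterval = 0.1) { self.delay = delay }

    func call(_ block: @escaping () -> Void) {
        pending?.cancel()
        let item = DispatchWorkItem(block: block)
        pending = item
        DispatchQueue.main.asyncAfter(deadline: .now() + delay, execute: item)
    }
}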
// MARK: - Initialization
init(using dependencies: Dependencies) {
self.dependencies = dependencies
super.init(nibName: nil, bundle: nil)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
// MARK: - UI Components
@ -207,12 +219,14 @@ class GlobalSearchViewController: BaseVC, LibSessionRespondingViewController, UI
searchBarContainer.addSubview(ipadCancelButton)
ipadCancelButton.pin(.trailing, to: .trailing, of: searchBarContainer)
ipadCancelButton.autoVCenterInSuperview()
searchBar.autoPinEdgesToSuperviewEdges(with: UIEdgeInsets.zero, excludingEdge: .trailing)
ipadCancelButton.center(.vertical, in: searchBarContainer)
searchBar.pin(.top, to: .top, of: searchBarContainer)
searchBar.pin(.leading, to: .leading, of: searchBarContainer)
searchBar.pin(.trailing, to: .leading, of: ipadCancelButton, withInset: -Values.smallSpacing)
searchBar.pin(.bottom, to: .bottom, of: searchBarContainer)
}
else {
searchBar.autoPinEdgesToSuperviewMargins()
searchBar.pin(toMarginsOf: searchBarContainer)
}
}
@ -322,7 +336,7 @@ extension GlobalSearchViewController: UISearchBarDelegate {
}
func updateSearchText() {
guard let searchText = searchBar.text?.ows_stripped() else { return }
guard let searchText = searchBar.text?.stripped else { return }
self.searchText = searchText
}
}
@ -363,7 +377,12 @@ extension GlobalSearchViewController {
}
}
private func show(threadId: String, threadVariant: SessionThread.Variant, focusedInteractionInfo: Interaction.TimestampInfo? = nil, animated: Bool = true) {
private func show(
threadId: String,
threadVariant: SessionThread.Variant,
focusedInteractionInfo: Interaction.TimestampInfo? = nil,
animated: Bool = true
) {
guard Thread.isMainThread else {
DispatchQueue.main.async { [weak self] in
self?.show(threadId: threadId, threadVariant: threadVariant, focusedInteractionInfo: focusedInteractionInfo, animated: animated)
@ -387,7 +406,8 @@ extension GlobalSearchViewController {
let viewController: ConversationVC = ConversationVC(
threadId: threadId,
threadVariant: threadVariant,
focusedInteractionInfo: focusedInteractionInfo
focusedInteractionInfo: focusedInteractionInfo,
using: dependencies
)
self.navigationController?.pushViewController(viewController, animated: true)
}

@ -12,7 +12,7 @@ final class HomeVC: BaseVC, LibSessionRespondingViewController, UITableViewDataS
private static let loadingHeaderHeight: CGFloat = 40
public static let newConversationButtonSize: CGFloat = 60
private let viewModel: HomeViewModel = HomeViewModel()
private let viewModel: HomeViewModel
private var dataChangeObservable: DatabaseCancellable? {
didSet { oldValue?.cancel() } // Cancel the old observable if there was one
}
@ -29,7 +29,8 @@ final class HomeVC: BaseVC, LibSessionRespondingViewController, UITableViewDataS
// MARK: - Intialization
init(flow: Onboarding.Flow? = nil) {
init(flow: Onboarding.Flow? = nil, using dependencies: Dependencies) {
self.viewModel = HomeViewModel(using: dependencies)
Storage.shared.addObserver(viewModel.pagedDataObserver)
self.flow = flow
super.init(nibName: nil, bundle: nil)
@ -590,7 +591,7 @@ final class HomeVC: BaseVC, LibSessionRespondingViewController, UITableViewDataS
// Container view
let profilePictureViewContainer = UIView()
profilePictureViewContainer.addSubview(profilePictureView)
profilePictureView.autoPinEdgesToSuperviewEdges()
profilePictureView.pin(to: profilePictureViewContainer)
profilePictureViewContainer.addSubview(pathStatusView)
pathStatusView.pin(.trailing, to: .trailing, of: profilePictureViewContainer)
pathStatusView.pin(.bottom, to: .bottom, of: profilePictureViewContainer)
@ -739,7 +740,7 @@ final class HomeVC: BaseVC, LibSessionRespondingViewController, UITableViewDataS
// Cannot properly sync outgoing blinded message requests so don't provide the option
guard
threadViewModel.threadVariant != .contact ||
SessionId(from: section.elements[indexPath.row].threadId)?.prefix == .standard
(try? SessionId(from: section.elements[indexPath.row].threadId))?.prefix == .standard
else { return nil }
return UIContextualAction.configuration(
@ -775,7 +776,7 @@ final class HomeVC: BaseVC, LibSessionRespondingViewController, UITableViewDataS
)
case .threads:
let sessionIdPrefix: SessionId.Prefix? = SessionId(from: threadViewModel.threadId)?.prefix
let sessionIdPrefix: SessionId.Prefix? = (try? SessionId(from: threadViewModel.threadId))?.prefix
// Cannot properly sync outgoing blinded message requests so only provide valid options
let shouldHavePinAction: Bool = (
@ -866,7 +867,8 @@ final class HomeVC: BaseVC, LibSessionRespondingViewController, UITableViewDataS
ConversationVC(
threadId: threadId,
threadVariant: variant,
focusedInteractionInfo: focusedInteractionInfo
focusedInteractionInfo: focusedInteractionInfo,
using: viewModel.dependencies
)
].compactMap { $0 }
@ -886,7 +888,7 @@ final class HomeVC: BaseVC, LibSessionRespondingViewController, UITableViewDataS
if let presentedVC = self.presentedViewController {
presentedVC.dismiss(animated: false, completion: nil)
}
let searchController = GlobalSearchViewController()
let searchController = GlobalSearchViewController(using: viewModel.dependencies)
self.navigationController?.setViewControllers([ self, searchController ], animated: true)
}

@ -29,9 +29,11 @@ public class HomeViewModel {
let userProfile: Profile
}
public let dependencies: Dependencies
// MARK: - Initialization
init() {
init(using dependencies: Dependencies) {
typealias InitialData = (
showViewedSeedBanner: Bool,
hasHiddenMessageRequests: Bool,
@ -46,6 +48,7 @@ public class HomeViewModel {
)
}
self.dependencies = dependencies
self.state = State(
showViewedSeedBanner: (initialData?.showViewedSeedBanner ?? true),
hasHiddenMessageRequests: (initialData?.hasHiddenMessageRequests ?? false),
@ -208,11 +211,8 @@ public class HomeViewModel {
updatedData: self?.process(data: updatedData, for: updatedPageInfo),
currentDataRetriever: { self?.threadData },
onDataChangeRetriever: { self?.onThreadChange },
onUnobservedDataChange: { updatedData, changeset in
self?.unobservedThreadDataChanges = (changeset.isEmpty ?
nil :
(updatedData, changeset)
)
onUnobservedDataChange: { updatedData in
self?.unobservedThreadDataChanges = updatedData
}
)
@ -277,7 +277,7 @@ public class HomeViewModel {
else { return }
/// **MUST** have the same logic as in the 'PagedDataObserver.onChangeUnsorted' above
let currentData: [SectionModel] = (self.unobservedThreadDataChanges?.0 ?? self.threadData)
let currentData: [SectionModel] = (self.unobservedThreadDataChanges ?? self.threadData)
let updatedThreadData: [SectionModel] = self.process(
data: (currentData.first(where: { $0.model == .threads })?.elements ?? []),
for: currentPageInfo
@ -285,13 +285,10 @@ public class HomeViewModel {
PagedData.processAndTriggerUpdates(
updatedData: updatedThreadData,
currentDataRetriever: { [weak self] in (self?.unobservedThreadDataChanges?.0 ?? self?.threadData) },
currentDataRetriever: { [weak self] in (self?.unobservedThreadDataChanges ?? self?.threadData) },
onDataChangeRetriever: { [weak self] in self?.onThreadChange },
onUnobservedDataChange: { [weak self] updatedData, changeset in
self?.unobservedThreadDataChanges = (changeset.isEmpty ?
nil :
(updatedData, changeset)
)
onUnobservedDataChange: { [weak self] updatedData in
self?.unobservedThreadDataChanges = updatedData
}
)
}
@ -299,22 +296,25 @@ public class HomeViewModel {
// MARK: - Thread Data
private var hasReceivedInitialThreadData: Bool = false
public private(set) var unobservedThreadDataChanges: ([SectionModel], StagedChangeset<[SectionModel]>)?
public private(set) var unobservedThreadDataChanges: [SectionModel]?
public private(set) var threadData: [SectionModel] = []
public private(set) var pagedDataObserver: PagedDatabaseObserver<SessionThread, SessionThreadViewModel>?
public var onThreadChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ())? {
didSet {
guard onThreadChange != nil else { return }
// When starting to observe interaction changes we want to trigger a UI update just in case the
// data was changed while we weren't observing
if let changes: ([SectionModel], StagedChangeset<[SectionModel]>) = self.unobservedThreadDataChanges {
let performChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ())? = onThreadChange
switch Thread.isMainThread {
case true: performChange?(changes.0, changes.1)
case false: DispatchQueue.main.async { performChange?(changes.0, changes.1) }
}
if let changes: [SectionModel] = self.unobservedThreadDataChanges {
PagedData.processAndTriggerUpdates(
updatedData: changes,
currentDataRetriever: { [weak self] in self?.threadData },
onDataChangeRetriever: { [weak self] in self?.onThreadChange },
onUnobservedDataChange: { [weak self] updatedData in
self?.unobservedThreadDataChanges = updatedData
}
)
self.unobservedThreadDataChanges = nil
}
}

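// The HomeViewModel change above stops caching a (data, StagedChangeset) tuple while nothing is
// observing and instead stores only the raw sections, re-running PagedData.processAndTriggerUpdates
// (and therefore recomputing the diff against the current data) once an observer attaches. A
// simplified, self-contained sketch of that pattern with hypothetical generic names:
final class ObservedSections<T: Equatable> {
    private(set) var current: [T] = []
    private var unobservedChanges: [T]?

    var onChange: (([T]) -> Void)? {
        didSet {
            // Replay anything that changed while nobody was listening.
            guard onChange != nil, let pending = unobservedChanges else { return }
            unobservedChanges = nil
            apply(pending)
        }
    }

    func apply(_ updated: [T]) {
        guard onChange != nil else {
            // No observer yet: just remember the latest data, the diff gets computed on replay.
            unobservedChanges = updated
            return
        }
        guard updated != current else { return }
        current = updated
        onChange?(updated)
    }
}
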
@ -167,10 +167,11 @@ class MessageRequestsViewModel: SessionTableViewModel, NavigatableStateHolder, O
accessibility: Accessibility(
identifier: "Message request"
),
onTap: { [weak self] in
onTap: { [weak self, dependencies] in
let viewController: ConversationVC = ConversationVC(
threadId: viewModel.threadId,
threadVariant: viewModel.threadVariant
threadVariant: viewModel.threadVariant,
using: dependencies
)
self?.transitionToScreen(viewController, transitionType: .push)
}

@ -68,7 +68,7 @@ struct NewMessageScreen: View {
}
func continueWithAccountIdOrONS() {
let maybeSessionId: SessionId? = SessionId(from: accountIdOrONS)
let maybeSessionId: SessionId? = try? SessionId(from: accountIdOrONS)
if KeyPair.isValidHexEncodedPublicKey(candidate: accountIdOrONS) {
switch maybeSessionId?.prefix {

@ -6,7 +6,6 @@ import GRDB
import DifferenceKit
import SessionUIKit
import SignalUtilitiesKit
import SignalCoreKit
public class AllMediaViewController: UIViewController, UIPageViewControllerDataSource, UIPageViewControllerDelegate {
private let pageVC = UIPageViewController(transitionStyle: .scroll, navigationOrientation: .horizontal, options: nil)
@ -53,7 +52,7 @@ public class AllMediaViewController: UIViewController, UIPageViewControllerDataS
}
required init?(coder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
// MARK: Lifecycle

@ -4,7 +4,6 @@ import Foundation
import MediaPlayer
import SessionUIKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
// This kind of view is tricky. I've tried to organize things in the
@ -76,12 +75,12 @@ import SessionUtilitiesKit
@available(*, unavailable, message:"use other constructor instead.")
required init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
@objc required init(srcImage: UIImage, successCompletion : @escaping (Data) -> Void) {
// normalizedImage() can be slightly expensive but in practice this is fine.
self.srcImage = srcImage.normalized()
self.srcImage = srcImage.normalizedImage()
self.successCompletion = successCompletion
super.init(nibName: nil, bundle: nil)
@ -151,7 +150,7 @@ import SessionUtilitiesKit
let contentView = UIView()
contentView.themeBackgroundColor = .backgroundPrimary
self.view.addSubview(contentView)
contentView.autoPinEdgesToSuperviewEdges()
contentView.pin(to: self.view)
let titleLabel: UILabel = UILabel()
titleLabel.font = .boldSystemFont(ofSize: Values.veryLargeFontSize)
@ -159,10 +158,10 @@ import SessionUtilitiesKit
titleLabel.themeTextColor = .textPrimary
titleLabel.textAlignment = .center
contentView.addSubview(titleLabel)
titleLabel.autoPinWidthToSuperview()
let titleLabelMargin = ScaleFromIPhone5(16)
titleLabel.autoPinEdge(toSuperviewSafeArea: .top, withInset: titleLabelMargin)
titleLabel.set(.width, to: .width, of: contentView)
let titleLabelMargin = Values.scaleFromIPhone5(16)
titleLabel.pin(.top, to: .top, of: contentView.safeAreaLayoutGuide, withInset: titleLabelMargin)
let buttonRow: UIView = createButtonRow()
contentView.addSubview(buttonRow)
@ -172,7 +171,7 @@ import SessionUtilitiesKit
buttonRow.set(
.height,
to: (
ScaleFromIPhone5To7Plus(35, 45) +
Values.scaleFromIPhone5To7Plus(35, 45) +
Values.mediumSpacing +
(UIApplication.shared.keyWindow?.safeAreaInsets.bottom ?? Values.mediumSpacing)
)
@ -500,7 +499,7 @@ import SessionUtilitiesKit
UIGraphicsBeginImageContextWithOptions(dstSizePixels, !hasAlpha, dstScale)
guard let context = UIGraphicsGetCurrentContext() else {
owsFailDebug("could not generate dst image.")
Log.error("[CropScaleImageViewController] Could not generate dst image.")
return nil
}
context.interpolationQuality = .high
@ -509,7 +508,7 @@ import SessionUtilitiesKit
srcImage.draw(in: imageViewFrame)
guard let scaledImage = UIGraphicsGetImageFromCurrentImageContext() else {
owsFailDebug("could not generate dst image.")
Log.error("[CropScaleImageViewController] Could not generate dst image.")
return nil
}
UIGraphicsEndImageContext()

@ -6,7 +6,6 @@ import GRDB
import DifferenceKit
import SessionUIKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
public class DocumentTileViewController: UIViewController, UITableViewDelegate, UITableViewDataSource {
@ -37,7 +36,7 @@ public class DocumentTileViewController: UIViewController, UITableViewDelegate,
}
required public init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
deinit {
@ -90,7 +89,7 @@ public class DocumentTileViewController: UIViewController, UITableViewDelegate,
)
view.addSubview(self.tableView)
tableView.autoPin(toEdgesOf: view)
tableView.pin(to: view)
// Notifications
NotificationCenter.default.addObserver(
@ -145,7 +144,7 @@ public class DocumentTileViewController: UIViewController, UITableViewDelegate,
// If we have a focused item then we want to scroll to it
guard let focusedIndexPath: IndexPath = self.viewModel.focusedIndexPath else { return }
Logger.debug("scrolling to focused item at indexPath: \(focusedIndexPath)")
Log.debug("[DocumentTitleViewController] Scrolling to focused item at indexPath: \(focusedIndexPath)")
self.view.layoutIfNeeded()
self.tableView.scrollToRow(at: focusedIndexPath, at: .middle, animated: false)
@ -342,7 +341,7 @@ public class DocumentTileViewController: UIViewController, UITableViewDelegate,
if
attachment.isText ||
attachment.isMicrosoftDoc ||
attachment.contentType == OWSMimeTypeApplicationPdf
attachment.contentType == MimeTypeUtil.MimeType.applicationPdf
{
delegate?.preview(fileUrl: fileUrl)
@ -569,7 +568,7 @@ class DocumentSectionHeaderView: UIView {
@available(*, unavailable, message: "Unimplemented")
required init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
public func configure(title: String) {
@ -589,12 +588,15 @@ class DocumentStaticHeaderView: UIView {
label.themeTextColor = .textPrimary
label.textAlignment = .center
label.numberOfLines = 0
label.autoPinEdgesToSuperviewMargins(with: UIEdgeInsets(top: 0, leading: Values.largeSpacing, bottom: 0, trailing: Values.largeSpacing))
label.pin(.top, toMargin: .top, of: self)
label.pin(.leading, toMargin: .leading, of: self, withInset: Values.largeSpacing)
label.pin(.trailing, toMargin: .trailing, of: self, withInset: -Values.largeSpacing)
label.pin(.bottom, toMargin: .bottom, of: self)
}
@available(*, unavailable, message: "Unimplemented")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
public func configure(title: String) {

@ -4,7 +4,6 @@ import Foundation
import Combine
import YYImage
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
class GifPickerCell: UICollectionViewCell {
@ -13,7 +12,7 @@ class GifPickerCell: UICollectionViewCell {
var imageInfo: GiphyImageInfo? {
didSet {
AssertIsOnMainThread()
Log.assertOnMainThread()
ensureCellState()
}
@ -23,7 +22,7 @@ class GifPickerCell: UICollectionViewCell {
// Here's a bit of logic to not preload offscreen cells that are prefetched.
var isCellVisible = false {
didSet {
AssertIsOnMainThread()
Log.assertOnMainThread()
ensureCellState()
}
@ -40,7 +39,7 @@ class GifPickerCell: UICollectionViewCell {
var isCellSelected: Bool = false {
didSet {
AssertIsOnMainThread()
Log.assertOnMainThread()
ensureCellState()
}
}
@ -112,7 +111,7 @@ class GifPickerCell: UICollectionViewCell {
// Record high quality animated rendition, but to save bandwidth, don't start downloading
// until it's selected.
guard let highQualityAnimatedRendition = imageInfo.pickSendingRendition() else {
Logger.warn("could not pick gif rendition: \(imageInfo.giphyId)")
Log.warn("[GitPickerCell] could not pick gif rendition: \(imageInfo.giphyId)")
clearAssetRequests()
return
}
@ -121,12 +120,12 @@ class GifPickerCell: UICollectionViewCell {
// The Giphy API returns a slew of "renditions" for a given image.
// It's critical that we carefully "pick" the best rendition to use.
guard let animatedRendition = imageInfo.pickPreviewRendition() else {
Logger.warn("could not pick gif rendition: \(imageInfo.giphyId)")
Log.warn("[GitPickerCell] could not pick gif rendition: \(imageInfo.giphyId)")
clearAssetRequests()
return
}
guard let stillRendition = imageInfo.pickStillRendition() else {
Logger.warn("could not pick still rendition: \(imageInfo.giphyId)")
Log.warn("[GitPickerCell] could not pick still rendition: \(imageInfo.giphyId)")
clearAssetRequests()
return
}
@ -135,53 +134,56 @@ class GifPickerCell: UICollectionViewCell {
if stillAsset != nil || animatedAsset != nil {
clearStillAssetRequest()
} else if stillAssetRequest == nil {
stillAssetRequest = GiphyDownloader.giphyDownloader.requestAsset(assetDescription: stillRendition,
priority: .high,
success: { [weak self] assetRequest, asset in
guard let strongSelf = self else { return }
if assetRequest != nil && assetRequest != strongSelf.stillAssetRequest {
owsFailDebug("Obsolete request callback.")
return
}
strongSelf.clearStillAssetRequest()
strongSelf.stillAsset = asset
strongSelf.ensureViewState()
stillAssetRequest = GiphyDownloader.giphyDownloader.requestAsset(
assetDescription: stillRendition,
priority: .high,
success: { [weak self] assetRequest, asset in
if assetRequest != nil && assetRequest != self?.stillAssetRequest {
Log.error("[GitPickerCell] Obsolete request callback.")
return
}
self?.clearStillAssetRequest()
self?.stillAsset = asset
self?.ensureViewState()
},
failure: { [weak self] assetRequest in
guard let strongSelf = self else { return }
if assetRequest != strongSelf.stillAssetRequest {
owsFailDebug("Obsolete request callback.")
return
}
strongSelf.clearStillAssetRequest()
})
failure: { [weak self] assetRequest in
if assetRequest != self?.stillAssetRequest {
Log.error("[GitPickerCell] Obsolete request callback.")
return
}
self?.clearStillAssetRequest()
}
)
}
// Start animated asset request if necessary.
if animatedAsset != nil {
clearAnimatedAssetRequest()
} else if animatedAssetRequest == nil {
animatedAssetRequest = GiphyDownloader.giphyDownloader.requestAsset(assetDescription: animatedRendition,
priority: .low,
success: { [weak self] assetRequest, asset in
guard let strongSelf = self else { return }
if assetRequest != nil && assetRequest != strongSelf.animatedAssetRequest {
owsFailDebug("Obsolete request callback.")
return
}
// If we have the animated asset, we don't need the still asset.
strongSelf.clearAssetRequests()
strongSelf.animatedAsset = asset
strongSelf.ensureViewState()
animatedAssetRequest = GiphyDownloader.giphyDownloader.requestAsset(
assetDescription: animatedRendition,
priority: .low,
success: { [weak self] assetRequest, asset in
if assetRequest != nil && assetRequest != self?.animatedAssetRequest {
Log.error("[GitPickerCell] Obsolete request callback.")
return
}
// If we have the animated asset, we don't need the still asset.
self?.clearAssetRequests()
self?.animatedAsset = asset
self?.ensureViewState()
},
failure: { [weak self] assetRequest in
guard let strongSelf = self else { return }
if assetRequest != strongSelf.animatedAssetRequest {
owsFailDebug("Obsolete request callback.")
return
}
strongSelf.clearAnimatedAssetRequest()
})
failure: { [weak self] assetRequest in
if assetRequest != self?.animatedAssetRequest {
Log.error("[GitPickerCell] Obsolete request callback.")
return
}
self?.clearAnimatedAssetRequest()
}
)
}
}
@ -195,13 +197,13 @@ class GifPickerCell: UICollectionViewCell {
clearViewState()
return
}
guard NSData.ows_isValidImage(atPath: asset.filePath, mimeType: OWSMimeTypeImageGif) else {
owsFailDebug("invalid asset.")
guard Data.isValidImage(at: asset.filePath, mimeType: MimeTypeUtil.MimeType.imageGif) else {
Log.error("[GitPickerCell] Invalid asset.")
clearViewState()
return
}
guard let image = YYImage(contentsOfFile: asset.filePath) else {
owsFailDebug("could not load asset.")
Log.error("[GitPickerCell] Could not load asset.")
clearViewState()
return
}
@ -209,10 +211,10 @@ class GifPickerCell: UICollectionViewCell {
let imageView = YYAnimatedImageView()
self.imageView = imageView
self.contentView.addSubview(imageView)
imageView.ows_autoPinToSuperviewEdges()
imageView.pin(to: contentView)
}
guard let imageView = imageView else {
owsFailDebug("missing imageview.")
Log.error("[GitPickerCell] Missing imageview.")
clearViewState()
return
}
@ -224,15 +226,15 @@ class GifPickerCell: UICollectionViewCell {
let activityIndicator = UIActivityIndicatorView(style: .medium)
self.activityIndicator = activityIndicator
addSubview(activityIndicator)
activityIndicator.autoCenterInSuperview()
activityIndicator.center(in: self)
activityIndicator.startAnimating()
// Render activityIndicator on a white tile to ensure it's visible
// when overlaid on a variety of potential gifs.
activityIndicator.themeBackgroundColor = .white
activityIndicator.alpha = 0.3
activityIndicator.autoSetDimension(.width, toSize: 30)
activityIndicator.autoSetDimension(.height, toSize: 30)
activityIndicator.set(.width, to: 30)
activityIndicator.set(.height, to: 30)
activityIndicator.themeShadowColor = .black
activityIndicator.layer.cornerRadius = 3
activityIndicator.layer.shadowOffset = CGSize(width: 1, height: 1)
@ -246,7 +248,7 @@ class GifPickerCell: UICollectionViewCell {
public func requestRenditionForSending() -> AnyPublisher<ProxiedContentAsset, Error> {
guard let renditionForSending = self.renditionForSending else {
owsFailDebug("renditionForSending was unexpectedly nil")
Log.error("[GitPickerCell] renditionForSending was unexpectedly nil")
return Fail(error: GiphyError.assertionError(description: "renditionForSending was unexpectedly nil"))
.eraseToAnyPublisher()
}
@ -260,7 +262,7 @@ class GifPickerCell: UICollectionViewCell {
)
.mapError { _ -> Error in
// TODO: GiphyDownloader API should pass through a useful failing error so we can pass it through here
Logger.error("request failed")
Log.error("[GitPickerCell] request failed")
return GiphyError.fetchFailure
}
.map { asset, _ in asset }

@ -1,7 +1,6 @@
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
import Foundation
import SignalCoreKit
protocol GifPickerLayoutDelegate: AnyObject {
func imageInfosForLayout() -> [GiphyImageInfo]
@ -20,7 +19,7 @@ class GifPickerLayout: UICollectionViewLayout {
@available(*, unavailable, message:"use other constructor instead.")
required init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
override init() {
@ -60,8 +59,7 @@ class GifPickerLayout: UICollectionViewLayout {
// 2 columns will show fewer GIFs at a time,
// but use less network & be a more responsive experience.
let columnCount = UInt(2)
let totalViewWidth = UInt(collectionView.width())
let totalViewWidth = UInt(collectionView.bounds.width)
let hTotalWhitespace = (2 * hInset) + (hSpacing * (columnCount - 1))
let hRemainderSpace = totalViewWidth - hTotalWhitespace
let columnWidth = UInt(hRemainderSpace / columnCount)
@ -137,6 +135,7 @@ class GifPickerLayout: UICollectionViewLayout {
guard let collectionView = collectionView else {
return false
}
return collectionView.width() != newBounds.size.width
return collectionView.bounds.width != newBounds.size.width
}
}

@ -4,7 +4,6 @@ import UIKit
import Combine
import SignalUtilitiesKit
import SessionUIKit
import SignalCoreKit
import SessionUtilitiesKit
class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollectionViewDataSource, UICollectionViewDelegate, GifPickerLayoutDelegate {
@ -17,7 +16,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
private var viewMode = ViewMode.idle {
didSet {
Logger.info("viewMode: \(viewMode)")
Log.debug("[GifPickerViewController] viewMode: \(viewMode)")
updateContents()
}
@ -47,7 +46,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
@available(*, unavailable, message:"use other constructor instead.")
required init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
required init() {
@ -68,9 +67,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
}
@objc func didBecomeActive() {
AssertIsOnMainThread()
Logger.info("")
Log.assertOnMainThread()
// Prod cells to try to load when app becomes active.
ensureCellState()
@ -79,7 +76,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
func ensureCellState() {
for cell in self.collectionView.visibleCells {
guard let cell = cell as? GifPickerCell else {
owsFailDebug("unexpected cell.")
Log.error("[GifPickerViewController] unexpected cell.")
return
}
cell.ensureCellState()
@ -145,8 +142,8 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
searchBar.delegate = self
self.view.addSubview(searchBar)
searchBar.autoPinWidthToSuperview()
searchBar.autoPinEdge(.top, to: .top, of: view)
searchBar.set(.width, to: .width, of: view)
searchBar.pin(.top, to: .top, of: view)
self.collectionView.delegate = self
self.collectionView.dataSource = self
@ -155,9 +152,9 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
// Inserted below searchbar because we later occlude the collectionview
// by inserting a masking layer between the search bar and collectionview
self.view.insertSubview(self.collectionView, belowSubview: searchBar)
self.collectionView.autoPinEdge(toSuperviewSafeArea: .leading)
self.collectionView.autoPinEdge(toSuperviewSafeArea: .trailing)
self.collectionView.autoPinEdge(.top, to: .bottom, of: searchBar)
self.collectionView.pin(.top, to: .bottom, of: searchBar)
self.collectionView.pin(.leading, to: .leading, of: view.safeAreaLayoutGuide)
self.collectionView.pin(.trailing, to: .trailing, of: view.safeAreaLayoutGuide)
// Block UIKit from adjusting the insets of the collection view, which screws up
// the min/max scroll positions
@ -167,35 +164,35 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
let bottomBannerContainer = UIView()
bottomBannerContainer.themeBackgroundColor = .backgroundPrimary
self.view.addSubview(bottomBannerContainer)
bottomBannerContainer.autoPinWidthToSuperview()
bottomBannerContainer.autoPinEdge(.top, to: .bottom, of: self.collectionView)
bottomBannerContainer.autoPinEdge(toSuperviewEdge: .bottom)
bottomBannerContainer.set(.width, to: .width, of: view)
bottomBannerContainer.pin(.top, to: .bottom, of: self.collectionView)
bottomBannerContainer.pin(.bottom, to: .bottom, of: view)
let bottomBanner = UIView()
bottomBannerContainer.addSubview(bottomBanner)
bottomBanner.autoPinEdge(toSuperviewEdge: .top)
bottomBanner.autoPinWidthToSuperview()
self.autoPinView(toBottomOfViewControllerOrKeyboard: bottomBanner, avoidNotch: true)
bottomBanner.set(.width, to: .width, of: bottomBannerContainer)
bottomBanner.pin(.top, to: .top, of: bottomBannerContainer)
self.pinViewToBottomOfViewControllerOrKeyboard(bottomBanner, avoidNotch: true)
// The Giphy API requires us to "show their trademark prominently" in our GIF experience.
let logoImage = UIImage(named: "giphy_logo")
let logoImageView = UIImageView(image: logoImage)
bottomBanner.addSubview(logoImageView)
logoImageView.autoPinHeightToSuperview(withMargin: 3)
logoImageView.autoHCenterInSuperview()
logoImageView.set(.height, to: .height, of: bottomBanner, withOffset: -3)
logoImageView.center(.horizontal, in: bottomBanner)
let noResultsView = createErrorLabel(text: "GIF_VIEW_SEARCH_NO_RESULTS".localized())
self.noResultsView = noResultsView
self.view.addSubview(noResultsView)
noResultsView.autoPinWidthToSuperview(withMargin: 20)
noResultsView.autoAlignAxis(.horizontal, toSameAxisOf: self.collectionView)
noResultsView.set(.width, to: .width, of: self.view, withOffset: -20)
noResultsView.center(.horizontal, in: self.collectionView)
let searchErrorView = createErrorLabel(text: "GIF_VIEW_SEARCH_ERROR".localized())
self.searchErrorView = searchErrorView
self.view.addSubview(searchErrorView)
searchErrorView.autoPinWidthToSuperview(withMargin: 20)
searchErrorView.autoAlignAxis(.horizontal, toSameAxisOf: self.collectionView)
searchErrorView.set(.width, to: .width, of: self.view, withOffset: -20)
searchErrorView.center(.horizontal, in: self.collectionView)
searchErrorView.isUserInteractionEnabled = true
searchErrorView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(retryTapped)))
@ -203,8 +200,8 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
let activityIndicator = UIActivityIndicatorView(style: .large)
self.activityIndicator = activityIndicator
self.view.addSubview(activityIndicator)
activityIndicator.autoHCenterInSuperview()
activityIndicator.autoAlignAxis(.horizontal, toSameAxisOf: self.collectionView)
activityIndicator.center(.horizontal, in: self.view)
activityIndicator.center(.vertical, in: self.collectionView)
self.updateContents()
}
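Note: this hunk, and most layout changes in the diff, swap PureLayout's autoPin*/autoAlign* calls for SessionUIKit's pin/center/set helpers, which are not themselves part of this diff. The sketch below is only a hypothetical reconstruction of what a constraint-based pin(_:to:of:withInset:) might look like, simplified to UIView targets (the real API also accepts layout guides such as view.safeAreaLayoutGuide):

import UIKit

extension UIView {
    enum Edge { case top, bottom, leading, trailing }

    @discardableResult
    func pin(_ edge: Edge, to otherEdge: Edge, of other: UIView, withInset inset: CGFloat = 0) -> NSLayoutConstraint {
        translatesAutoresizingMaskIntoConstraints = false

        // Resolve each edge to the matching anchor on the relevant view
        func yAnchor(_ edge: Edge, on view: UIView) -> NSLayoutYAxisAnchor? {
            switch edge {
                case .top: return view.topAnchor
                case .bottom: return view.bottomAnchor
                default: return nil
            }
        }
        func xAnchor(_ edge: Edge, on view: UIView) -> NSLayoutXAxisAnchor? {
            switch edge {
                case .leading: return view.leadingAnchor
                case .trailing: return view.trailingAnchor
                default: return nil
            }
        }

        let constraint: NSLayoutConstraint
        if let lhs = yAnchor(edge, on: self), let rhs = yAnchor(otherEdge, on: other) {
            constraint = lhs.constraint(equalTo: rhs, constant: inset)
        }
        else if let lhs = xAnchor(edge, on: self), let rhs = xAnchor(otherEdge, on: other) {
            constraint = lhs.constraint(equalTo: rhs, constant: inset)
        }
        else {
            fatalError("Cannot pin a horizontal edge to a vertical edge")
        }

        constraint.isActive = true
        return constraint
    }

    /// Pins all four edges flush to another view (the `pin(to:)` form used above)
    func pin(to other: UIView) {
        pin(.top, to: .top, of: other)
        pin(.leading, to: .leading, of: other)
        pin(.trailing, to: .trailing, of: other)
        pin(.bottom, to: .bottom, of: other)
    }
}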
@ -223,15 +220,15 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
private func updateContents() {
guard let noResultsView = self.noResultsView else {
owsFailDebug("Missing noResultsView")
Log.error("[GifPickerViewController] Missing noResultsView")
return
}
guard let searchErrorView = self.searchErrorView else {
owsFailDebug("Missing searchErrorView")
Log.error("[GifPickerViewController] Missing searchErrorView")
return
}
guard let activityIndicator = self.activityIndicator else {
owsFailDebug("Missing activityIndicator")
Log.error("[GifPickerViewController] Missing activityIndicator")
return
}
@ -292,13 +289,13 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: kCellReuseIdentifier, for: indexPath)
guard indexPath.row < imageInfos.count else {
Logger.warn("indexPath: \(indexPath.row) out of range for imageInfo count: \(imageInfos.count) ")
Log.warn("[GifPickerViewController] indexPath: \(indexPath.row) out of range for imageInfo count: \(imageInfos.count) ")
return cell
}
let imageInfo = imageInfos[indexPath.row]
guard let gifCell = cell as? GifPickerCell else {
owsFailDebug("Unexpected cell type.")
Log.error("[GifPickerViewController] Unexpected cell type.")
return cell
}
gifCell.imageInfo = imageInfo
@ -309,18 +306,18 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
public func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
guard let cell = collectionView.cellForItem(at: indexPath) as? GifPickerCell else {
owsFailDebug("unexpected cell.")
Log.error("[GifPickerViewController] unexpected cell.")
return
}
guard cell.stillAsset != nil || cell.animatedAsset != nil else {
// we don't want to let the user blindly select a gray cell
Logger.debug("ignoring selection of cell with no preview")
Log.debug("[GifPickerViewController] ignoring selection of cell with no preview")
return
}
guard self.hasSelectedCell == false else {
owsFailDebug("Already selected cell")
Log.error("[GifPickerViewController] Already selected cell")
return
}
self.hasSelectedCell = true
@ -341,7 +338,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
layer.themeFillColor = .black
layer.opacity = 0.7
}
maskingView.autoPinEdgesToSuperviewEdges()
maskingView.pin(to: self.view)
cell.isCellSelected = true
self.collectionView.isUserInteractionEnabled = false
@ -379,16 +376,12 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
},
receiveValue: { [weak self] asset in
guard let rendition = asset.assetDescription as? GiphyRendition else {
owsFailDebug("Invalid asset description.")
Log.error("[GifPickerViewController] Invalid asset description.")
return
}
let filePath = asset.filePath
guard let dataSource = DataSourcePath.dataSource(withFilePath: filePath,
shouldDeleteOnDeallocation: false) else {
owsFailDebug("couldn't load asset.")
return
}
let dataSource = DataSourcePath(filePath: asset.filePath, shouldDeleteOnDeinit: false)
let attachment = SignalAttachment.attachment(dataSource: dataSource, dataUTI: rendition.utiType, imageQuality: .medium)
self?.dismiss(animated: true) {
@ -402,7 +395,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
public func collectionView(_ collectionView: UICollectionView, willDisplay cell: UICollectionViewCell, forItemAt indexPath: IndexPath) {
guard let cell = cell as? GifPickerCell else {
owsFailDebug("unexpected cell.")
Log.error("[GifPickerViewController] unexpected cell.")
return
}
// We only want to load the cells which are on-screen.
@ -411,7 +404,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
public func collectionView(_ collectionView: UICollectionView, didEndDisplaying cell: UICollectionViewCell, forItemAt indexPath: IndexPath) {
guard let cell = cell as? GifPickerCell else {
owsFailDebug("unexpected cell.")
Log.error("[GifPickerViewController] unexpected cell.")
return
}
cell.isCellVisible = false
@ -468,7 +461,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
let query: String = text.trimmingCharacters(in: .whitespacesAndNewlines)
if (viewMode == .searching || viewMode == .results) && lastQuery == query {
Logger.info("ignoring duplicate search: \(query)")
Log.debug("[GifPickerViewController] ignoring duplicate search: \(query)")
return
}
@ -493,18 +486,18 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
case .finished: break
case .failure(let error):
// Don't bother showing error UI feedback for default "trending" results.
Logger.error("error: \(error)")
Log.error("[GifPickerViewController] error: \(error)")
}
},
receiveValue: { [weak self] imageInfos in
Logger.info("showing trending")
Log.debug("[GifPickerViewController] showing trending")
if imageInfos.count > 0 {
self?.imageInfos = imageInfos
self?.viewMode = .results
}
else {
owsFailDebug("trending results was unexpectedly empty")
Log.error("[GifPickerViewController] trending results was unexpectedly empty")
}
}
)
@ -512,7 +505,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
}
private func search(query: String) {
Logger.info("searching: \(query)")
Log.verbose("[GifPickerViewController] searching: \(query)")
progressiveSearchTimer?.invalidate()
progressiveSearchTimer = nil
@ -530,13 +523,13 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
switch result {
case .finished: break
case .failure:
Logger.info("search failed.")
Log.verbose("[GifPickerViewController] search failed.")
// TODO: Present this error to the user.
self?.viewMode = .error
}
},
receiveValue: { [weak self] imageInfos in
Logger.info("search complete")
Log.verbose("[GifPickerViewController] search complete")
self?.imageInfos = imageInfos
if imageInfos.count > 0 {

@ -5,7 +5,6 @@ import Combine
import CoreServices
import SignalUtilitiesKit
import SessionUtilitiesKit
import SignalCoreKit
// There's no UTI type for webp!
enum GiphyFormat {
@ -79,7 +78,7 @@ class GiphyRendition: ProxiedContentAssetDescription {
}
public func log() {
Logger.verbose("\t \(format), \(name), \(width), \(height), \(fileSize)")
Log.verbose("[GiphyRendition] \t \(format), \(name), \(width), \(height), \(fileSize)")
}
}
@ -111,7 +110,7 @@ class GiphyImageInfo: NSObject {
}
public func log() {
Logger.verbose("giphyId: \(giphyId), \(renditions.count)")
Log.verbose("[GiphyImageInfo] giphyId: \(giphyId), \(renditions.count)")
for rendition in renditions {
rendition.log()
}
@ -287,16 +286,16 @@ enum GiphyAPI {
return urlSession
.dataTaskPublisher(for: url)
.mapError { urlError in
Logger.error("search request failed: \(urlError)")
Log.verbose("[GiphyAPI] Search request failed: \(urlError)")
// URLError codes are negative values
return NetworkError.unknown
}
.map { data, _ in
Logger.debug("search request succeeded")
Log.verbose("[GiphyAPI] Search request succeeded")
guard let imageInfos = self.parseGiphyImages(responseData: data) else {
Logger.error("unable to parse trending images")
Log.error("[GiphyAPI] Unable to parse trending images")
return []
}
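Note: both GiphyAPI hunks map URLError straight to NetworkError.unknown because, as the comment says, URLError codes are negative and not meaningful to callers. A condensed sketch of the same dataTaskPublisher shape, using hypothetical names (ExampleNetworkError, fetchJSON) rather than the app's real types:

import Foundation
import Combine

enum ExampleNetworkError: Error { case unknown, invalidResponse }

func fetchJSON(from url: URL, using session: URLSession = .shared) -> AnyPublisher<[String: Any], ExampleNetworkError> {
    return session
        .dataTaskPublisher(for: url)
        .mapError { _ in ExampleNetworkError.unknown }   // collapse URLError into a domain error
        .tryMap { data, _ -> [String: Any] in
            guard
                let object = try? JSONSerialization.jsonObject(with: data, options: [.fragmentsAllowed]),
                let dictionary = object as? [String: Any]
            else { throw ExampleNetworkError.invalidResponse }

            return dictionary
        }
        .mapError { $0 as? ExampleNetworkError ?? .unknown }
        .eraseToAnyPublisher()
}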
@ -335,13 +334,13 @@ enum GiphyAPI {
return urlSession
.dataTaskPublisher(for: request)
.mapError { urlError in
Logger.error("search request failed: \(urlError)")
Log.error("[GiphyAPI] Search request failed: \(urlError)")
// URLError codes are negative values
return NetworkError.unknown
}
.tryMap { data, _ -> [GiphyImageInfo] in
Logger.debug("search request succeeded")
Log.verbose("[GiphyAPI] Search request succeeded")
guard let imageInfos = self.parseGiphyImages(responseData: data) else {
throw NetworkError.invalidResponse
@ -356,16 +355,16 @@ enum GiphyAPI {
private static func parseGiphyImages(responseData: Data?) -> [GiphyImageInfo]? {
guard let responseData: Data = responseData else {
Logger.error("Missing response.")
Log.error("[GiphyAPI] Missing response.")
return nil
}
guard let responseDict: [String: Any] = try? JSONSerialization
.jsonObject(with: responseData, options: [ .fragmentsAllowed ]) as? [String: Any] else {
Logger.error("Invalid response.")
Log.error("[GiphyAPI] Invalid response.")
return nil
}
guard let imageDicts = responseDict["data"] as? [[String: Any]] else { // stringlint:disable
Logger.error("Invalid response data.")
Log.error("[GiphyAPI] Invalid response data.")
return nil
}
return imageDicts.compactMap { imageDict in
@ -376,21 +375,21 @@ enum GiphyAPI {
// Giphy API results are often incomplete or malformed, so we need to be defensive.
private static func parseGiphyImage(imageDict: [String: Any]) -> GiphyImageInfo? {
guard let giphyId = imageDict["id"] as? String else { // stringlint:disable
Logger.warn("Image dict missing id.")
Log.warn("[GiphyAPI] Image dict missing id.")
return nil
}
guard giphyId.count > 0 else {
Logger.warn("Image dict has invalid id.")
Log.warn("[GiphyAPI] Image dict has invalid id.")
return nil
}
guard let renditionDicts = imageDict["images"] as? [String: Any] else { // stringlint:disable
Logger.warn("Image dict missing renditions.")
Log.warn("[GiphyAPI] Image dict missing renditions.")
return nil
}
var renditions = [GiphyRendition]()
for (renditionName, renditionDict) in renditionDicts {
guard let renditionDict = renditionDict as? [String: Any] else {
Logger.warn("Invalid rendition dict.")
Log.warn("[GiphyAPI] Invalid rendition dict.")
continue
}
guard let rendition = parseGiphyRendition(renditionName: renditionName,
@ -400,12 +399,12 @@ enum GiphyAPI {
renditions.append(rendition)
}
guard renditions.count > 0 else {
Logger.warn("Image has no valid renditions.")
Log.warn("[GiphyAPI] Image has no valid renditions.")
return nil
}
guard let originalRendition = findOriginalRendition(renditions: renditions) else {
Logger.warn("Image has no original rendition.")
Log.warn("[GiphyAPI] Image has no original rendition.")
return nil
}
@ -442,15 +441,15 @@ enum GiphyAPI {
return nil
}
guard urlString.count > 0 else {
Logger.warn("Rendition has invalid url.")
Log.warn("[GiphyAPI] Rendition has invalid url.")
return nil
}
guard let url = NSURL(string: urlString) else {
Logger.warn("Rendition url could not be parsed.")
Log.warn("[GiphyAPI] Rendition url could not be parsed.")
return nil
}
guard let fileExtension = url.pathExtension?.lowercased() else {
Logger.warn("Rendition url missing file extension.")
Log.warn("[GiphyAPI] Rendition url missing file extension.")
return nil
}
var format = GiphyFormat.gif
@ -463,7 +462,7 @@ enum GiphyAPI {
} else if fileExtension == "webp" { // stringlint:disable
return nil
} else {
Logger.warn("Invalid file extension: \(fileExtension).")
Log.warn("[GiphyAPI] Invalid file extension: \(fileExtension).")
return nil
}
@ -488,7 +487,7 @@ enum GiphyAPI {
return nil
}
guard parsedValue > 0 else {
Logger.verbose("\(typeName) has non-positive \(key): \(parsedValue).")
Log.verbose("[GiphyAPI] \(typeName) has non-positive \(key): \(parsedValue).")
return nil
}
return parsedValue

@ -6,7 +6,6 @@ import Photos
import PhotosUI
import SessionUIKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
protocol ImagePickerGridControllerDelegate: AnyObject {
@ -58,7 +57,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
library.add(delegate: self)
guard let collectionView = collectionView else {
owsFailDebug("collectionView was unexpectedly nil")
Log.error("[ImagePickerGridController] collectionView was unexpectedly nil")
return
}
@ -117,12 +116,12 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
@objc
func didPanSelection(_ selectionPanGesture: UIPanGestureRecognizer) {
guard let collectionView = collectionView else {
owsFailDebug("collectionView was unexpectedly nil")
Log.error("[ImagePickerGridController] collectionView was unexpectedly nil")
return
}
guard let delegate = delegate else {
owsFailDebug("delegate was unexpectedly nil")
Log.error("[ImagePickerGridController] delegate was unexpectedly nil")
return
}
@ -165,17 +164,17 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
func tryToToggleBatchSelect(at indexPath: IndexPath) {
guard let collectionView = collectionView else {
owsFailDebug("collectionView was unexpectedly nil")
Log.error("[ImagePickerGridController] collectionView was unexpectedly nil")
return
}
guard let delegate = delegate else {
owsFailDebug("delegate was unexpectedly nil")
Log.error("[ImagePickerGridController] delegate was unexpectedly nil")
return
}
guard delegate.isInBatchSelectMode else {
owsFailDebug("isInBatchSelectMode was unexpectedly false")
Log.error("[ImagePickerGridController] isInBatchSelectMode was unexpectedly false")
return
}
@ -270,7 +269,6 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
// If the app is backgrounded and then foregrounded, when OWSWindowManager calls mainWindow.makeKeyAndVisible
// the ConversationVC's inputAccessoryView will appear *above* us unless we'd previously become first responder.
override public var canBecomeFirstResponder: Bool {
Logger.debug("")
return true
}
@ -284,7 +282,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
self.view.layoutIfNeeded()
guard let collectionView = collectionView else {
owsFailDebug("collectionView was unexpectedly nil")
Log.error("[ImagePickerGridController] collectionView was unexpectedly nil")
return
}
@ -307,7 +305,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
// causing the content to adjust *after* viewWillAppear and viewSafeAreaInsetsDidChange.
// Because that something results in `scrollViewDidScroll` we re-adjust the content
// insets to the bottom.
Logger.debug("adjusting scroll offset back to bottom")
Log.debug("[ImagePickerGridController] Adjusting scroll offset back to bottom")
scrollToBottom(animated: false)
}
}
@ -357,7 +355,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
guard let delegate = delegate else { return }
guard let collectionView = collectionView else {
owsFailDebug("collectionView was unexpectedly nil")
Log.error("[ImagePickerGridController] collectionView was unexpectedly nil")
return
}
@ -366,7 +364,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
func clearCollectionViewSelection() {
guard let collectionView = self.collectionView else {
owsFailDebug("collectionView was unexpectedly nil")
Log.error("[ImagePickerGridController] collectionView was unexpectedly nil")
return
}
@ -374,10 +372,8 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
}
func showTooManySelectedToast() {
Logger.info("")
guard let collectionView = collectionView else {
owsFailDebug("collectionView was unexpectedly nil")
Log.error("[ImagePickerGridController] collectionView was unexpectedly nil")
return
}
@ -427,10 +423,8 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
)
func showCollectionPicker() {
Logger.debug("")
guard let collectionPickerView = collectionPickerController.view else {
owsFailDebug("collectionView was unexpectedly nil")
Log.error("[ImagePickerGridController] collectionView was unexpectedly nil")
return
}
@ -439,8 +433,10 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
addChild(collectionPickerController)
view.addSubview(collectionPickerView)
collectionPickerView.autoPinEdgesToSuperviewEdges(with: .zero, excludingEdge: .top)
collectionPickerView.autoPinEdge(toSuperviewSafeArea: .top)
collectionPickerView.pin(.top, to: .top, of: view.safeAreaLayoutGuide)
collectionPickerView.pin(.leading, to: .leading, of: view)
collectionPickerView.pin(.trailing, to: .trailing, of: view)
collectionPickerView.pin(.bottom, to: .bottom, of: view)
collectionPickerView.layoutIfNeeded()
// Initially position offscreen, we'll animate it in.
@ -453,8 +449,6 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
}
func hideCollectionPicker() {
Logger.debug("")
assert(isShowingCollectionPickerController)
isShowingCollectionPickerController = false
@ -488,7 +482,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
override func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
guard let delegate = delegate else {
owsFailDebug("delegate was unexpectedly nil")
Log.error("[ImagePickerGridController] delegate was unexpectedly nil")
return
}
@ -514,14 +508,13 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
}
public override func collectionView(_ collectionView: UICollectionView, didDeselectItemAt indexPath: IndexPath) {
Logger.debug("")
guard let delegate = delegate else {
owsFailDebug("delegate was unexpectedly nil")
Log.error("[ImagePickerGridController] delegate was unexpectedly nil")
return
}
guard let asset: PHAsset = photoCollectionContents.asset(at: indexPath.item) else {
SNLog("Failed to deselect cell for asset at \(indexPath.item)")
Log.warn("[ImagePickerGridController] Failed to deselect cell for asset at \(indexPath.item)")
delegate.imagePicker(self, failedToRetrieveAssetAt: indexPath.item, forCount: photoCollectionContents.assetCount)
return
}
@ -535,9 +528,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
}
override func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
guard let delegate = delegate else {
return UICollectionViewCell(forAutoLayout: ())
}
guard let delegate = delegate else { return UICollectionViewCell() }
let cell: PhotoGridViewCell = collectionView.dequeue(type: PhotoGridViewCell.self, for: indexPath)
@ -598,7 +589,7 @@ class TitleView: UIView {
super.init(frame: frame)
addSubview(stackView)
stackView.autoPinEdgesToSuperviewEdges()
stackView.pin(to: self)
label.font = .boldSystemFont(ofSize: Values.mediumFontSize)
label.themeTextColor = .textPrimary

@ -7,7 +7,6 @@ import YYImage
import SessionUIKit
import SignalUtilitiesKit
import SessionMessagingKit
import SignalCoreKit
import SessionUtilitiesKit
public enum MediaGalleryOption {
@ -46,7 +45,7 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate {
result.addTarget(self, action: #selector(playVideo), for: .touchUpInside)
result.alpha = 0
let playButtonSize: CGFloat = ScaleFromIPhone5(70)
let playButtonSize: CGFloat = Values.scaleFromIPhone5(70)
result.set(.width, to: playButtonSize)
result.set(.height, to: playButtonSize)

@ -43,20 +43,21 @@ public class MediaGalleryViewModel {
public private(set) var pagedDataObserver: PagedDatabaseObserver<Attachment, Item>?
/// This value is the current state of a gallery view
private var unobservedGalleryDataChanges: ([SectionModel], StagedChangeset<[SectionModel]>)?
private var unobservedGalleryDataChanges: [SectionModel]?
public private(set) var galleryData: [SectionModel] = []
public var onGalleryChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ())? {
didSet {
// When starting to observe interaction changes we want to trigger a UI update just in case the
// data was changed while we weren't observing
if let changes: ([SectionModel], StagedChangeset<[SectionModel]>) = self.unobservedGalleryDataChanges {
let performChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ())? = onGalleryChange
switch Thread.isMainThread {
case true: performChange?(changes.0, changes.1)
case false: DispatchQueue.main.async { performChange?(changes.0, changes.1) }
}
if let changes: [SectionModel] = self.unobservedGalleryDataChanges {
PagedData.processAndTriggerUpdates(
updatedData: changes,
currentDataRetriever: { [weak self] in self?.galleryData },
onDataChangeRetriever: { [weak self] in self?.onGalleryChange },
onUnobservedDataChange: { [weak self] updatedData in
self?.unobservedGalleryDataChanges = updatedData
}
)
self.unobservedGalleryDataChanges = nil
}
}
@ -104,11 +105,8 @@ public class MediaGalleryViewModel {
updatedData: self?.process(data: updatedData, for: updatedPageInfo),
currentDataRetriever: { self?.galleryData },
onDataChangeRetriever: { self?.onGalleryChange },
onUnobservedDataChange: { updatedData, changeset in
self?.unobservedGalleryDataChanges = (changeset.isEmpty ?
nil :
(updatedData, changeset)
)
onUnobservedDataChange: { updatedData in
self?.unobservedGalleryDataChanges = updatedData
}
)
}
@ -132,7 +130,7 @@ public class MediaGalleryViewModel {
// we don't want to mess with the initial view controller behaviour)
guard !performInitialQuerySync else {
loadInitialData()
updateGalleryData(self.unobservedGalleryDataChanges?.0 ?? [])
updateGalleryData(self.unobservedGalleryDataChanges ?? [])
return
}
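Note: the MediaGalleryViewModel hunks above simplify the "unobserved changes" buffer: instead of caching a (data, changeset) pair, only the updated data is stashed and PagedData.processAndTriggerUpdates recomputes the changeset once an observer attaches. A stripped-down sketch of that buffer-until-observed pattern (illustrative names only, not the real PagedData API):

import Foundation

final class ObservableData<Value> {
    private var unobservedValue: Value?

    var onChange: ((Value) -> Void)? {
        didSet {
            // Replay anything that arrived while nobody was listening
            guard let pending: Value = unobservedValue else { return }
            unobservedValue = nil
            notify(pending)
        }
    }

    func update(_ value: Value) {
        guard onChange != nil else {
            unobservedValue = value     // Stash until an observer attaches
            return
        }
        notify(value)
    }

    private func notify(_ value: Value) {
        let callback = onChange
        if Thread.isMainThread { callback?(value) }
        else { DispatchQueue.main.async { callback?(value) } }
    }
}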
@ -243,7 +241,7 @@ public class MediaGalleryViewModel {
return CGSize(width: Int(width), height: Int(height))
}
var captionForDisplay: String? { attachment.caption?.filterForDisplay }
var captionForDisplay: String? { attachment.caption?.filteredForDisplay }
// MARK: - Query

@ -5,7 +5,6 @@ import GRDB
import SessionUIKit
import SessionMessagingKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
import SessionSnodeKit
@ -33,7 +32,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
public func setCurrentItem(_ item: MediaGalleryViewModel.Item, direction: UIPageViewController.NavigationDirection, animated isAnimated: Bool) {
guard let galleryPage = self.buildGalleryPage(galleryItem: item) else {
owsFailDebug("unexpectedly unable to build new gallery page")
Log.error("[MediaPageViewController] Unexpectedly unable to build new gallery page")
return
}
@ -87,11 +86,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
@available(*, unavailable, message: "Unimplemented")
required init?(coder: NSCoder) {
notImplemented()
}
deinit {
Logger.debug("deinit")
fatalError("init(coder:) has not been implemented")
}
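Note: several hunks in this diff replace notImplemented() (and, here, a logging deinit) with the stock fatalError inside required init?(coder:). The pattern they converge on for code-only views and controllers looks like this (CodeOnlyView is a hypothetical example class):

import UIKit

final class CodeOnlyView: UIView {
    override init(frame: CGRect) {
        super.init(frame: frame)
        backgroundColor = .clear
    }

    // Never built from a storyboard/XIB, so trap if this path is ever hit
    @available(*, unavailable, message: "Unimplemented")
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}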
// MARK: - Subview
@ -180,8 +175,8 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
galleryRailView.isHidden = true
galleryRailView.delegate = self
galleryRailView.autoSetDimension(.height, toSize: 72)
footerBar.autoSetDimension(.height, toSize: 44)
galleryRailView.set(.height, to: 72)
footerBar.set(.height, to: 44)
let bottomContainer: DynamicallySizedView = DynamicallySizedView()
bottomContainer.clipsToBounds = true
@ -193,7 +188,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
bottomStack.axis = .vertical
bottomStack.isLayoutMarginsRelativeArrangement = true
bottomContainer.addSubview(bottomStack)
bottomStack.autoPinEdgesToSuperviewEdges()
bottomStack.pin(to: bottomContainer)
let galleryRailBlockingView: UIView = UIView()
galleryRailBlockingView.themeBackgroundColor = .backgroundPrimary
@ -276,7 +271,6 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
}
override func didReceiveMemoryWarning() {
Logger.info("")
super.didReceiveMemoryWarning()
self.cachedPages = [:]
@ -287,7 +281,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
var pagerScrollViewContentOffsetObservation: NSKeyValueObservation?
func pagerScrollView(_ pagerScrollView: UIScrollView, contentOffsetDidChange change: NSKeyValueObservedChange<CGPoint>) {
guard let newValue = change.newValue else {
owsFailDebug("newValue was unexpectedly nil")
Log.error("[MediaPageViewController] newValue was unexpectedly nil")
return
}
@ -516,7 +510,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
public func share(using dependencies: Dependencies = Dependencies()) {
guard let currentViewController = self.viewControllers?[0] as? MediaDetailViewController else {
owsFailDebug("currentViewController was unexpectedly nil")
Log.error("[MediaPageViewController] currentViewController was unexpectedly nil")
return
}
guard let originalFilePath: String = currentViewController.galleryItem.attachment.originalFilePath else {
@ -534,10 +528,10 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
shareVC.completionWithItemsHandler = { activityType, completed, returnedItems, activityError in
if let activityError = activityError {
SNLog("Failed to share with activityError: \(activityError)")
Log.error("[MediaPageViewController] Failed to share with activityError: \(activityError)")
}
else if completed {
SNLog("Did share with activityType: \(activityType.debugDescription)")
Log.info("[MediaPageViewController] Did share with activityType: \(activityType.debugDescription)")
}
guard
@ -615,7 +609,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
@objc public func didPressPlayBarButton() {
guard let currentViewController = self.viewControllers?.first as? MediaDetailViewController else {
SNLog("currentViewController was unexpectedly nil")
Log.error("[MediaPageViewController] currentViewController was unexpectedly nil")
return
}
@ -626,12 +620,11 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
var pendingViewController: MediaDetailViewController?
public func pageViewController(_ pageViewController: UIPageViewController, willTransitionTo pendingViewControllers: [UIViewController]) {
Logger.debug("")
assert(pendingViewControllers.count == 1)
Log.assert(pendingViewControllers.count == 1)
pendingViewControllers.forEach { viewController in
guard let pendingViewController = viewController as? MediaDetailViewController else {
owsFailDebug("unexpected mediaDetailViewController: \(viewController)")
Log.error("[MediaPageViewController] Unexpected mediaDetailViewController: \(viewController)")
return
}
self.pendingViewController = pendingViewController
@ -645,12 +638,11 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
}
public func pageViewController(_ pageViewController: UIPageViewController, didFinishAnimating finished: Bool, previousViewControllers: [UIViewController], transitionCompleted: Bool) {
Logger.debug("")
assert(previousViewControllers.count == 1)
Log.assert(previousViewControllers.count == 1)
previousViewControllers.forEach { viewController in
guard let previousPage = viewController as? MediaDetailViewController else {
owsFailDebug("unexpected mediaDetailViewController: \(viewController)")
Log.error("[MediaPageViewController] Unexpected mediaDetailViewController: \(viewController)")
return
}
@ -792,7 +784,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
self.navigationController?.view.isUserInteractionEnabled = false
self.navigationController?.dismiss(animated: true, completion: { [weak self] in
if !IsLandscapeOrientationEnabled() {
if !UIDevice.current.isIPad {
UIDevice.current.ows_setOrientation(.portrait)
}
@ -805,8 +797,6 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
// MARK: MediaDetailViewControllerDelegate
public func mediaDetailViewControllerDidTapMedia(_ mediaDetailViewController: MediaDetailViewController) {
Logger.debug("")
self.shouldHideToolbars = !self.shouldHideToolbars
}
@ -855,13 +845,12 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
containerView.layoutMargins = UIEdgeInsets(top: 2, left: 8, bottom: 4, right: 8)
containerView.addSubview(stackView)
stackView.autoPinEdge(toSuperviewMargin: .top, relation: .greaterThanOrEqual)
stackView.autoPinEdge(toSuperviewMargin: .trailing, relation: .greaterThanOrEqual)
stackView.autoPinEdge(toSuperviewMargin: .bottom, relation: .greaterThanOrEqual)
stackView.autoPinEdge(toSuperviewMargin: .leading, relation: .greaterThanOrEqual)
stackView.setContentHuggingHigh()
stackView.autoCenterInSuperview()
stackView.pin(.top, greaterThanOrEqualTo: .top, of: containerView)
stackView.pin(.trailing, greaterThanOrEqualTo: .trailing, of: containerView)
stackView.pin(.bottom, lessThanOrEqualTo: .bottom, of: containerView)
stackView.pin(.leading, lessThanOrEqualTo: .leading, of: containerView)
stackView.setContentHugging(to: .required)
stackView.center(in: containerView)
return containerView
}()
@ -891,7 +880,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
return "MEDIA_GALLERY_SENDER_NAME_YOU".localized() //"Short sender label for media sent by you"
default:
owsFailDebug("Unsupported message variant: \(targetItem.interactionVariant)")
Log.error("[MediaPageViewController] Unsupported message variant: \(targetItem.interactionVariant)")
return ""
}
}()
@ -942,7 +931,7 @@ extension MediaGalleryViewModel.Item: GalleryRailItem {
extension MediaPageViewController: GalleryRailViewDelegate {
func galleryRailView(_ galleryRailView: GalleryRailView, didTapItem imageRailItem: GalleryRailItem) {
guard let targetItem = imageRailItem as? MediaGalleryViewModel.Item else {
owsFailDebug("unexpected imageRailItem: \(imageRailItem)")
Log.error("[MediaPageViewController] Unexpected imageRailItem: \(imageRailItem)")
return
}

@ -6,7 +6,6 @@ import GRDB
import DifferenceKit
import SessionUIKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
public class MediaTileViewController: UIViewController, UICollectionViewDataSource, UICollectionViewDelegate, UICollectionViewDelegateFlowLayout {
@ -45,7 +44,7 @@ public class MediaTileViewController: UIViewController, UICollectionViewDataSour
}
required public init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
deinit {
@ -138,12 +137,12 @@ public class MediaTileViewController: UIViewController, UICollectionViewDataSour
)
view.addSubview(self.collectionView)
collectionView.autoPin(toEdgesOf: view)
collectionView.pin(to: view)
view.addSubview(self.footerBar)
footerBar.autoPinWidthToSuperview()
footerBar.autoSetDimension(.height, toSize: MediaTileViewController.footerBarHeight)
self.footerBarBottomConstraint = footerBar.autoPinEdge(toSuperviewEdge: .bottom, withInset: -MediaTileViewController.footerBarHeight)
footerBar.set(.width, to: .width, of: view)
footerBar.set(.height, to: MediaTileViewController.footerBarHeight)
footerBarBottomConstraint = footerBar.pin(.bottom, to: .bottom, of: view, withInset: -MediaTileViewController.footerBarHeight)
self.updateSelectButton(updatedData: self.viewModel.galleryData, inBatchSelectMode: false)
self.mediaTileViewLayout.invalidateLayout()
@ -211,7 +210,7 @@ public class MediaTileViewController: UIViewController, UICollectionViewDataSour
// If we have a focused item then we want to scroll to it
guard let focusedIndexPath: IndexPath = self.viewModel.focusedIndexPath else { return }
Logger.debug("scrolling to focused item at indexPath: \(focusedIndexPath)")
Log.debug("[MediaTileViewController] Scrolling to focused item at indexPath: \(focusedIndexPath)")
// Note: For some reason 'scrollToItem' doesn't always work properly so we need to manually
// calculate what the offset should be to do the initial scroll
@ -637,11 +636,12 @@ public class MediaTileViewController: UIViewController, UICollectionViewDataSour
@objc func didTapSelect(_ sender: Any) {
isInBatchSelectMode = true
// show toolbar
let view: UIView = self.view
UIView.animate(withDuration: 0.1, delay: 0, options: .curveEaseInOut, animations: { [weak self] in
self?.footerBarBottomConstraint?.isActive = false
self?.footerBarBottomConstraint = self?.footerBar.autoPinEdge(toSuperviewSafeArea: .bottom)
self?.footerBarBottomConstraint = self?.footerBar.pin(.bottom, to: .bottom, of: view.safeAreaLayoutGuide)
self?.footerBar.superview?.layoutIfNeeded()
// Ensure toolbar doesn't cover bottom row.
@ -657,9 +657,10 @@ public class MediaTileViewController: UIViewController, UICollectionViewDataSour
isInBatchSelectMode = false
// hide toolbar
let view: UIView = self.view
UIView.animate(withDuration: 0.1, delay: 0, options: .curveEaseInOut, animations: { [weak self] in
self?.footerBarBottomConstraint?.isActive = false
self?.footerBarBottomConstraint = self?.footerBar.autoPinEdge(toSuperviewEdge: .bottom, withInset: -MediaTileViewController.footerBarHeight)
self?.footerBarBottomConstraint = self?.footerBar.pin(.bottom, to: .bottom, of: view, withInset: -MediaTileViewController.footerBarHeight)
self?.footerBar.superview?.layoutIfNeeded()
// Undo "Ensure toolbar doesn't cover bottom row."
@ -672,7 +673,7 @@ public class MediaTileViewController: UIViewController, UICollectionViewDataSour
@objc func didPressDelete(_ sender: Any) {
guard let indexPaths = collectionView.indexPathsForSelectedItems else {
owsFailDebug("indexPaths was unexpectedly nil")
Log.error("[MediaTileViewController] indexPaths was unexpectedly nil")
return
}
@ -801,7 +802,7 @@ private class MediaGallerySectionHeader: UICollectionReusableView {
@available(*, unavailable, message: "Unimplemented")
required init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
public func configure(title: String) {
@ -829,12 +830,15 @@ private class MediaGalleryStaticHeader: UICollectionViewCell {
label.themeTextColor = .textPrimary
label.textAlignment = .center
label.numberOfLines = 0
label.autoPinEdgesToSuperviewMargins(with: UIEdgeInsets(top: 0, leading: Values.largeSpacing, bottom: 0, trailing: Values.largeSpacing))
label.pin(.top, toMargin: .top, of: self)
label.pin(.leading, toMargin: .leading, of: self, withInset: Values.largeSpacing)
label.pin(.trailing, toMargin: .trailing, of: self, withInset: -Values.largeSpacing)
label.pin(.bottom, toMargin: .bottom, of: self)
}
@available(*, unavailable, message: "Unimplemented")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
public func configure(title: String) {

@ -10,6 +10,6 @@ import SignalUtilitiesKit
// MARK: Orientation
override public var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return DefaultUIInterfaceOrientationMask()
return (UIDevice.current.isIPad ? .all : .portrait)
}
}

@ -8,7 +8,6 @@ import AVFoundation
import CoreServices
import SessionMessagingKit
import SessionUtilitiesKit
import SignalCoreKit
protocol PhotoCaptureDelegate: AnyObject {
func photoCapture(_ photoCapture: PhotoCapture, didFinishProcessingAttachment attachment: SignalAttachment)
@ -66,7 +65,7 @@ class PhotoCapture: NSObject {
session.addInput(audioDeviceInput)
self.audioDeviceInput = audioDeviceInput
} else {
owsFailDebug("Could not add audio device input to the session")
Log.error("[PhotoCapture] Could not add audio device input to the session")
}
}
@ -77,7 +76,7 @@ class PhotoCapture: NSObject {
defer { self.session.commitConfiguration() }
guard let audioDeviceInput = self.audioDeviceInput else {
owsFailDebug("audioDevice was unexpectedly nil")
Log.error("[PhotoCapture] audioDevice was unexpectedly nil")
return
}
session.removeInput(audioDeviceInput)
@ -147,11 +146,11 @@ class PhotoCapture: NSObject {
}
func assertIsOnSessionQueue() {
assertOnQueue(sessionQueue)
dispatchPrecondition(condition: .onQueue(sessionQueue))
}
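Note: assertOnQueue came from SignalCoreKit; its replacement, dispatchPrecondition(condition: .onQueue(_:)), is the GCD-native check that traps when code runs on the wrong queue. A small usage sketch (the queue label is made up):

import Dispatch

let sessionQueue = DispatchQueue(label: "com.example.capture.session")  // hypothetical label

func configureCapture() {
    // Traps if this function is called from any queue other than sessionQueue
    dispatchPrecondition(condition: .onQueue(sessionQueue))
    // ... safe to touch capture-session state here ...
}

sessionQueue.async {
    configureCapture()
}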
func switchCamera() -> AnyPublisher<Void, Error> {
AssertIsOnMainThread()
Log.assertOnMainThread()
desiredPosition = {
switch desiredPosition {
@ -204,15 +203,15 @@ class PhotoCapture: NSObject {
receiveOutput: { [weak self] _ in
switch self?.captureOutput.flashMode {
case .auto:
Logger.debug("new flashMode: on")
Log.debug("[PhotoCapture] new flashMode: on")
self?.captureOutput.flashMode = .on
case .on:
Logger.debug("new flashMode: off")
Log.debug("[PhotoCapture] new flashMode: off")
self?.captureOutput.flashMode = .off
case .off:
Logger.debug("new flashMode: auto")
Log.debug("[PhotoCapture] new flashMode: auto")
self?.captureOutput.flashMode = .auto
default: break
@ -228,7 +227,7 @@ class PhotoCapture: NSObject {
monitorSubjectAreaChange: Bool) {
sessionQueue.async {
guard let device = self.captureDevice else {
owsFailDebug("device was unexpectedly nil")
Log.error("[PhotoCapture] device was unexpectedly nil")
return
}
do {
@ -249,7 +248,7 @@ class PhotoCapture: NSObject {
device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
device.unlockForConfiguration()
} catch {
owsFailDebug("error: \(error)")
Log.error("[PhotoCapture] error: \(error)")
}
}
}
@ -274,12 +273,12 @@ class PhotoCapture: NSObject {
assert(alpha >= 0 && alpha <= 1)
sessionQueue.async {
guard let captureDevice = self.captureDevice else {
owsFailDebug("captureDevice was unexpectedly nil")
Log.error("[PhotoCapture] captureDevice was unexpectedly nil")
return
}
// we might want this to be non-linear
let scale = CGFloatLerp(self.minimumZoom, self.maximumZoom, alpha)
let scale = alpha.lerp(self.minimumZoom, self.maximumZoom)
let zoomFactor = self.clampZoom(scale, device: captureDevice)
self.updateZoom(factor: zoomFactor)
}
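Note: CGFloatLerp was another SignalCoreKit helper; the new call site uses a lerp extension from SessionUtilitiesKit that is not shown in this diff. A hypothetical reconstruction of lerp (and the clamp used elsewhere for zoom factors), with a worked value:

import CoreGraphics

extension CGFloat {
    /// Linearly interpolates between `minValue` and `maxValue`, treating `self`
    /// as the 0...1 progress (0 -> minValue, 1 -> maxValue).
    func lerp(_ minValue: CGFloat, _ maxValue: CGFloat) -> CGFloat {
        return minValue + (maxValue - minValue) * self
    }

    /// Restricts the value to the given closed range.
    func clamp(_ minValue: CGFloat, _ maxValue: CGFloat) -> CGFloat {
        return Swift.max(minValue, Swift.min(maxValue, self))
    }
}

// Usage mirroring the zoom code above (illustrative values):
let alpha: CGFloat = 0.25
let zoom = alpha.lerp(1.0, 8.0)   // 2.75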
@ -288,7 +287,7 @@ class PhotoCapture: NSObject {
func updateZoom(scaleFromPreviousZoomFactor scale: CGFloat) {
sessionQueue.async {
guard let captureDevice = self.captureDevice else {
owsFailDebug("captureDevice was unexpectedly nil")
Log.error("[PhotoCapture] captureDevice was unexpectedly nil")
return
}
@ -300,13 +299,13 @@ class PhotoCapture: NSObject {
func completeZoom(scaleFromPreviousZoomFactor scale: CGFloat) {
sessionQueue.async {
guard let captureDevice = self.captureDevice else {
owsFailDebug("captureDevice was unexpectedly nil")
Log.error("[PhotoCapture] captureDevice was unexpectedly nil")
return
}
let zoomFactor = self.clampZoom(scale * self.previousZoomFactor, device: captureDevice)
Logger.debug("ended with scaleFactor: \(zoomFactor)")
Log.debug("[PhotoCapture] ended with scaleFactor: \(zoomFactor)")
self.previousZoomFactor = zoomFactor
self.updateZoom(factor: zoomFactor)
@ -317,7 +316,7 @@ class PhotoCapture: NSObject {
assertIsOnSessionQueue()
guard let captureDevice = self.captureDevice else {
owsFailDebug("captureDevice was unexpectedly nil")
Log.error("[PhotoCapture] captureDevice was unexpectedly nil")
return
}
@ -326,7 +325,7 @@ class PhotoCapture: NSObject {
captureDevice.videoZoomFactor = factor
captureDevice.unlockForConfiguration()
} catch {
owsFailDebug("error: \(error)")
Log.error("[PhotoCapture] error: \(error)")
}
}
@ -340,7 +339,6 @@ extension PhotoCapture: CaptureButtonDelegate {
// MARK: - Photo
func didTapCaptureButton(_ captureButton: CaptureButton) {
Logger.verbose("")
sessionQueue.async {
self.captureOutput.takePhoto(delegate: self)
}
@ -349,10 +347,8 @@ extension PhotoCapture: CaptureButtonDelegate {
// MARK: - Video
func didBeginLongPressCaptureButton(_ captureButton: CaptureButton) {
AssertIsOnMainThread()
Log.assertOnMainThread()
Logger.verbose("")
sessionQueue.async { [weak self] in // Must run this on a specific queue to prevent crashes
guard let strongSelf = self else { return }
@ -373,19 +369,17 @@ extension PhotoCapture: CaptureButtonDelegate {
}
func didCompleteLongPressCaptureButton(_ captureButton: CaptureButton) {
Logger.verbose("")
sessionQueue.async {
self.captureOutput.completeVideo(delegate: self)
self.stopAudioCapture()
}
AssertIsOnMainThread()
Log.assertOnMainThread()
// immediately inform UI that capture is stopping
delegate?.photoCaptureDidCompleteVideo(self)
}
func didCancelLongPressCaptureButton(_ captureButton: CaptureButton) {
Logger.verbose("")
AssertIsOnMainThread()
Log.assertOnMainThread()
sessionQueue.async {
self.stopAudioCapture()
}
@ -397,7 +391,7 @@ extension PhotoCapture: CaptureButtonDelegate {
}
func longPressCaptureButton(_ captureButton: CaptureButton, didUpdateZoomAlpha zoomAlpha: CGFloat) {
Logger.verbose("zoomAlpha: \(zoomAlpha)")
Log.verbose("[PhotoCapture] zoomAlpha: \(zoomAlpha)")
updateZoom(alpha: zoomAlpha)
}
}
@ -413,8 +407,7 @@ extension PhotoCapture: CaptureOutputDelegate {
// MARK: - Photo
func captureOutputDidFinishProcessing(photoData: Data?, error: Error?) {
Logger.verbose("")
AssertIsOnMainThread()
Log.assertOnMainThread()
if let error = error {
delegate?.photoCapture(self, processingDidError: error)
@ -422,14 +415,12 @@ extension PhotoCapture: CaptureOutputDelegate {
}
guard let photoData = photoData else {
owsFailDebug("photoData was unexpectedly nil")
Log.error("[PhotoCapture] photoData was unexpectedly nil")
delegate?.photoCapture(self, processingDidError: PhotoCaptureError.captureFailed)
return
}
let dataSource = DataSourceValue.dataSource(with: photoData, utiType: kUTTypeJPEG as String)
let dataSource = DataSourceValue(data: photoData, utiType: kUTTypeJPEG as String)
let attachment = SignalAttachment.attachment(dataSource: dataSource, dataUTI: kUTTypeJPEG as String, imageQuality: .medium)
delegate?.photoCapture(self, didFinishProcessingAttachment: attachment)
}
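Note: DataSourceValue(data:utiType:) and DataSourcePath(fileUrl:shouldDeleteOnDeinit:) replace the old dataSource(with:) factories. The real types live in SessionUtilitiesKit and are not shown here; the sketch below only illustrates the shouldDeleteOnDeinit idea with a hypothetical file-backed source:

import Foundation

final class ExampleFileDataSource {
    let fileUrl: URL
    private let deleteOnDeinit: Bool

    init(fileUrl: URL, shouldDeleteOnDeinit: Bool) {
        self.fileUrl = fileUrl
        self.deleteOnDeinit = shouldDeleteOnDeinit
    }

    func read() throws -> Data {
        return try Data(contentsOf: fileUrl)
    }

    deinit {
        // Clean up temporary files once nothing references the source any more
        guard deleteOnDeinit else { return }
        try? FileManager.default.removeItem(at: fileUrl)
    }
}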
@ -437,24 +428,21 @@ extension PhotoCapture: CaptureOutputDelegate {
// MARK: - Movie
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
Logger.verbose("")
AssertIsOnMainThread()
Log.assertOnMainThread()
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
Logger.verbose("")
AssertIsOnMainThread()
Log.assertOnMainThread()
if let error = error {
guard didSucceedDespiteError(error) else {
delegate?.photoCapture(self, processingDidError: error)
return
}
Logger.info("Ignoring error, since capture succeeded.")
Log.debug("[PhotoCapture] Ignoring error, since capture succeeded.")
}
let dataSource = DataSourcePath.dataSource(with: outputFileURL, shouldDeleteOnDeallocation: true)
let dataSource = DataSourcePath(fileUrl: outputFileURL, shouldDeleteOnDeinit: true)
let attachment = SignalAttachment.attachment(dataSource: dataSource, dataUTI: kUTTypeMPEG4 as String)
delegate?.photoCapture(self, didFinishProcessingAttachment: attachment)
}
@ -523,18 +511,18 @@ class CaptureOutput {
delegate.assertIsOnSessionQueue()
guard let photoOutput = photoOutput else {
owsFailDebug("photoOutput was unexpectedly nil")
Log.error("[CaptureOutput] photoOutput was unexpectedly nil")
return
}
guard let photoVideoConnection = photoOutput.connection(with: .video) else {
owsFailDebug("photoVideoConnection was unexpectedly nil")
Log.error("[CaptureOutput] photoVideoConnection was unexpectedly nil")
return
}
let videoOrientation = delegate.captureOrientation
photoVideoConnection.videoOrientation = videoOrientation
Logger.verbose("videoOrientation: \(videoOrientation)")
Log.verbose("[CaptureOutput] videoOrientation: \(videoOrientation)")
return imageOutput.takePhoto(delegate: delegate)
}
@ -544,14 +532,14 @@ class CaptureOutput {
func beginVideo(delegate: CaptureOutputDelegate) {
delegate.assertIsOnSessionQueue()
guard let videoConnection = movieOutput.connection(with: .video) else {
owsFailDebug("movieOutputConnection was unexpectedly nil")
Log.error("[CaptureOutput] movieOutputConnection was unexpectedly nil")
return
}
let videoOrientation = delegate.captureOrientation
videoConnection.videoOrientation = videoOrientation
let outputFilePath = OWSFileSystem.temporaryFilePath(withFileExtension: "mp4")
let outputFilePath = FileSystem.temporaryFilePath(fileExtension: "mp4")
movieOutput.startRecording(to: URL(fileURLWithPath: outputFilePath), recordingDelegate: delegate)
}
@ -563,7 +551,7 @@ class CaptureOutput {
func cancelVideo(delegate: CaptureOutputDelegate) {
delegate.assertIsOnSessionQueue()
// There's currently no user-visible way to cancel; if one is ever added, we may need to do some cleanup here.
owsFailDebug("video was unexpectedly canceled.")
Log.error("[CaptureOutput] video was unexpectedly canceled.")
}
}
@ -625,7 +613,7 @@ class PhotoCaptureOutputAdaptee: NSObject, ImageCaptureOutput {
var data = photo.fileDataRepresentation()!
// Call normalized here to fix the orientation
if let srcImage = UIImage(data: data) {
data = srcImage.normalized().jpegData(compressionQuality: 1.0)!
data = srcImage.normalizedImage().jpegData(compressionQuality: 1.0)!
}
DispatchQueue.main.async {
self.delegate?.captureOutputDidFinishProcessing(photoData: data, error: error)
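Note: normalized() becomes normalizedImage(); its job (re-drawing the captured JPEG so the pixel data matches the reported orientation) is implemented elsewhere in the codebase, so the following is only a plausible sketch of what such a helper does:

import UIKit

extension UIImage {
    /// Returns an image whose underlying pixels are already in the `.up`
    /// orientation, re-drawing only when the original orientation differs.
    func exampleNormalizedImage() -> UIImage {
        guard imageOrientation != .up else { return self }

        let format = UIGraphicsImageRendererFormat()
        format.scale = scale
        return UIGraphicsImageRenderer(size: size, format: format).image { _ in
            draw(in: CGRect(origin: .zero, size: size))
        }
    }
}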
@ -651,13 +639,13 @@ class StillImageCaptureOutput: ImageCaptureOutput {
func takePhoto(delegate: CaptureOutputDelegate) {
guard let videoConnection = stillImageOutput.connection(with: .video) else {
owsFailDebug("videoConnection was unexpectedly nil")
Log.error("[StillImageCaptureOutput] videoConnection was unexpectedly nil")
return
}
stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { [weak delegate] (sampleBuffer, error) in
guard let sampleBuffer = sampleBuffer else {
owsFailDebug("sampleBuffer was unexpectedly nil")
Log.error("[StillImageCaptureOutput] sampleBuffer was unexpectedly nil")
return
}
@ -671,7 +659,7 @@ class StillImageCaptureOutput: ImageCaptureOutput {
func videoDevice(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
let captureDevices = AVCaptureDevice.devices()
guard let device = (captureDevices.first { $0.hasMediaType(.video) && $0.position == position }) else {
Logger.debug("unable to find desired position: \(position)")
Log.debug("[StillImageCaptureOutput] unable to find desired position: \(position)")
return captureDevices.first
}

@ -5,7 +5,6 @@ import Combine
import AVFoundation
import SessionUIKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
protocol PhotoCaptureViewControllerDelegate: AnyObject {
@ -46,7 +45,7 @@ class PhotoCaptureViewController: OWSViewController {
receiveCompletion: { result in
switch result {
case .failure: break
case .finished: Logger.debug("stopCapture completed")
case .finished: Log.debug("[PhotoCaptureViewController] stopCapture completed")
}
}
)
@ -105,7 +104,6 @@ class PhotoCaptureViewController: OWSViewController {
// If the app is backgrounded and then foregrounded, when OWSWindowManager calls mainWindow.makeKeyAndVisible
// the ConversationVC's inputAccessoryView will appear *above* us unless we'd previously become first responder.
override public var canBecomeFirstResponder: Bool {
Logger.debug("")
return true
}
@ -124,7 +122,7 @@ class PhotoCaptureViewController: OWSViewController {
init(imageName: String, block: @escaping () -> Void) {
self.button = OWSButton(imageName: imageName, tintColor: .white, block: block)
button.autoPinToSquareAspectRatio()
button.set(.width, to: .height, of: button)
button.themeShadowColor = .black
button.layer.shadowOffset = CGSize.zero
button.layer.shadowOpacity = 0.35
@ -179,20 +177,19 @@ class PhotoCaptureViewController: OWSViewController {
@objc
func didTapSwitchCamera() {
Logger.debug("")
switchCamera()
}
@objc
func didDoubleTapToSwitchCamera(tapGesture: UITapGestureRecognizer) {
Logger.debug("")
switchCamera()
}
private func switchCamera() {
UIView.animate(withDuration: 0.2) {
let epsilonToForceCounterClockwiseRotation: CGFloat = 0.00001
self.switchCameraControl.button.transform = self.switchCameraControl.button.transform.rotate(.pi + epsilonToForceCounterClockwiseRotation)
self.switchCameraControl.button.transform = self.switchCameraControl.button.transform
.rotated(by: .pi + epsilonToForceCounterClockwiseRotation)
}
photoCapture.switchCamera()
@ -209,7 +206,6 @@ class PhotoCaptureViewController: OWSViewController {
@objc
func didTapFlashMode() {
Logger.debug("")
photoCapture.switchFlashMode()
.receive(on: DispatchQueue.main)
.sinkUntilComplete(
@ -262,7 +258,7 @@ class PhotoCaptureViewController: OWSViewController {
// since the "face up" and "face down" orientations aren't reflected in the photo output,
// we need to capture the last known _other_ orientation so we can reflect the appropriate
// portrait/landscape in our captured photos.
Logger.verbose("lastKnownCaptureOrientation: \(lastKnownCaptureOrientation)->\(captureOrientation)")
Log.verbose("[PhotoCaptureViewController] lastKnownCaptureOrientation: \(lastKnownCaptureOrientation)->\(captureOrientation)")
lastKnownCaptureOrientation = captureOrientation
updateIconOrientations(isAnimated: true, captureOrientation: captureOrientation)
}
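Note: the comment above is the reason lastKnownCaptureOrientation exists: faceUp and faceDown have no AVCaptureVideoOrientation equivalent, so the delegate keeps the last orientation that did map. An illustrative mapping helper (not the code in this file):

import UIKit
import AVFoundation

// Only the four "flat-screen" device orientations have a capture equivalent;
// faceUp/faceDown (and unknown) return nil so the caller can fall back to the
// last known value.
func captureOrientation(for deviceOrientation: UIDeviceOrientation) -> AVCaptureVideoOrientation? {
    switch deviceOrientation {
        case .portrait: return .portrait
        case .portraitUpsideDown: return .portraitUpsideDown
        // Device landscapeLeft maps to video landscapeRight (and vice versa)
        // because device orientation is reported relative to the home button.
        case .landscapeLeft: return .landscapeRight
        case .landscapeRight: return .landscapeLeft
        default: return nil
    }
}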
@ -271,14 +267,14 @@ class PhotoCaptureViewController: OWSViewController {
// MARK: -
private func updateIconOrientations(isAnimated: Bool, captureOrientation: AVCaptureVideoOrientation) {
Logger.verbose("captureOrientation: \(captureOrientation)")
Log.verbose("[PhotoCaptureViewController] captureOrientation: \(captureOrientation)")
let transformFromOrientation: CGAffineTransform
switch captureOrientation {
case .portrait: transformFromOrientation = .identity
case .portraitUpsideDown: transformFromOrientation = CGAffineTransform(rotationAngle: .pi)
case .landscapeLeft: transformFromOrientation = CGAffineTransform(rotationAngle: .halfPi)
case .landscapeRight: transformFromOrientation = CGAffineTransform(rotationAngle: -1 * .halfPi)
case .landscapeLeft: transformFromOrientation = CGAffineTransform(rotationAngle: .pi * 0.5)
case .landscapeRight: transformFromOrientation = CGAffineTransform(rotationAngle: -1 * .pi * 0.5)
@unknown default: transformFromOrientation = .identity
}
@ -316,21 +312,23 @@ class PhotoCaptureViewController: OWSViewController {
}
private func showCaptureUI() {
Logger.debug("")
view.addSubview(previewView)
if UIDevice.current.hasIPhoneXNotch {
previewView.autoPinEdgesToSuperviewEdges()
previewView.pin(to: view)
} else {
previewView.autoPinEdgesToSuperviewEdges(with: UIEdgeInsets(top: 0, leading: 0, bottom: 40, trailing: 0))
previewView.pin(.top, to: .top, of: view)
previewView.pin(.leading, to: .leading, of: view)
previewView.pin(.trailing, to: .trailing, of: view)
previewView.pin(.bottom, to: .bottom, of: view, withInset: -40)
}
view.addSubview(captureButton)
captureButton.autoHCenterInSuperview()
captureButton.center(.horizontal, in: view)
captureButton.centerYAnchor.constraint(equalTo: view.layoutMarginsGuide.bottomAnchor, constant: SendMediaNavigationController.bottomButtonsCenterOffset).isActive = true
}
private func showFailureUI(error: Error) {
Logger.error("error: \(error)")
Log.error("[PhotoCaptureViewController] Error: \(error)")
let modal: ConfirmationModal = ConfirmationModal(
info: ConfirmationModal.Info(
title: CommonStrings.errorAlertTitle,
@ -387,7 +385,7 @@ extension PhotoCaptureViewController: PhotoCaptureDelegate {
}
func photoCaptureDidCancelVideo(_ photoCapture: PhotoCapture) {
owsFailDebug("If we ever allow this, we should test.")
Log.error("[PhotoCaptureViewController] photoCaptureDidCancelVideo called - If we ever allow this, we should test.")
isRecordingMovie = false
recordingTimerView.stopCounting()
updateNavigationItems()
@ -432,8 +430,8 @@ class CaptureButton: UIView {
weak var delegate: CaptureButtonDelegate?
let defaultDiameter: CGFloat = ScaleFromIPhone5To7Plus(60, 80)
let recordingDiameter: CGFloat = ScaleFromIPhone5To7Plus(68, 120)
let defaultDiameter: CGFloat = Values.scaleFromIPhone5To7Plus(60, 80)
let recordingDiameter: CGFloat = Values.scaleFromIPhone5To7Plus(68, 120)
var innerButtonSizeConstraints: [NSLayoutConstraint]!
var zoomIndicatorSizeConstraints: [NSLayoutConstraint]!
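Note: ScaleFromIPhone5To7Plus was a SignalCoreKit free function; Values.scaleFromIPhone5To7Plus is its SessionUIKit replacement and is not shown in this diff. A hypothetical implementation of that kind of screen-width interpolation, for reference only:

import UIKit

enum ExampleValues {
    /// Interpolates a dimension between its iPhone 5 (320pt-wide) value and its
    /// iPhone 7 Plus (414pt-wide) value based on the current screen width,
    /// clamped to that range.
    static func scaleFromIPhone5To7Plus(_ iPhone5Value: CGFloat, _ iPhone7PlusValue: CGFloat) -> CGFloat {
        let iPhone5Width: CGFloat = 320
        let iPhone7PlusWidth: CGFloat = 414
        let screenWidth = min(UIScreen.main.bounds.width, UIScreen.main.bounds.height)

        let progress = (screenWidth - iPhone5Width) / (iPhone7PlusWidth - iPhone5Width)
        let clamped = max(0, min(1, progress))
        return iPhone5Value + (iPhone7PlusValue - iPhone5Value) * clamped
    }
}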
@ -448,21 +446,26 @@ class CaptureButton: UIView {
innerButton.addGestureRecognizer(longPressGesture)
addSubview(innerButton)
innerButtonSizeConstraints = autoSetDimensions(to: CGSize(width: defaultDiameter, height: defaultDiameter))
innerButtonSizeConstraints = [
set(.width, to: defaultDiameter),
set(.height, to: defaultDiameter)
]
innerButton.themeBackgroundColor = .white
innerButton.layer.shadowOffset = .zero
innerButton.layer.shadowOpacity = 0.33
innerButton.layer.shadowRadius = 2
innerButton.alpha = 0.33
innerButton.autoPinEdgesToSuperviewEdges()
innerButton.pin(to: self)
addSubview(zoomIndicator)
zoomIndicatorSizeConstraints = zoomIndicator.autoSetDimensions(to: CGSize(width: defaultDiameter, height: defaultDiameter))
zoomIndicatorSizeConstraints = [
zoomIndicator.set(.width, to: defaultDiameter),
zoomIndicator.set(.height, to: defaultDiameter)
]
zoomIndicator.isUserInteractionEnabled = false
zoomIndicator.themeBorderColor = .white
zoomIndicator.layer.borderWidth = 1.5
zoomIndicator.autoAlignAxis(.horizontal, toSameAxisOf: innerButton)
zoomIndicator.autoAlignAxis(.vertical, toSameAxisOf: innerButton)
zoomIndicator.center(in: innerButton)
}
required init?(coder aDecoder: NSCoder) {
@ -480,10 +483,8 @@ class CaptureButton: UIView {
@objc
func didLongPress(_ gesture: UILongPressGestureRecognizer) {
Logger.verbose("")
guard let gestureView = gesture.view else {
owsFailDebug("gestureView was unexpectedly nil")
Log.error("[CaptureButton] gestureView was unexpectedly nil")
return
}
@ -499,17 +500,17 @@ class CaptureButton: UIView {
}
case .changed:
guard let referenceHeight = delegate?.zoomScaleReferenceHeight else {
owsFailDebug("referenceHeight was unexpectedly nil")
Log.error("[CaptureButton] referenceHeight was unexpectedly nil")
return
}
guard referenceHeight > 0 else {
owsFailDebug("referenceHeight was unexpectedly <= 0")
Log.error("[CaptureButton] referenceHeight was unexpectedly <= 0")
return
}
guard let initialTouchLocation = initialTouchLocation else {
owsFailDebug("initialTouchLocation was unexpectedly nil")
Log.error("[CaptureButton] initialTouchLocation was unexpectedly nil")
return
}
@ -521,9 +522,9 @@ class CaptureButton: UIView {
let alpha = ratio.clamp(0, 1)
Logger.verbose("distance: \(distance), alpha: \(alpha)")
Log.verbose("[CaptureButton] distance: \(distance), alpha: \(alpha)")
let zoomIndicatorDiameter = CGFloatLerp(recordingDiameter, 3, alpha)
let zoomIndicatorDiameter = alpha.lerp(recordingDiameter, 3)
self.zoomIndicatorSizeConstraints.forEach { $0.constant = zoomIndicatorDiameter }
zoomIndicator.superview?.layoutIfNeeded()
@ -586,7 +587,7 @@ class RecordingTimerView: UIView {
stackView.spacing = stackViewSpacing
addSubview(stackView)
stackView.autoPinEdgesToSuperviewMargins()
stackView.pin(toMarginsOf: self)
updateView()
}
@ -617,7 +618,8 @@ class RecordingTimerView: UIView {
icon.layer.shadowOpacity = 0.35
icon.layer.shadowRadius = 4
icon.themeBackgroundColor = .danger
icon.autoSetDimensions(to: CGSize(width: iconWidth, height: iconWidth))
icon.set(.width, to: iconWidth)
icon.set(.height, to: iconWidth)
icon.alpha = 0
return icon
@ -670,7 +672,7 @@ class RecordingTimerView: UIView {
@objc
private func updateView() {
let recordingDuration = self.recordingDuration
Logger.verbose("recordingDuration: \(recordingDuration)")
Log.verbose("[RecordingTimerView] recordingDuration: \(recordingDuration)")
let durationDate = Date(timeIntervalSinceReferenceDate: recordingDuration)
label.text = timeFormatter.string(from: durationDate)
}

@ -4,7 +4,7 @@
import UIKit
import SessionUIKit
import SignalCoreKit
import SessionUtilitiesKit
public enum PhotoGridItemType {
case photo, animated, video
@ -82,25 +82,26 @@ public class PhotoGridViewCell: UICollectionViewCell {
self.contentView.addSubview(selectedView)
self.contentView.addSubview(selectedBadgeView)
imageView.autoPinEdgesToSuperviewEdges()
highlightedView.autoPinEdgesToSuperviewEdges()
selectedView.autoPinEdgesToSuperviewEdges()
imageView.pin(to: contentView)
highlightedView.pin(to: contentView)
selectedView.pin(to: contentView)
// Note assets were rendered to match exactly. We don't want to re-size with
// content mode lest they become less legible.
let kContentTypeBadgeSize = CGSize(width: 18, height: 12)
contentTypeBadgeView.autoPinEdge(toSuperviewEdge: .leading, withInset: 3)
contentTypeBadgeView.autoPinEdge(toSuperviewEdge: .bottom, withInset: 3)
contentTypeBadgeView.autoSetDimensions(to: kContentTypeBadgeSize)
selectedBadgeView.autoPinEdge(toSuperviewEdge: .trailing, withInset: Values.verySmallSpacing)
selectedBadgeView.autoPinEdge(toSuperviewEdge: .bottom, withInset: Values.verySmallSpacing)
selectedBadgeView.autoSetDimensions(to: kSelectedBadgeSize)
contentTypeBadgeView.pin(.leading, to: .leading, of: contentView, withInset: 3)
contentTypeBadgeView.pin(.bottom, to: .bottom, of: contentView, withInset: -3)
contentTypeBadgeView.set(.width, to: 18)
contentTypeBadgeView.set(.height, to: 12)
selectedBadgeView.pin(.trailing, to: .trailing, of: contentView, withInset: -Values.verySmallSpacing)
selectedBadgeView.pin(.bottom, to: .bottom, of: contentView, withInset: -Values.verySmallSpacing)
selectedBadgeView.set(.width, to: kSelectedBadgeSize.width)
selectedBadgeView.set(.height, to: kSelectedBadgeSize.height)
}
@available(*, unavailable, message: "Unimplemented")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
fatalError("init(coder:) has not been implemented")
}
var image: UIImage? {
@ -127,7 +128,7 @@ public class PhotoGridViewCell: UICollectionViewCell {
guard currentItem === item else { return }
if image == nil {
Logger.debug("image == nil")
Log.debug("[PhotoGridViewCell] image == nil")
}
DispatchQueue.main.async {

@ -5,7 +5,6 @@ import Combine
import Photos
import CoreServices
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
protocol PhotoLibraryDelegate: AnyObject {
@ -138,7 +137,7 @@ class PhotoCollectionContents {
_ = imageManager.requestImage(for: asset, targetSize: thumbnailSize, contentMode: .aspectFill, options: nil, resultHandler: resultHandler)
}
private func requestImageDataSource(for asset: PHAsset) -> AnyPublisher<(dataSource: DataSource, dataUTI: String), Error> {
private func requestImageDataSource(for asset: PHAsset) -> AnyPublisher<(dataSource: (any DataSource), dataUTI: String), Error> {
return Deferred {
Future { [weak self] resolver in
@ -157,7 +156,7 @@ class PhotoCollectionContents {
return
}
guard let dataSource = DataSourceValue.dataSource(with: imageData, utiType: dataUTI) else {
guard let dataSource = DataSourceValue(data: imageData, utiType: dataUTI) else {
resolver(Result.failure(PhotoLibraryError.assertionError(description: "dataSource was unexpectedly nil")))
return
}
@ -169,7 +168,7 @@ class PhotoCollectionContents {
.eraseToAnyPublisher()
}
private func requestVideoDataSource(for asset: PHAsset) -> AnyPublisher<(dataSource: DataSource, dataUTI: String), Error> {
private func requestVideoDataSource(for asset: PHAsset) -> AnyPublisher<(dataSource: (any DataSource), dataUTI: String), Error> {
return Deferred {
Future { [weak self] resolver in
@ -186,17 +185,17 @@ class PhotoCollectionContents {
exportSession.outputFileType = AVFileType.mp4
exportSession.metadataItemFilter = AVMetadataItemFilter.forSharing()
let exportPath = OWSFileSystem.temporaryFilePath(withFileExtension: "mp4")
let exportPath = FileSystem.temporaryFilePath(fileExtension: "mp4")
let exportURL = URL(fileURLWithPath: exportPath)
exportSession.outputURL = exportURL
Logger.debug("starting video export")
Log.debug("[PhotoLibrary] Starting video export")
exportSession.exportAsynchronously { [weak exportSession] in
Logger.debug("Completed video export")
Log.debug("[PhotoLibrary] Completed video export")
guard
exportSession?.status == .completed,
let dataSource = DataSourcePath.dataSource(with: exportURL, shouldDeleteOnDeallocation: true)
let dataSource = DataSourcePath(fileUrl: exportURL, shouldDeleteOnDeinit: true)
else {
resolver(Result.failure(PhotoLibraryError.assertionError(description: "Failed to build data source for exported video URL")))
return
@ -316,8 +315,8 @@ class PhotoLibrary: NSObject, PHPhotoLibraryChangeObserver {
}
guard let photoCollection = fetchedCollection else {
Logger.info("Using empty photo collection.")
assert(PHPhotoLibrary.authorizationStatus() == .denied)
Log.debug("[PhotoLibrary] Using empty photo collection.")
Log.assert(PHPhotoLibrary.authorizationStatus() == .denied)
return PhotoCollection.empty
}
@ -338,7 +337,7 @@ class PhotoLibrary: NSObject, PHPhotoLibraryChangeObserver {
collectionIds.insert(collectionId)
guard let assetCollection = collection as? PHAssetCollection else {
owsFailDebug("Asset collection has unexpected type: \(type(of: collection))")
Log.error("[PhotoLibrary] Asset collection has unexpected type: \(type(of: collection))")
return
}
let photoCollection = PhotoCollection(id: collectionId, collection: assetCollection)

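requestImageDataSource and requestVideoDataSource above both wrap the callback-based Photos APIs in Combine's Deferred + Future so the work only starts on subscription. A minimal sketch of the same pattern, with a hypothetical error type and helper name:

import Combine
import Photos

enum PhotoLoadError: Error { case missingData }

// Bridges the callback-based Photos API into a publisher; nothing runs until the
// publisher is subscribed to, matching the Deferred + Future usage above.
func requestImageData(for asset: PHAsset, using manager: PHImageManager = .default()) -> AnyPublisher<Data, Error> {
    return Deferred {
        Future<Data, Error> { resolver in
            _ = manager.requestImageDataAndOrientation(for: asset, options: nil) { data, _, _, _ in
                guard let data: Data = data else {
                    resolver(.failure(PhotoLoadError.missingData))
                    return
                }
                resolver(.success(data))
            }
        }
    }
    .eraseToAnyPublisher()
}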
@ -4,7 +4,6 @@ import UIKit
import Combine
import Photos
import SignalUtilitiesKit
import SignalCoreKit
import SessionUIKit
import SessionUtilitiesKit
@ -17,13 +16,15 @@ class SendMediaNavigationController: UINavigationController {
// on iPhone5, 6, 6+, X, layouts.
static let bottomButtonsCenterOffset: CGFloat = -50
private let dependencies: Dependencies
private let threadId: String
private let threadVariant: SessionThread.Variant
private var disposables: Set<AnyCancellable> = Set()
// MARK: - Initialization
init(threadId: String, threadVariant: SessionThread.Variant) {
init(threadId: String, threadVariant: SessionThread.Variant, using dependencies: Dependencies) {
self.dependencies = dependencies
self.threadId = threadId
self.threadVariant = threadVariant
@ -44,19 +45,19 @@ class SendMediaNavigationController: UINavigationController {
let bottomButtonsCenterOffset = SendMediaNavigationController.bottomButtonsCenterOffset
view.addSubview(batchModeButton)
batchModeButton.setCompressionResistanceHigh()
batchModeButton.setCompressionResistance(to: .required)
batchModeButton.centerYAnchor.constraint(equalTo: view.layoutMarginsGuide.bottomAnchor, constant: bottomButtonsCenterOffset).isActive = true
batchModeButton.centerXAnchor
.constraint(equalTo: view.layoutMarginsGuide.trailingAnchor, constant: -20)
.isActive = true
view.addSubview(doneButton)
doneButton.setCompressionResistanceHigh()
doneButton.setCompressionResistance(to: .required)
doneButton.centerYAnchor.constraint(equalTo: view.layoutMarginsGuide.bottomAnchor, constant: bottomButtonsCenterOffset).isActive = true
doneButton.autoPinEdge(toSuperviewMargin: .trailing)
doneButton.pin(.trailing, toMargin: .trailing, of: view)
view.addSubview(cameraModeButton)
cameraModeButton.setCompressionResistanceHigh()
cameraModeButton.setCompressionResistance(to: .required)
cameraModeButton.centerYAnchor
.constraint(equalTo: view.layoutMarginsGuide.bottomAnchor, constant: bottomButtonsCenterOffset)
.isActive = true
@ -65,7 +66,7 @@ class SendMediaNavigationController: UINavigationController {
.isActive = true
view.addSubview(mediaLibraryModeButton)
mediaLibraryModeButton.setCompressionResistanceHigh()
mediaLibraryModeButton.setCompressionResistance(to: .required)
mediaLibraryModeButton.centerYAnchor.constraint(equalTo: view.layoutMarginsGuide.bottomAnchor, constant: bottomButtonsCenterOffset).isActive = true
mediaLibraryModeButton.centerXAnchor
.constraint(equalTo: view.layoutMarginsGuide.leadingAnchor, constant: 20)
@ -76,15 +77,15 @@ class SendMediaNavigationController: UINavigationController {
public weak var sendMediaNavDelegate: SendMediaNavDelegate?
public class func showingCameraFirst(threadId: String, threadVariant: SessionThread.Variant) -> SendMediaNavigationController {
let navController = SendMediaNavigationController(threadId: threadId, threadVariant: threadVariant)
public class func showingCameraFirst(threadId: String, threadVariant: SessionThread.Variant, using dependencies: Dependencies) -> SendMediaNavigationController {
let navController = SendMediaNavigationController(threadId: threadId, threadVariant: threadVariant, using: dependencies)
navController.viewControllers = [navController.captureViewController]
return navController
}
public class func showingMediaLibraryFirst(threadId: String, threadVariant: SessionThread.Variant) -> SendMediaNavigationController {
let navController = SendMediaNavigationController(threadId: threadId, threadVariant: threadVariant)
public class func showingMediaLibraryFirst(threadId: String, threadVariant: SessionThread.Variant, using dependencies: Dependencies) -> SendMediaNavigationController {
let navController = SendMediaNavigationController(threadId: threadId, threadVariant: threadVariant, using: dependencies)
navController.viewControllers = [navController.mediaLibraryViewController]
return navController
@ -123,7 +124,7 @@ class SendMediaNavigationController: UINavigationController {
mediaLibraryModeButton.isHidden = false
default:
owsFailDebug("unexpected topViewController: \(topViewController)")
Log.error("[SendMediaNavigationController] unexpected topViewController: \(topViewController)")
}
doneButton.updateCount()
@ -224,22 +225,27 @@ class SendMediaNavigationController: UINavigationController {
return vc
}()
private func pushApprovalViewController() {
private func pushApprovalViewController() -> Bool {
guard let sendMediaNavDelegate = self.sendMediaNavDelegate else {
owsFailDebug("sendMediaNavDelegate was unexpectedly nil")
return
Log.error("[SendMediaNavigationController] sendMediaNavDelegate was unexpectedly nil")
return false
}
let approvalViewController = AttachmentApprovalViewController(
mode: .sharedNavigation,
threadId: self.threadId,
threadVariant: self.threadVariant,
attachments: self.attachments
)
guard
let approvalViewController = AttachmentApprovalViewController(
mode: .sharedNavigation,
threadId: self.threadId,
threadVariant: self.threadVariant,
attachments: self.attachments,
using: dependencies
)
else { return false }
approvalViewController.approvalDelegate = self
approvalViewController.messageText = sendMediaNavDelegate.sendMediaNavInitialMessageText(self)
pushViewController(approvalViewController, animated: true)
return true
}
private func didRequestExit() {
@ -299,7 +305,17 @@ extension SendMediaNavigationController: PhotoCaptureViewControllerDelegate {
updateButtons(topViewController: photoCaptureViewController)
}
else {
pushApprovalViewController()
// Try to push the approval controller, otherwise show an error
if !pushApprovalViewController() {
let modal: ConfirmationModal = ConfirmationModal(
info: ConfirmationModal.Info(
title: "IMAGE_PICKER_FAILED_TO_PROCESS_ATTACHMENTS".localized(),
cancelTitle: "BUTTON_OK".localized(),
cancelStyle: .alert_text
)
)
present(modal, animated: true)
}
}
}
@ -340,7 +356,7 @@ extension SendMediaNavigationController: ImagePickerGridControllerDelegate {
switch result {
case .finished: break
case .failure(let error):
Logger.error("failed to prepare attachments. error: \(error)")
Log.error("[SendMediaNavigationController] Failed to prepare attachments. error: \(error)")
modal.dismiss { [weak self] in
let modal: ConfirmationModal = ConfirmationModal(
targetView: self?.view,
@ -355,10 +371,21 @@ extension SendMediaNavigationController: ImagePickerGridControllerDelegate {
}
},
receiveValue: { attachments in
Logger.debug("built all attachments")
Log.debug("[SendMediaNavigationController] Built all attachments")
modal.dismiss {
self?.attachmentDraftCollection.selectedFromPicker(attachments: attachments)
self?.pushApprovalViewController()
guard self?.pushApprovalViewController() == true else {
let modal: ConfirmationModal = ConfirmationModal(
info: ConfirmationModal.Info(
title: "IMAGE_PICKER_FAILED_TO_PROCESS_ATTACHMENTS".localized(),
cancelTitle: "BUTTON_OK".localized(),
cancelStyle: .alert_text
)
)
self?.present(modal, animated: true)
return
}
}
}
)
@ -418,7 +445,7 @@ extension SendMediaNavigationController: AttachmentApprovalViewControllerDelegat
func attachmentApproval(_ attachmentApproval: AttachmentApprovalViewController, didRemoveAttachment attachment: SignalAttachment) {
guard let removedDraft = attachmentDraftCollection.attachmentDrafts.first(where: { $0.attachment == attachment}) else {
owsFailDebug("removedDraft was unexpectedly nil")
Log.error("[SendMediaNavigationController] removedDraft was unexpectedly nil")
return
}
@ -437,16 +464,14 @@ extension SendMediaNavigationController: AttachmentApprovalViewControllerDelegat
didApproveAttachments attachments: [SignalAttachment],
forThreadId threadId: String,
threadVariant: SessionThread.Variant,
messageText: String?,
using dependencies: Dependencies
messageText: String?
) {
sendMediaNavDelegate?.sendMediaNav(
self,
didApproveAttachments: attachments,
forThreadId: threadId,
threadVariant: threadVariant,
messageText: messageText,
using: dependencies
messageText: messageText
)
}
@ -665,10 +690,8 @@ private class DoneButton: UIView {
badgeLabel.pin(to: badge, withInset: 4)
// Constrain to be a pill that is at least a circle, and maybe wider.
badgeLabel.autoPin(toAspectRatio: 1.0, relation: .greaterThanOrEqual)
NSLayoutConstraint.autoSetPriority(.defaultLow) {
badgeLabel.autoPinToSquareAspectRatio()
}
badgeLabel.set(.width, greaterThanOrEqualTo: .height, of: badgeLabel, multiplier: 1)
badgeLabel.set(.width, to: .height, of: badgeLabel, multiplier: 1).setting(priority: .defaultLow)
let stackView = UIStackView(arrangedSubviews: [badge, chevron])
stackView.axis = .horizontal
@ -704,7 +727,7 @@ private class DoneButton: UIView {
themeTintColor: ThemeValue
) {
UIView.animate(withDuration: 0.25) {
self.container.transform = CGAffineTransform.identity.scale(scale)
self.container.transform = CGAffineTransform.identity.scaledBy(x: scale, y: scale)
self.badgeLabel.themeTextColor = themeTintColor
self.badge.themeBackgroundColor = themeBadgeBackgroundColor
self.container.themeBackgroundColor = themeBackgroundColor
@ -784,7 +807,7 @@ private class DoneButton: UIView {
protocol SendMediaNavDelegate: AnyObject {
func sendMediaNavDidCancel(_ sendMediaNavigationController: SendMediaNavigationController?)
func sendMediaNav(_ sendMediaNavigationController: SendMediaNavigationController, didApproveAttachments attachments: [SignalAttachment], forThreadId threadId: String, threadVariant: SessionThread.Variant, messageText: String?, using dependencies: Dependencies)
func sendMediaNav(_ sendMediaNavigationController: SendMediaNavigationController, didApproveAttachments attachments: [SignalAttachment], forThreadId threadId: String, threadVariant: SessionThread.Variant, messageText: String?)
func sendMediaNavInitialMessageText(_ sendMediaNavigationController: SendMediaNavigationController) -> String?
func sendMediaNav(_ sendMediaNavigationController: SendMediaNavigationController, didChangeMessageText newMessageText: String?)

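The recurring `using dependencies: Dependencies` additions above are constructor injection: the controller stores the container it is given at init time and its factory methods thread it through. A minimal sketch under that reading; the types and names below are stand-ins, not the project's real ones:

import UIKit

// Stand-in dependency container; the real project type is only known here from usage.
struct AppDependencies {
    let dateNow: () -> Date

    init(dateNow: @escaping () -> Date = { Date() }) {
        self.dateNow = dateNow
    }
}

final class ExampleNavigationController: UINavigationController {
    private let dependencies: AppDependencies

    init(using dependencies: AppDependencies) {
        self.dependencies = dependencies
        super.init(nibName: nil, bundle: nil)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // Factory methods keep threading the same container through, mirroring
    // showingCameraFirst/showingMediaLibraryFirst above.
    static func showingExampleFirst(using dependencies: AppDependencies) -> ExampleNavigationController {
        return ExampleNavigationController(using: dependencies)
    }
}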
@ -8,7 +8,6 @@ import SessionUIKit
import SessionMessagingKit
import SessionUtilitiesKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionSnodeKit
@UIApplicationMain
@ -38,8 +37,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
Singleton.setup(appContext: MainAppContext())
verifyDBKeysAvailableBeforeBackgroundLaunch()
Cryptography.seedRandom()
AppVersion.sharedInstance()
_ = AppVersion.shared
AppEnvironment.shared.pushRegistrationManager.createVoipRegistryIfNecessary()
// Prevent the device from sleeping during database view async registration
@ -210,9 +208,6 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
// but answers the call on another device
stopPollers(shouldStopUserPoller: !self.hasCallOngoing())
// FIXME: Move this to be initialised as part of `AppDelegate`
let dependencies: Dependencies = Dependencies()
// Stop all jobs except for message sending and when completed suspend the database
JobRunner.stopAndClearPendingJobs(exceptForVariant: .messageSend, using: dependencies) { neededBackgroundProcessing in
if !self.hasCallOngoing() && (!neededBackgroundProcessing || Singleton.hasAppContext && Singleton.appContext.isInBackground) {
@ -404,7 +399,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
DeviceSleepManager.sharedInstance.removeBlock(blockObject: self)
/// App launch hasn't really completed until the main screen is loaded so wait until then to register it
AppVersion.sharedInstance().mainAppLaunchDidComplete()
AppVersion.shared.mainAppLaunchDidComplete()
/// App won't be ready for extensions and no need to enqueue a config sync unless we successfully completed startup
Storage.shared.writeAsync { db in
@ -417,7 +412,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
db[.isReadyForAppExtensions] = true
if Identity.userCompletedRequiredOnboarding(db) {
let appVersion: AppVersion = AppVersion.sharedInstance()
let appVersion: AppVersion = AppVersion.shared
// If the device needs to sync config or the user updated to a new version
if
@ -672,8 +667,8 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
}
case .completed:
DispatchQueue.main.async {
let viewController: HomeVC = HomeVC()
DispatchQueue.main.async { [dependencies] in
let viewController: HomeVC = HomeVC(using: dependencies)
/// We want to start observing the changes for the 'HomeVC' and want to wait until we actually get data back before we
/// continue as we don't want to show a blank home screen
@ -763,8 +758,8 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
/// the notification or choosing a UNNotificationAction. The delegate must be set before the application returns from
/// application:didFinishLaunchingWithOptions:.
func userNotificationCenter(_ center: UNUserNotificationCenter, didReceive response: UNNotificationResponse, withCompletionHandler completionHandler: @escaping () -> Void) {
Singleton.appReadiness.runNowOrWhenAppDidBecomeReady {
AppEnvironment.shared.userNotificationActionHandler.handleNotificationResponse(response, completionHandler: completionHandler)
Singleton.appReadiness.runNowOrWhenAppDidBecomeReady { [dependencies] in
AppEnvironment.shared.userNotificationActionHandler.handleNotificationResponse(response, completionHandler: completionHandler, using: dependencies)
}
}

@ -5,7 +5,6 @@
import Foundation
import SessionUtilitiesKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionMessagingKit
public class AppEnvironment {
@ -16,7 +15,7 @@ public class AppEnvironment {
get { return _shared }
set {
guard SNUtilitiesKit.isRunningTests else {
owsFailDebug("Can only switch environments in tests.")
Log.error("[AppEnvironment] Can only switch environments in tests.")
return
}

@ -1,7 +1,6 @@
// Copyright © 2023 Rangeproof Pty Ltd. All rights reserved.
import UIKit
import SignalCoreKit
import SessionUtilitiesKit
final class MainAppContext: AppContext {
@ -10,7 +9,19 @@ final class MainAppContext: AppContext {
let appLaunchTime = Date()
let isMainApp: Bool = true
var isMainAppAndActive: Bool { UIApplication.shared.applicationState == .active }
var isMainAppAndActive: Bool {
var result: Bool = false
switch Thread.isMainThread {
case true: result = (UIApplication.shared.applicationState == .active)
case false:
DispatchQueue.main.sync {
result = (UIApplication.shared.applicationState == .active)
}
}
return result
}
var frontmostViewController: UIViewController? { UIApplication.shared.frontmostViewControllerIgnoringAlerts }
var mainWindow: UIWindow?
@ -71,7 +82,7 @@ final class MainAppContext: AppContext {
// MARK: - Notifications
@objc private func applicationWillEnterForeground(notification: NSNotification) {
AssertIsOnMainThread()
Log.assertOnMainThread()
self.reportedApplicationState = .inactive
@ -82,7 +93,7 @@ final class MainAppContext: AppContext {
}
@objc private func applicationDidEnterBackground(notification: NSNotification) {
AssertIsOnMainThread()
Log.assertOnMainThread()
self.reportedApplicationState = .background
@ -93,7 +104,7 @@ final class MainAppContext: AppContext {
}
@objc private func applicationWillResignActive(notification: NSNotification) {
AssertIsOnMainThread()
Log.assertOnMainThread()
self.reportedApplicationState = .inactive
@ -104,7 +115,7 @@ final class MainAppContext: AppContext {
}
@objc private func applicationDidBecomeActive(notification: NSNotification) {
AssertIsOnMainThread()
Log.assertOnMainThread()
self.reportedApplicationState = .active
@ -197,10 +208,9 @@ final class MainAppContext: AppContext {
else { return }
}
if (!OWSFileSystem.deleteFile(filePath)) {
// This can happen if the app launches before the phone is unlocked.
// Clean up will occur when app becomes active.
}
// This can happen if the app launches before the phone is unlocked.
// Clean up will occur when app becomes active.
try? FileSystem.deleteFile(at: filePath)
}
}
}

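isMainAppAndActive above now guards the read of UIApplication.shared.applicationState, which UIKit only allows on the main thread; off-main callers hop over synchronously first. A minimal sketch of that guard as a free function:

import UIKit

// applicationState must be read on the main thread, so off-main callers dispatch
// over synchronously (this assumes the main thread is not blocked waiting on the
// calling queue, otherwise the sync hop would deadlock).
func isApplicationActive() -> Bool {
    guard Thread.isMainThread else {
        return DispatchQueue.main.sync { UIApplication.shared.applicationState == .active }
    }
    return UIApplication.shared.applicationState == .active
}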
@ -1,9 +1,10 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
//
// stringlint:disable
import Foundation
import SessionUtilitiesKit
import SessionMessagingKit
import SignalCoreKit
import SessionUIKit
public struct SessionApp {
@ -107,10 +108,15 @@ public struct SessionApp {
// MARK: - Functions
public static func resetAppData(onReset: (() -> ())? = nil) {
LibSession.clearMemoryState()
public static func resetAppData(
using dependencies: Dependencies,
onReset: (() -> ())? = nil
) {
LibSession.clearLoggers()
LibSession.clearMemoryState(using: dependencies)
LibSession.clearSnodeCache()
LibSession.suspendNetworkAccess()
PushNotificationAPI.resetKeys()
Storage.resetAllStorage()
ProfileManager.resetProfileStorage()
Attachment.resetAttachmentStorage()
@ -123,15 +129,15 @@ public struct SessionApp {
exit(0)
}
public static func showHomeView() {
public static func showHomeView(using dependencies: Dependencies) {
guard Thread.isMainThread else {
DispatchQueue.main.async {
self.showHomeView()
self.showHomeView(using: dependencies)
}
return
}
let homeViewController: HomeVC = HomeVC()
let homeViewController: HomeVC = HomeVC(using: dependencies)
let navController: UINavigationController = StyledNavigationController(rootViewController: homeViewController)
(UIApplication.shared.delegate as? AppDelegate)?.window?.rootViewController = navController
}

@ -5,7 +5,6 @@ import Combine
import GRDB
import SessionMessagingKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
import SessionSnodeKit
@ -453,7 +452,7 @@ public class NotificationPresenter: NotificationsProtocol {
guard Storage.shared[.playNotificationSoundInForeground] else { return false }
let nowMs: UInt64 = UInt64(floor(Date().timeIntervalSince1970 * 1000))
let recentThreshold = nowMs - UInt64(kAudioNotificationsThrottleInterval * Double(kSecondInMs))
let recentThreshold = nowMs - UInt64(kAudioNotificationsThrottleInterval * 1000)
let recentNotifications = mostRecentNotifications.wrappedValue.filter { $0 > recentThreshold }
@ -558,12 +557,12 @@ class NotificationActionHandler {
.eraseToAnyPublisher()
}
func showThread(userInfo: [AnyHashable: Any]) -> AnyPublisher<Void, Never> {
func showThread(userInfo: [AnyHashable: Any], using dependencies: Dependencies) -> AnyPublisher<Void, Never> {
guard
let threadId = userInfo[AppNotificationUserInfoKey.threadId] as? String,
let threadVariantRaw = userInfo[AppNotificationUserInfoKey.threadVariantRaw] as? Int,
let threadVariant: SessionThread.Variant = SessionThread.Variant(rawValue: threadVariantRaw)
else { return showHomeVC() }
else { return showHomeVC(using: dependencies) }
// If this happens when the app is not visible, we skip the animation so the thread
// can be visible to the user immediately upon opening the app, rather than having to watch
@ -579,8 +578,8 @@ class NotificationActionHandler {
.eraseToAnyPublisher()
}
func showHomeVC() -> AnyPublisher<Void, Never> {
SessionApp.showHomeView()
func showHomeVC(using dependencies: Dependencies) -> AnyPublisher<Void, Never> {
SessionApp.showHomeView(using: dependencies)
return Just(())
.eraseToAnyPublisher()
}
@ -625,7 +624,7 @@ enum NotificationError: Error {
extension NotificationError {
static func failDebug(_ description: String) -> NotificationError {
owsFailDebug(description)
Log.error("[NotificationActionHandler] Failed with error: \(description)")
return NotificationError.assertionError(description: description)
}
}

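The foreground-sound throttle above keeps notification timestamps in milliseconds, so the interval in seconds is multiplied by 1000 before being subtracted from "now". A minimal sketch of that arithmetic; the interval value here is an assumption for illustration:

import Foundation

// Assumed throttle window; the real constant lives elsewhere in the project.
let kAudioNotificationsThrottleInterval: TimeInterval = 2

// Returns the timestamps (in ms since 1970) that fall inside the throttle window.
func recentNotificationTimestamps(in allTimestampsMs: [UInt64], now: Date = Date()) -> [UInt64] {
    let nowMs: UInt64 = UInt64(floor(now.timeIntervalSince1970 * 1000))
    let recentThreshold: UInt64 = nowMs - UInt64(kAudioNotificationsThrottleInterval * 1000)
    return allTimestampsMs.filter { $0 > recentThreshold }
}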
@ -6,7 +6,6 @@ import PushKit
import GRDB
import SessionMessagingKit
import SignalUtilitiesKit
import SignalCoreKit
import SessionUtilitiesKit
public enum PushRegistrationError: Error {
@ -52,8 +51,6 @@ public enum PushRegistrationError: Error {
// MARK: - Public interface
public func requestPushTokens() -> AnyPublisher<(pushToken: String, voipToken: String), Error> {
Logger.info("")
return registerUserNotificationSettings()
.setFailureType(to: Error.self)
.tryFlatMap { _ -> AnyPublisher<(pushToken: String, voipToken: String), Error> in
@ -77,7 +74,7 @@ public enum PushRegistrationError: Error {
// Vanilla push token is obtained from the system via AppDelegate
public func didReceiveVanillaPushToken(_ tokenData: Data, using dependencies: Dependencies = Dependencies()) {
guard let vanillaTokenResolver = self.vanillaTokenResolver else {
owsFailDebug("publisher completion in \(#function) unexpectedly nil")
Log.error("[PushRegistrationManager] Publisher completion in \(#function) unexpectedly nil")
return
}
@ -89,7 +86,7 @@ public enum PushRegistrationError: Error {
// Vanilla push token is obtained from the system via AppDelegate
public func didFailToReceiveVanillaPushToken(error: Error, using dependencies: Dependencies = Dependencies()) {
guard let vanillaTokenResolver = self.vanillaTokenResolver else {
owsFailDebug("publisher completion in \(#function) unexpectedly nil")
Log.error("[PushRegistrationManager] Publisher completion in \(#function) unexpectedly nil")
return
}
@ -232,7 +229,7 @@ public enum PushRegistrationError: Error {
createVoipRegistryIfNecessary()
guard let voipRegistry: PKPushRegistry = self.voipRegistry else {
owsFailDebug("failed to initialize voipRegistry")
Log.error("[PushRegistrationManager] Failed to initialize voipRegistry")
return Fail(
error: PushRegistrationError.assertionError(description: "failed to initialize voipRegistry")
).eraseToAnyPublisher()
@ -241,7 +238,7 @@ public enum PushRegistrationError: Error {
// If we've already completed registering for a voip token, resolve it immediately,
// rather than waiting for the delegate method to be called.
if let voipTokenData: Data = voipRegistry.pushToken(for: .voIP) {
Logger.info("using pre-registered voIP token")
Log.info("[PushRegistrationManager] Using pre-registered voIP token")
return Just(voipTokenData.toHexString())
.setFailureType(to: Error.self)
.eraseToAnyPublisher()
@ -256,7 +253,7 @@ public enum PushRegistrationError: Error {
return publisher
.map { voipTokenData -> String? in
Logger.info("successfully registered for voip push notifications")
Log.info("[PushRegistrationManager] Successfully registered for voip push notifications")
return voipTokenData?.toHexString()
}
.handleEvents(
@ -271,18 +268,17 @@ public enum PushRegistrationError: Error {
// MARK: - PKPushRegistryDelegate
public func pushRegistry(_ registry: PKPushRegistry, didUpdate pushCredentials: PKPushCredentials, for type: PKPushType) {
Logger.info("")
owsAssertDebug(type == .voIP)
owsAssertDebug(pushCredentials.type == .voIP)
Log.assert(type == .voIP)
Log.assert(pushCredentials.type == .voIP)
voipTokenResolver?(Result.success(pushCredentials.token))
}
// NOTE: This function MUST report an incoming call.
public func pushRegistry(_ registry: PKPushRegistry, didReceiveIncomingPushWith payload: PKPushPayload, for type: PKPushType) {
SNLog("[Calls] Receive new voip notification.")
owsAssertDebug(Singleton.hasAppContext && Singleton.appContext.isMainApp)
owsAssertDebug(type == .voIP)
Log.info("[PushRegistrationManager] Receive new voip notification.")
Log.assert(Singleton.hasAppContext && Singleton.appContext.isMainApp)
Log.assert(type == .voIP)
let payload = payload.dictionaryPayload
guard

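The voip registration path above short-circuits when PushKit already holds a token, converting it straight to hex instead of waiting for the delegate callback. A minimal sketch of that fast path; the hex conversion is written out locally here rather than relying on the project's toHexString() extension:

import PushKit

// If PushKit has a cached voIP token, return it as a lowercase hex string;
// otherwise the caller falls back to waiting for pushRegistry(_:didUpdate:for:).
func cachedVoipToken(from registry: PKPushRegistry) -> String? {
    guard let tokenData: Data = registry.pushToken(for: .voIP) else { return nil }
    return tokenData.map { String(format: "%02x", $0) }.joined()
}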
@ -3,11 +3,9 @@
import Foundation
import Combine
import GRDB
import SignalCoreKit
import SessionSnodeKit
import SessionMessagingKit
import SessionUtilitiesKit
import SignalCoreKit
public enum SyncPushTokensJob: JobExecutor {
public static let maxFailureCount: Int = -1
@ -174,7 +172,7 @@ public enum SyncPushTokensJob: JobExecutor {
Log.error("[SyncPushTokensJob] Failed to register due to error: \(error)")
case .finished:
Log.debug("Recording push tokens locally. pushToken: \(redact(pushToken)), voipToken: \(redact(voipToken))")
Log.debug("[SyncPushTokensJob] Recording push tokens locally. pushToken: \(redact(pushToken)), voipToken: \(redact(voipToken))")
Log.info("[SyncPushTokensJob] Completed")
dependencies.standardUserDefaults[.lastPushNotificationSync] = dependencies.dateNow

@ -4,7 +4,6 @@ import Foundation
import Combine
import UserNotifications
import SessionMessagingKit
import SignalCoreKit
import SignalUtilitiesKit
import SessionUtilitiesKit
@ -82,10 +81,10 @@ extension UserNotificationPresenterAdaptee: NotificationPresenterAdaptee {
if granted {}
else if let error: Error = error {
Logger.error("failed with error: \(error)")
Log.error("[UserNotificationPresenterAdaptee] Failed with error: \(error)")
}
else {
Logger.error("failed without error.")
Log.error("[UserNotificationPresenterAdaptee] Failed without error.")
}
// Note that the promise is fulfilled regardless of whether notification permissions were
@ -134,12 +133,11 @@ extension UserNotificationPresenterAdaptee: NotificationPresenterAdaptee {
var trigger: UNNotificationTrigger?
if shouldPresentNotification {
if let displayableTitle = title?.filterForDisplay {
if let displayableTitle = title?.filteredForDisplay {
content.title = displayableTitle
}
if let displayableBody = body.filterForDisplay {
content.body = displayableBody
}
content.body = body.filteredForDisplay
if shouldGroupNotification {
trigger = UNTimeIntervalNotificationTrigger(
@ -171,7 +169,7 @@ extension UserNotificationPresenterAdaptee: NotificationPresenterAdaptee {
}
else {
// Play sound and vibrate, but without a `body` no banner will show.
Logger.debug("supressing notification body")
Log.debug("[UserNotificationPresenterAdaptee] Suppressing notification body")
}
let request = UNNotificationRequest(
@ -180,7 +178,7 @@ extension UserNotificationPresenterAdaptee: NotificationPresenterAdaptee {
trigger: trigger
)
Logger.debug("presenting notification with identifier: \(notificationIdentifier)")
Log.debug("[UserNotificationPresenterAdaptee] Presenting notification with identifier: \(notificationIdentifier)")
if isReplacingNotification { cancelNotifications(identifiers: [notificationIdentifier]) }
@ -233,7 +231,7 @@ extension UserNotificationPresenterAdaptee: NotificationPresenterAdaptee {
}
guard let notificationThreadId = userInfo[AppNotificationUserInfoKey.threadId] as? String else {
owsFailDebug("threadId was unexpectedly nil")
Log.error("[UserNotificationPresenterAdaptee] threadId was unexpectedly nil")
return true
}
@ -253,10 +251,9 @@ public class UserNotificationActionHandler: NSObject {
return NotificationActionHandler.shared
}
@objc
func handleNotificationResponse( _ response: UNNotificationResponse, completionHandler: @escaping () -> Void) {
AssertIsOnMainThread()
handleNotificationResponse(response)
func handleNotificationResponse( _ response: UNNotificationResponse, completionHandler: @escaping () -> Void, using dependencies: Dependencies) {
Log.assertOnMainThread()
handleNotificationResponse(response, using: dependencies)
.subscribe(on: DispatchQueue.global(qos: .userInitiated))
.receive(on: DispatchQueue.main)
.sinkUntilComplete(
@ -272,8 +269,8 @@ public class UserNotificationActionHandler: NSObject {
)
}
func handleNotificationResponse( _ response: UNNotificationResponse) -> AnyPublisher<Void, Error> {
AssertIsOnMainThread()
func handleNotificationResponse( _ response: UNNotificationResponse, using dependencies: Dependencies) -> AnyPublisher<Void, Error> {
Log.assertOnMainThread()
assert(Singleton.appReadiness.isAppReady)
let userInfo: [AnyHashable: Any] = response.notification.request.content.userInfo
@ -282,7 +279,7 @@ public class UserNotificationActionHandler: NSObject {
switch response.actionIdentifier {
case UNNotificationDefaultActionIdentifier:
Log.debug("Notification response: default action")
return actionHandler.showThread(userInfo: userInfo)
return actionHandler.showThread(userInfo: userInfo, using: dependencies)
.setFailureType(to: Error.self)
.eraseToAnyPublisher()
@ -316,7 +313,7 @@ public class UserNotificationActionHandler: NSObject {
return actionHandler.reply(userInfo: userInfo, replyText: textInputResponse.userText, applicationState: applicationState)
case .showThread:
return actionHandler.showThread(userInfo: userInfo)
return actionHandler.showThread(userInfo: userInfo, using: dependencies)
.setFailureType(to: Error.self)
.eraseToAnyPublisher()
}

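The presenter above builds a UNNotificationRequest whose trigger is only set when notifications should be grouped, using a short UNTimeIntervalNotificationTrigger delay. A minimal sketch of that shape; the identifier and delay handling are simplified assumptions:

import UserNotifications

// Builds and schedules a local notification; a non-nil groupingDelay adds a short
// time-interval trigger so rapid-fire notifications coalesce instead of all firing at once.
func presentNotification(title: String, body: String, groupingDelay: TimeInterval?) {
    let content = UNMutableNotificationContent()
    content.title = title
    content.body = body

    var trigger: UNNotificationTrigger?
    if let groupingDelay: TimeInterval = groupingDelay {
        trigger = UNTimeIntervalNotificationTrigger(timeInterval: groupingDelay, repeats: false)
    }

    let request = UNNotificationRequest(
        identifier: UUID().uuidString,
        content: content,
        trigger: trigger
    )
    UNUserNotificationCenter.current().add(request, withCompletionHandler: nil)
}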
@ -125,7 +125,7 @@ struct DisplayNameScreen: View {
guard self.flow == .register else {
self.flow.completeRegistration()
let homeVC: HomeVC = HomeVC(flow: self.flow)
let homeVC: HomeVC = HomeVC(flow: self.flow, using: dependencies)
self.host.controller?.navigationController?.setViewControllers([ homeVC ], animated: true)
return

@ -1,7 +1,6 @@
// Copyright © 2023 Rangeproof Pty Ltd. All rights reserved.
import SwiftUI
import Sodium
import SessionUIKit
import SignalUtilitiesKit
import SessionUtilitiesKit

@ -111,10 +111,10 @@ struct LoadingScreen: View {
withAnimation(.linear(duration: 0.3)) {
self.percentage = 1
}
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [dependencies] in
self.flow.completeRegistration()
let homeVC: HomeVC = HomeVC(flow: self.flow)
let homeVC: HomeVC = HomeVC(flow: self.flow, using: dependencies)
self.host.controller?.navigationController?.setViewControllers([ homeVC ], animated: true)
}

@ -2,7 +2,6 @@
import Foundation
import Combine
import Sodium
import GRDB
import SessionUtilitiesKit
import SessionMessagingKit
@ -103,7 +102,7 @@ enum Onboarding {
/// account (eg. returning from the PN setting screen to the seed entry screen when linking a device)
func unregister(using dependencies: Dependencies) {
// Clear the in-memory state from LibSession
LibSession.clearMemoryState()
LibSession.clearMemoryState(using: dependencies)
// Clear any data which gets set during Onboarding
Storage.shared.write { db in

@ -160,7 +160,7 @@ struct PNModeScreen: View {
private func finishRegister() {
self.flow.completeRegistration()
let homeVC: HomeVC = HomeVC(flow: self.flow)
let homeVC: HomeVC = HomeVC(flow: self.flow, using: dependencies)
self.host.controller?.navigationController?.setViewControllers([ homeVC ], animated: true)
return
}

@ -364,13 +364,16 @@ private final class LineView: UIView {
private func expandDot() {
UIView.animate(withDuration: 0.5) { [weak self] in
self?.dotView.transform = CGAffineTransform.scale(PathVC.expandedDotSize / PathVC.dotSize)
self?.dotView.transform = CGAffineTransform(
scaleX: PathVC.expandedDotSize / PathVC.dotSize,
y: PathVC.expandedDotSize / PathVC.dotSize
)
}
}
private func collapseDot() {
UIView.animate(withDuration: 0.5) { [weak self] in
self?.dotView.transform = CGAffineTransform.scale(1)
self?.dotView.transform = CGAffineTransform(scaleX: 1, y: 1)
}
}

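expandDot/collapseDot above replace the removed scale(_:) helper, which appears with a single uniform factor, with the standard CGAffineTransform(scaleX:y:) initialiser applied to both axes. A minimal sketch of the equivalent helper:

import UIKit

// Uniform scaling expressed with the standard CoreGraphics initialiser.
func uniformScale(_ factor: CGFloat) -> CGAffineTransform {
    return CGAffineTransform(scaleX: factor, y: factor)
}

// e.g. dotView.transform = uniformScale(expandedDotSize / dotSize)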
@ -268,8 +268,8 @@ final class AppearanceViewController: BaseVC {
nightModeStackView.pin(.leading, to: .leading, of: contentView)
nightModeStackView.set(.width, to: .width, of: contentView)
nightModeToggleLabel.setContentHuggingVerticalHigh()
nightModeToggleLabel.setCompressionResistanceVerticalHigh()
nightModeToggleLabel.setContentHugging(.vertical, to: .required)
nightModeToggleLabel.setCompressionResistance(.vertical, to: .required)
nightModeToggleLabel.center(.vertical, in: nightModeToggleView)
nightModeToggleLabel.pin(.leading, to: .leading, of: nightModeToggleView, withInset: Values.largeSpacing)

@ -7,7 +7,6 @@ import DifferenceKit
import SessionUIKit
import SessionMessagingKit
import SessionUtilitiesKit
import SignalCoreKit
class HelpViewModel: SessionTableViewModel, NavigatableStateHolder, ObservableTableSource {
typealias TableItem = Section

@ -38,8 +38,8 @@ class ImagePickerHandler: NSObject, UIImagePickerControllerDelegate & UINavigati
guard
let resourceValues: URLResourceValues = (try? imageUrl.resourceValues(forKeys: [.typeIdentifierKey])),
let type: Any = resourceValues.allValues.first?.value,
let typeString: String = type as? String,
MIMETypeUtil.supportedAnimatedImageUTITypes().contains(typeString)
let utiTypeString: String = type as? String,
MimeTypeUtil.isAnimated(utiType: utiTypeString)
else {
let viewController: CropScaleImageViewController = CropScaleImageViewController(
srcImage: rawAvatar,

@ -289,7 +289,7 @@ final class NukeDataModal: Modal {
// profile storage
let wasUnlinked: Bool = UserDefaults.standard[.wasUnlinked]
SessionApp.resetAppData {
SessionApp.resetAppData(using: dependencies) {
// Resetting the data clears the old user defaults. We need to restore the unlink default.
UserDefaults.standard[.wasUnlinked] = wasUnlinked
}

@ -211,7 +211,10 @@ class PrivacySettingsViewModel: SessionTableViewModel, NavigationItemSource, Nav
// Use a transform scale to reduce the size of the typing indicator to the
// desired size (this way the animation remains intact)
let cell: TypingIndicatorCell = TypingIndicatorCell()
cell.transform = CGAffineTransform.scale(targetHeight / cell.bounds.height)
cell.transform = CGAffineTransform(
scaleX: targetHeight / cell.bounds.height,
y: targetHeight / cell.bounds.height
)
cell.typingIndicatorView.startAnimation()
result.addSubview(cell)

@ -3,7 +3,6 @@
import Foundation
import Combine
import GRDB
import YYImage
import DifferenceKit
import SessionUIKit
import SessionMessagingKit

@ -59,17 +59,21 @@ public class CaptionContainerView: UIView {
override init(frame: CGRect) {
super.init(frame: frame)
setContentHuggingHigh()
setCompressionResistanceHigh()
setContentHugging(to: .required)
setCompressionResistance(to: .required)
addSubview(currentCaptionView)
currentCaptionView.autoPinEdgesToSuperviewEdges(with: .zero, excludingEdge: .top)
currentCaptionView.autoPinEdge(toSuperviewEdge: .top, withInset: 0, relation: .greaterThanOrEqual)
currentCaptionView.pin(.top, greaterThanOrEqualTo: .top, of: self)
currentCaptionView.pin(.leading, to: .leading, of: self)
currentCaptionView.pin(.trailing, to: .trailing, of: self)
currentCaptionView.pin(.bottom, to: .bottom, of: self)
pendingCaptionView.alpha = 0
addSubview(pendingCaptionView)
pendingCaptionView.autoPinEdgesToSuperviewEdges(with: .zero, excludingEdge: .top)
pendingCaptionView.autoPinEdge(toSuperviewEdge: .top, withInset: 0, relation: .greaterThanOrEqual)
pendingCaptionView.pin(.top, greaterThanOrEqualTo: .top, of: self)
pendingCaptionView.pin(.leading, to: .leading, of: self)
pendingCaptionView.pin(.trailing, to: .trailing, of: self)
pendingCaptionView.pin(.bottom, to: .bottom, of: self)
}
public required init?(coder aDecoder: NSCoder) {
@ -126,11 +130,13 @@ private class CaptionView: UIView {
super.init(frame: frame)
addSubview(textView)
textView.autoPinEdgesToSuperviewMargins()
textView.pin(toMarginsOf: self)
addSubview(scrollFadeView)
scrollFadeView.autoPinEdgesToSuperviewEdges(with: .zero, excludingEdge: .top)
scrollFadeView.autoSetDimension(.height, toSize: 20)
scrollFadeView.pin(.leading, to: .leading, of: self)
scrollFadeView.pin(.trailing, to: .trailing, of: self)
scrollFadeView.pin(.bottom, to: .bottom, of: self)
scrollFadeView.set(.height, to: 20)
}
required init?(coder aDecoder: NSCoder) {
@ -147,8 +153,7 @@ private class CaptionView: UIView {
// MARK: -
class CaptionTextView: UITextView {
var kMaxHeight: CGFloat = ScaleFromIPhone5(200)
var kMaxHeight: CGFloat = Values.scaleFromIPhone5(200)
override var text: String! {
didSet {

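CaptionContainerView and CaptionView above move from the old autoPin* helpers to SessionUIKit's pin(...)/set(...) style. A hypothetical sketch of what such an edge-pinning helper could look like on top of NSLayoutAnchor; the project's real helpers are only known here from their usage in this diff and may differ:

import UIKit

// Hypothetical edge-pinning helper, standing in for the pin(to:) calls above.
extension UIView {
    func pinEdges(to other: UIView) {
        translatesAutoresizingMaskIntoConstraints = false
        NSLayoutConstraint.activate([
            leadingAnchor.constraint(equalTo: other.leadingAnchor),
            trailingAnchor.constraint(equalTo: other.trailingAnchor),
            topAnchor.constraint(equalTo: other.topAnchor),
            bottomAnchor.constraint(equalTo: other.bottomAnchor)
        ])
    }
}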
@ -202,14 +202,14 @@ public final class FullConversationCell: UITableViewCell, SwipeActionOptimisticC
// Unread count view
unreadCountView.addSubview(unreadCountLabel)
unreadCountLabel.setCompressionResistanceHigh()
unreadCountView.setCompressionResistance(to: .required)
unreadCountLabel.pin([ VerticalEdge.top, VerticalEdge.bottom ], to: unreadCountView)
unreadCountView.pin(.leading, to: .leading, of: unreadCountLabel, withInset: -4)
unreadCountView.pin(.trailing, to: .trailing, of: unreadCountLabel, withInset: 4)
// Has mention view
hasMentionView.addSubview(hasMentionLabel)
hasMentionLabel.setCompressionResistanceHigh()
hasMentionLabel.setCompressionResistance(to: .required)
hasMentionLabel.pin(to: hasMentionView)
// Label stack view

@ -94,24 +94,24 @@ public class LoadingViewController: UIViewController {
// Layout
self.logoView.autoCenterInSuperview()
self.logoView.autoSetDimension(.width, toSize: 64)
self.logoView.autoSetDimension(.height, toSize: 64)
self.logoView.center(in: self.view)
self.logoView.set(.width, to: 64)
self.logoView.set(.height, to: 64)
self.progressBar.set(.height, to: (self.progressBar.layer.cornerRadius * 2))
self.progressBar.set(.width, to: .width, of: self.view, multiplier: 0.5)
self.labelStack.pin(.top, to: .bottom, of: self.logoView, withInset: 40)
self.labelStack.pin(.left, to: .left, of: self.view)
self.labelStack.pin(.right, to: .right, of: self.view)
self.labelStack.setCompressionResistanceHigh()
self.labelStack.setContentHuggingHigh()
self.labelStack.setContentHugging(to: .required)
self.labelStack.setCompressionResistance(to: .required)
self.bottomLabel.pin(.top, to: .bottom, of: self.labelStack, withInset: 10)
self.bottomLabel.pin(.left, to: .left, of: self.view)
self.bottomLabel.pin(.right, to: .right, of: self.view)
self.bottomLabel.setCompressionResistanceHigh()
self.bottomLabel.setContentHuggingHigh()
self.bottomLabel.setContentHugging(to: .required)
self.bottomLabel.setCompressionResistance(to: .required)
}
// MARK: - Functions

@ -4,7 +4,6 @@
#import <UIKit/UIKit.h>
#import "OWSBezierPathView.h"
#import <SignalCoreKit/OWSAsserts.h>
NS_ASSUME_NONNULL_BEGIN
@ -61,15 +60,11 @@ NS_ASSUME_NONNULL_BEGIN
- (void)setConfigureShapeLayerBlock:(ConfigureShapeLayerBlock)configureShapeLayerBlock
{
OWSAssertDebug(configureShapeLayerBlock);
[self setConfigureShapeLayerBlocks:@[ configureShapeLayerBlock ]];
}
- (void)setConfigureShapeLayerBlocks:(NSArray<ConfigureShapeLayerBlock> *)configureShapeLayerBlocks
{
OWSAssertDebug(configureShapeLayerBlocks.count > 0);
_configureShapeLayerBlocks = configureShapeLayerBlocks;
[self updateLayers];

@ -40,9 +40,9 @@ final class ScanQRCodeWrapperVC: BaseVC {
scanQRCodeVC.scanDelegate = delegate
let scanQRCodeVCView = scanQRCodeVC.view!
view.addSubview(scanQRCodeVCView)
scanQRCodeVCView.pin(.top, to: .top, of: view)
scanQRCodeVCView.pin(.leading, to: .leading, of: view)
scanQRCodeVCView.pin(.trailing, to: .trailing, of: view)
scanQRCodeVCView.autoPinEdge(.top, to: .top, of: view)
if let message = message {
scanQRCodeVCView.set(.height, lessThanOrEqualTo: UIScreen.main.bounds.width)
@ -65,11 +65,11 @@ final class ScanQRCodeWrapperVC: BaseVC {
explanationLabel.numberOfLines = 0
bottomView.addSubview(explanationLabel)
explanationLabel.autoPinWidthToSuperview(withMargin: 32)
explanationLabel.autoPinHeightToSuperview(withMargin: 32)
explanationLabel.set(.width, to: .width, of: bottomView, withOffset: 32)
explanationLabel.set(.height, to: .height, of: bottomView, withOffset: 32)
}
else {
scanQRCodeVCView.autoPinEdge(.bottom, to: .bottom, of: view)
scanQRCodeVCView.pin(.bottom, to: .bottom, of: view)
}
}

Some files were not shown because too many files have changed in this diff.