Merge branch 'dev' into onion-requests-2

pull/296/head
nielsandriesse 4 years ago
commit 07a3d61242

@ -261,7 +261,6 @@
34D1F0B01F867BFC0066283D /* OWSSystemMessageCell.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0A61F867BFC0066283D /* OWSSystemMessageCell.m */; };
34D1F0B41F86D31D0066283D /* ConversationCollectionView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0B31F86D31D0066283D /* ConversationCollectionView.m */; };
34D1F0B71F87F8850066283D /* OWSGenericAttachmentView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0B61F87F8850066283D /* OWSGenericAttachmentView.m */; };
34D1F0BA1F8800D90066283D /* OWSAudioMessageView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0B91F8800D90066283D /* OWSAudioMessageView.m */; };
34D1F0BD1F8D108C0066283D /* AttachmentUploadView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0BC1F8D108C0066283D /* AttachmentUploadView.m */; };
34D1F0C01F8EC1760066283D /* MessageRecipientStatusUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34D1F0BF1F8EC1760066283D /* MessageRecipientStatusUtils.swift */; };
34D2CCDA2062E7D000CB1A14 /* OWSScreenLockUI.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D2CCD92062E7D000CB1A14 /* OWSScreenLockUI.m */; };
@ -572,6 +571,7 @@
C31D1DDD25217014005D4DA8 /* UserCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = C31D1DDC25217014005D4DA8 /* UserCell.swift */; };
C31D1DE32521718E005D4DA8 /* UserSelectionVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = C31D1DE22521718E005D4DA8 /* UserSelectionVC.swift */; };
C31D1DE9252172D4005D4DA8 /* ContactUtilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = C31D1DE8252172D4005D4DA8 /* ContactUtilities.swift */; };
C31F812625258FB000DD9FD9 /* Storage+VolumeSamples.swift in Sources */ = {isa = PBXBuildFile; fileRef = C31F812525258FB000DD9FD9 /* Storage+VolumeSamples.swift */; };
C329FEEC24F7277900B1C64C /* LightModeSheet.swift in Sources */ = {isa = PBXBuildFile; fileRef = C329FEEB24F7277900B1C64C /* LightModeSheet.swift */; };
C329FEEF24F7743F00B1C64C /* UIViewController+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = C329FEED24F7742E00B1C64C /* UIViewController+Utilities.swift */; };
C34C8F7423A7830B00D82669 /* SpaceMono-Bold.ttf in Resources */ = {isa = PBXBuildFile; fileRef = C34C8F7323A7830A00D82669 /* SpaceMono-Bold.ttf */; };
@ -583,6 +583,8 @@
C35E8AA92485C85800ACB629 /* GeoLite2-Country-Blocks-IPv4.csv in Resources */ = {isa = PBXBuildFile; fileRef = C35E8AA62485C85600ACB629 /* GeoLite2-Country-Blocks-IPv4.csv */; };
C35E8AAE2485E51D00ACB629 /* IP2Country.swift in Sources */ = {isa = PBXBuildFile; fileRef = C35E8AAD2485E51D00ACB629 /* IP2Country.swift */; };
C3638C0524C7F0B500AF29BC /* LK002RemoveFriendRequests.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3638C0424C7F0B500AF29BC /* LK002RemoveFriendRequests.swift */; };
C3645350252449260045C478 /* VoiceMessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C364534F252449260045C478 /* VoiceMessageView.swift */; };
C364535C252467900045C478 /* AudioUtilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = C364535B252467900045C478 /* AudioUtilities.swift */; };
C369549D24D27A3500CEB4E3 /* MultiDeviceRemovalSheet.swift in Sources */ = {isa = PBXBuildFile; fileRef = C369549C24D27A3500CEB4E3 /* MultiDeviceRemovalSheet.swift */; };
C36B8707243C50C60049991D /* SignalMessaging.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 453518921FC63DBF00210559 /* SignalMessaging.framework */; };
C396DAEF2518408B00FF6DC5 /* ParsingState.swift in Sources */ = {isa = PBXBuildFile; fileRef = C396DAE82518408900FF6DC5 /* ParsingState.swift */; };
@ -1032,8 +1034,6 @@
34D1F0B31F86D31D0066283D /* ConversationCollectionView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ConversationCollectionView.m; sourceTree = "<group>"; };
34D1F0B51F87F8850066283D /* OWSGenericAttachmentView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSGenericAttachmentView.h; sourceTree = "<group>"; };
34D1F0B61F87F8850066283D /* OWSGenericAttachmentView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSGenericAttachmentView.m; sourceTree = "<group>"; };
34D1F0B81F8800D90066283D /* OWSAudioMessageView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSAudioMessageView.h; sourceTree = "<group>"; };
34D1F0B91F8800D90066283D /* OWSAudioMessageView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSAudioMessageView.m; sourceTree = "<group>"; };
34D1F0BB1F8D108C0066283D /* AttachmentUploadView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AttachmentUploadView.h; sourceTree = "<group>"; };
34D1F0BC1F8D108C0066283D /* AttachmentUploadView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AttachmentUploadView.m; sourceTree = "<group>"; };
34D1F0BF1F8EC1760066283D /* MessageRecipientStatusUtils.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MessageRecipientStatusUtils.swift; sourceTree = "<group>"; };
@ -1371,6 +1371,7 @@
C31D1DDC25217014005D4DA8 /* UserCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserCell.swift; sourceTree = "<group>"; };
C31D1DE22521718E005D4DA8 /* UserSelectionVC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserSelectionVC.swift; sourceTree = "<group>"; };
C31D1DE8252172D4005D4DA8 /* ContactUtilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactUtilities.swift; sourceTree = "<group>"; };
C31F812525258FB000DD9FD9 /* Storage+VolumeSamples.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Storage+VolumeSamples.swift"; sourceTree = "<group>"; };
C329FEEB24F7277900B1C64C /* LightModeSheet.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LightModeSheet.swift; sourceTree = "<group>"; };
C329FEED24F7742E00B1C64C /* UIViewController+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIViewController+Utilities.swift"; sourceTree = "<group>"; };
C34C8F7323A7830A00D82669 /* SpaceMono-Bold.ttf */ = {isa = PBXFileReference; lastKnownFileType = file; path = "SpaceMono-Bold.ttf"; sourceTree = "<group>"; };
@ -1383,6 +1384,8 @@
C35E8AA62485C85600ACB629 /* GeoLite2-Country-Blocks-IPv4.csv */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = "GeoLite2-Country-Blocks-IPv4.csv"; sourceTree = "<group>"; };
C35E8AAD2485E51D00ACB629 /* IP2Country.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IP2Country.swift; sourceTree = "<group>"; };
C3638C0424C7F0B500AF29BC /* LK002RemoveFriendRequests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LK002RemoveFriendRequests.swift; sourceTree = "<group>"; };
C364534F252449260045C478 /* VoiceMessageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceMessageView.swift; sourceTree = "<group>"; };
C364535B252467900045C478 /* AudioUtilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioUtilities.swift; sourceTree = "<group>"; };
C369549C24D27A3500CEB4E3 /* MultiDeviceRemovalSheet.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultiDeviceRemovalSheet.swift; sourceTree = "<group>"; };
C396469C2509D3ED00B0B9F5 /* pl */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = pl; path = translations/pl.lproj/Localizable.strings; sourceTree = "<group>"; };
C396469D2509D3F400B0B9F5 /* ja */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = ja; path = translations/ja.lproj/Localizable.strings; sourceTree = "<group>"; };
@ -2046,8 +2049,6 @@
34A8B3502190A40E00218A25 /* MediaAlbumCellView.swift */,
34EA693F2194933900702471 /* MediaDownloadView.swift */,
34EA69412194DE7F00702471 /* MediaUploadView.swift */,
34D1F0B81F8800D90066283D /* OWSAudioMessageView.h */,
34D1F0B91F8800D90066283D /* OWSAudioMessageView.m */,
34DBF005206C3CB100025978 /* OWSBubbleShapeView.h */,
34DBF006206C3CB200025978 /* OWSBubbleShapeView.m */,
34DBF002206BD5A500025978 /* OWSBubbleView.h */,
@ -2601,6 +2602,7 @@
isa = PBXGroup;
children = (
B8CCF63B239757C10091D419 /* Components */,
C31F812425258F9C00DD9FD9 /* Database */,
C32B405424A961E1001117B5 /* Dependencies */,
B8CCF63C239757DB0091D419 /* Utilities */,
B8CCF63D2397580E0091D419 /* View Controllers */,
@ -2690,6 +2692,7 @@
B8BB82B423947F2D00BA5194 /* TextField.swift */,
C3C3CF8824D8EED300E1CCE7 /* TextView.swift */,
C31D1DDC25217014005D4DA8 /* UserCell.swift */,
C364534F252449260045C478 /* VoiceMessageView.swift */,
);
path = Components;
sourceTree = "<group>";
@ -2698,6 +2701,7 @@
isa = PBXGroup;
children = (
B8544E3223D50E4900299F14 /* AppearanceUtilities.swift */,
C364535B252467900045C478 /* AudioUtilities.swift */,
C3D0972A2510499C00F6E3E4 /* BackgroundPoller.swift */,
C31A6C5B247F2CF3001123EF /* CGRect+Utilities.swift */,
C31D1DE8252172D4005D4DA8 /* ContactUtilities.swift */,
@ -2751,6 +2755,14 @@
path = "View Controllers";
sourceTree = "<group>";
};
C31F812425258F9C00DD9FD9 /* Database */ = {
isa = PBXGroup;
children = (
C31F812525258FB000DD9FD9 /* Storage+VolumeSamples.swift */,
);
path = Database;
sourceTree = "<group>";
};
C32B405424A961E1001117B5 /* Dependencies */ = {
isa = PBXGroup;
children = (
@ -3779,7 +3791,6 @@
34DC9BD921543E0C00FDDCEC /* DebugContactsUtils.m in Sources */,
34DBF007206C3CB200025978 /* OWSBubbleShapeView.m in Sources */,
4C04392A220A9EC800BAEA63 /* VoiceNoteLock.swift in Sources */,
34D1F0BA1F8800D90066283D /* OWSAudioMessageView.m in Sources */,
34D8C02B1ED3685800188D7C /* DebugUIContacts.m in Sources */,
3496956E21A301A100DCFE74 /* OWSBackupExportJob.m in Sources */,
4C1885D2218F8E1C00B67051 /* PhotoGridViewCell.swift in Sources */,
@ -3797,6 +3808,7 @@
B879D449247E1BE300DB3608 /* PathVC.swift in Sources */,
34E3EF0D1EFC235B007F6822 /* DebugUIDiskUsage.m in Sources */,
454A84042059C787008B8C75 /* MediaTileViewController.swift in Sources */,
C364535C252467900045C478 /* AudioUtilities.swift in Sources */,
340FC8B4204DAC8D007AEB0F /* OWSBackupSettingsViewController.m in Sources */,
34D1F0871F8678AA0066283D /* ConversationViewItem.m in Sources */,
451A13B11E13DED2000A50FD /* AppNotifications.swift in Sources */,
@ -3993,6 +4005,7 @@
340FC8AC204DAC8D007AEB0F /* PrivacySettingsTableViewController.m in Sources */,
B88847BC23E10BC6009836D2 /* GroupMembersVC.swift in Sources */,
B85357BF23A1AE0800AAF6CD /* SeedReminderView.swift in Sources */,
C31F812625258FB000DD9FD9 /* Storage+VolumeSamples.swift in Sources */,
B85357C723A1FB5100AAF6CD /* LinkDeviceVCDelegate.swift in Sources */,
340FC8C5204DE223007AEB0F /* DebugUIBackup.m in Sources */,
C35E8AAE2485E51D00ACB629 /* IP2Country.swift in Sources */,
@ -4014,6 +4027,7 @@
3427C64320F500E000EEC730 /* OWSMessageTimerView.m in Sources */,
B90418E6183E9DD40038554A /* DateUtil.m in Sources */,
340FC8BD204DAC8D007AEB0F /* ShowGroupMembersViewController.m in Sources */,
C3645350252449260045C478 /* VoiceMessageView.swift in Sources */,
3496956F21A301A100DCFE74 /* OWSBackupLazyRestore.swift in Sources */,
459311FC1D75C948008DD4F0 /* OWSDeviceTableViewCell.m in Sources */,
);
@ -4143,7 +4157,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 127;
CURRENT_PROJECT_VERSION = 128;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
@ -4157,7 +4171,7 @@
INFOPLIST_FILE = SignalShareExtension/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @executable_path/../../Frameworks";
MARKETING_VERSION = 1.6.1;
MARKETING_VERSION = 1.6.2;
MTL_ENABLE_DEBUG_INFO = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.share-extension";
PRODUCT_NAME = "$(TARGET_NAME)";
@ -4205,7 +4219,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 127;
CURRENT_PROJECT_VERSION = 128;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = SUQ8J2PCT7;
ENABLE_NS_ASSERTIONS = NO;
@ -4224,7 +4238,7 @@
INFOPLIST_FILE = SignalShareExtension/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @executable_path/../../Frameworks";
MARKETING_VERSION = 1.6.1;
MARKETING_VERSION = 1.6.2;
MTL_ENABLE_DEBUG_INFO = NO;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.share-extension";
PRODUCT_NAME = "$(TARGET_NAME)";
@ -4259,7 +4273,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 127;
CURRENT_PROJECT_VERSION = 128;
DEBUG_INFORMATION_FORMAT = dwarf;
DEFINES_MODULE = YES;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
@ -4278,7 +4292,7 @@
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
MARKETING_VERSION = 1.6.1;
MARKETING_VERSION = 1.6.2;
MTL_ENABLE_DEBUG_INFO = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.utilities";
PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
@ -4329,7 +4343,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 127;
CURRENT_PROJECT_VERSION = 128;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEFINES_MODULE = YES;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
@ -4353,7 +4367,7 @@
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
MARKETING_VERSION = 1.6.1;
MARKETING_VERSION = 1.6.2;
MTL_ENABLE_DEBUG_INFO = NO;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.utilities";
PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
@ -4391,7 +4405,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 127;
CURRENT_PROJECT_VERSION = 128;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
@ -4403,7 +4417,7 @@
INFOPLIST_FILE = LokiPushNotificationService/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @executable_path/../../Frameworks";
MARKETING_VERSION = 1.6.1;
MARKETING_VERSION = 1.6.2;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.push-notification-service";
@ -4454,7 +4468,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 127;
CURRENT_PROJECT_VERSION = 128;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = SUQ8J2PCT7;
ENABLE_NS_ASSERTIONS = NO;
@ -4471,7 +4485,7 @@
INFOPLIST_FILE = LokiPushNotificationService/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @executable_path/../../Frameworks";
MARKETING_VERSION = 1.6.1;
MARKETING_VERSION = 1.6.2;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.push-notification-service";
@ -4655,7 +4669,7 @@
CODE_SIGN_ENTITLEMENTS = Signal/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 127;
CURRENT_PROJECT_VERSION = 128;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -4690,7 +4704,7 @@
"$(SRCROOT)",
);
LLVM_LTO = NO;
MARKETING_VERSION = 1.6.1;
MARKETING_VERSION = 1.6.2;
OTHER_LDFLAGS = "$(inherited)";
OTHER_SWIFT_FLAGS = "$(inherited) \"-D\" \"COCOAPODS\" \"-DDEBUG\"";
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger";
@ -4722,7 +4736,7 @@
CODE_SIGN_ENTITLEMENTS = Signal/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 127;
CURRENT_PROJECT_VERSION = 128;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -4757,7 +4771,7 @@
"$(SRCROOT)",
);
LLVM_LTO = NO;
MARKETING_VERSION = 1.6.1;
MARKETING_VERSION = 1.6.2;
OTHER_LDFLAGS = "$(inherited)";
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger";
PRODUCT_NAME = Session;

@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "Pause.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

@ -0,0 +1,85 @@
%PDF-1.7
1 0 obj
<< >>
endobj
2 0 obj
<< /Length 3 0 R >>
stream
/DeviceRGB CS
/DeviceRGB cs
q
1.000000 0.000000 -0.000000 1.000000 0.000000 0.000000 cm
0.000000 0.000000 0.000000 scn
5.142858 0.000000 m
1.714286 0.000000 l
0.767857 0.000000 0.000000 0.767857 0.000000 1.714285 c
0.000000 14.285714 l
0.000000 15.232142 0.767857 16.000000 1.714286 16.000000 c
5.142858 16.000000 l
6.089286 16.000000 6.857143 15.232142 6.857143 14.285714 c
6.857143 1.714285 l
6.857143 0.767857 6.089286 0.000000 5.142858 0.000000 c
h
16.000000 1.714285 m
16.000000 14.285714 l
16.000000 15.232142 15.232143 16.000000 14.285715 16.000000 c
10.857143 16.000000 l
9.910715 16.000000 9.142858 15.232142 9.142858 14.285714 c
9.142858 1.714285 l
9.142858 0.767857 9.910715 0.000000 10.857143 0.000000 c
14.285715 0.000000 l
15.232143 0.000000 16.000000 0.767857 16.000000 1.714285 c
h
f
n
Q
endstream
endobj
3 0 obj
804
endobj
4 0 obj
<< /Annots []
/Type /Page
/MediaBox [ 0.000000 0.000000 16.000000 16.000000 ]
/Resources 1 0 R
/Contents 2 0 R
/Parent 5 0 R
>>
endobj
5 0 obj
<< /Kids [ 4 0 R ]
/Count 1
/Type /Pages
>>
endobj
6 0 obj
<< /Type /Catalog
/Pages 5 0 R
>>
endobj
xref
0 7
0000000000 65535 f
0000000010 00000 n
0000000034 00000 n
0000000894 00000 n
0000000916 00000 n
0000001089 00000 n
0000001163 00000 n
trailer
<< /ID [ (some) (id) ]
/Root 6 0 R
/Size 7
>>
startxref
1222
%%EOF

File diff suppressed because one or more lines are too long

@ -0,0 +1,214 @@
import Accelerate
import NVActivityIndicatorView
@objc(LKVoiceMessageView)
final class VoiceMessageView : UIView {
private let voiceMessage: TSAttachment
private let isOutgoing: Bool
private var isLoading = false
private var isForcedAnimation = false
private var volumeSamples: [Float] = [] { didSet { updateShapeLayers() } }
@objc var progress: CGFloat = 0 { didSet { updateShapeLayers() } }
@objc var duration: Int = 0 { didSet { updateDurationLabel() } }
@objc var isPlaying = false { didSet { updateToggleImageView() } }
// MARK: Components
private lazy var toggleImageView = UIImageView(image: #imageLiteral(resourceName: "Play"))
private lazy var spinner = NVActivityIndicatorView(frame: CGRect.zero, type: .circleStrokeSpin, color: .black, padding: nil)
private lazy var durationLabel: UILabel = {
let result = UILabel()
result.textColor = Colors.text
result.font = .systemFont(ofSize: Values.mediumFontSize)
return result
}()
private lazy var backgroundShapeLayer: CAShapeLayer = {
let result = CAShapeLayer()
result.fillColor = Colors.text.cgColor
return result
}()
private lazy var foregroundShapeLayer: CAShapeLayer = {
let result = CAShapeLayer()
result.fillColor = (isLightMode && isOutgoing) ? UIColor.white.cgColor : Colors.accent.cgColor
return result
}()
// MARK: Settings
private let leadingInset: CGFloat = 0
private let sampleSpacing: CGFloat = 1
private let targetSampleCount = 48
private let toggleContainerSize: CGFloat = 32
private let vMargin: CGFloat = 0
@objc public static let contentHeight: CGFloat = 40
// MARK: Initialization
@objc(initWithVoiceMessage:isOutgoing:)
init(voiceMessage: TSAttachment, isOutgoing: Bool) {
self.voiceMessage = voiceMessage
self.isOutgoing = isOutgoing
super.init(frame: CGRect.zero)
}
override init(frame: CGRect) {
preconditionFailure("Use init(voiceMessage:isOutgoing:) instead.")
}
required init?(coder: NSCoder) {
preconditionFailure("Use init(voiceMessage:isOutgoing:) instead.")
}
@objc func initialize() {
setUpViewHierarchy()
if voiceMessage.isDownloaded {
guard let url = (voiceMessage as? TSAttachmentStream)?.originalMediaURL else {
return print("[Loki] Couldn't get URL for voice message.")
}
if let cachedVolumeSamples = Storage.getVolumeSamples(for: voiceMessage.uniqueId!), cachedVolumeSamples.count == targetSampleCount {
self.hideLoader()
self.volumeSamples = cachedVolumeSamples
} else {
let voiceMessageID = voiceMessage.uniqueId!
AudioUtilities.getVolumeSamples(for: url, targetSampleCount: targetSampleCount).done(on: DispatchQueue.main) { [weak self] volumeSamples in
guard let self = self else { return }
self.hideLoader()
self.isForcedAnimation = true
self.volumeSamples = volumeSamples
Storage.write { transaction in
Storage.setVolumeSamples(for: voiceMessageID, to: volumeSamples, using: transaction)
}
}.catch(on: DispatchQueue.main) { error in
print("[Loki] Couldn't sample audio file due to error: \(error).")
}
}
} else {
showLoader()
}
}
private func setUpViewHierarchy() {
set(.width, to: 200)
set(.height, to: VoiceMessageView.contentHeight)
layer.insertSublayer(backgroundShapeLayer, at: 0)
layer.insertSublayer(foregroundShapeLayer, at: 1)
let toggleContainer = UIView()
toggleContainer.clipsToBounds = false
toggleContainer.addSubview(toggleImageView)
toggleImageView.set(.width, to: 12)
toggleImageView.set(.height, to: 12)
toggleImageView.center(in: toggleContainer)
toggleContainer.addSubview(spinner)
spinner.set(.width, to: 24)
spinner.set(.height, to: 24)
spinner.center(in: toggleContainer)
toggleContainer.set(.width, to: toggleContainerSize)
toggleContainer.set(.height, to: toggleContainerSize)
toggleContainer.layer.cornerRadius = toggleContainerSize / 2
toggleContainer.backgroundColor = UIColor.white
let glowRadius: CGFloat = isLightMode ? 1 : 2
let glowColor = isLightMode ? UIColor.black.withAlphaComponent(0.4) : UIColor.black
let glowConfiguration = UIView.CircularGlowConfiguration(size: toggleContainerSize, color: glowColor, radius: glowRadius)
toggleContainer.setCircularGlow(with: glowConfiguration)
addSubview(toggleContainer)
toggleContainer.center(.vertical, in: self)
toggleContainer.pin(.leading, to: .leading, of: self, withInset: leadingInset)
addSubview(durationLabel)
durationLabel.center(.vertical, in: self)
durationLabel.pin(.trailing, to: .trailing, of: self)
}
// MARK: UI & Updating
private func showLoader() {
isLoading = true
toggleImageView.isHidden = true
spinner.startAnimating()
spinner.isHidden = false
Timer.scheduledTimer(withTimeInterval: 0.25, repeats: true) { [weak self] timer in
guard let self = self else { return timer.invalidate() }
if self.isLoading {
self.updateFakeVolumeSamples()
} else {
timer.invalidate()
}
}
updateFakeVolumeSamples()
}
private func updateFakeVolumeSamples() {
let fakeVolumeSamples = (0..<targetSampleCount).map { _ in Float.random(in: 0...1) }
volumeSamples = fakeVolumeSamples
}
private func hideLoader() {
isLoading = false
toggleImageView.isHidden = false
spinner.stopAnimating()
spinner.isHidden = true
}
override func layoutSubviews() {
super.layoutSubviews()
updateShapeLayers()
}
private func updateShapeLayers() {
clipsToBounds = false // Bit of a hack to do this here, but the containing stack view turns this off all the time
guard !volumeSamples.isEmpty else { return }
let sMin = CGFloat(volumeSamples.min()!)
let sMax = CGFloat(volumeSamples.max()!)
let w = width() - leadingInset - toggleContainerSize - durationLabel.width() - 2 * Values.smallSpacing
let h = height() - 2 * vMargin
let sW = (w - sampleSpacing * CGFloat(volumeSamples.count - 1)) / CGFloat(volumeSamples.count)
let backgroundPath = UIBezierPath()
let foregroundPath = UIBezierPath()
for (i, value) in volumeSamples.enumerated() {
let x = leadingInset + toggleContainerSize + Values.smallSpacing + CGFloat(i) * (sW + sampleSpacing)
let fraction = (CGFloat(value) - sMin) / (sMax - sMin)
let sH = max(8, h * fraction)
let y = vMargin + (h - sH) / 2
let subPath = UIBezierPath(roundedRect: CGRect(x: x, y: y, width: sW, height: sH), cornerRadius: sW / 2)
backgroundPath.append(subPath)
if progress > CGFloat(i) / CGFloat(volumeSamples.count) { foregroundPath.append(subPath) }
}
backgroundPath.close()
foregroundPath.close()
if isLoading || isForcedAnimation {
let animation = CABasicAnimation(keyPath: "path")
animation.duration = 0.25
animation.toValue = backgroundPath
animation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.easeInEaseOut)
backgroundShapeLayer.add(animation, forKey: "path")
backgroundShapeLayer.path = backgroundPath.cgPath
} else {
backgroundShapeLayer.path = backgroundPath.cgPath
}
foregroundShapeLayer.path = foregroundPath.cgPath
isForcedAnimation = false
}
private func updateDurationLabel() {
durationLabel.text = OWSFormat.formatDurationSeconds(duration)
updateShapeLayers()
}
private func updateToggleImageView() {
toggleImageView.image = isPlaying ? #imageLiteral(resourceName: "Pause") : #imageLiteral(resourceName: "Play")
}
// MARK: Interaction
@objc(getCurrentTime:)
func getCurrentTime(for panGestureRecognizer: UIPanGestureRecognizer) -> TimeInterval {
guard voiceMessage.isDownloaded else { return 0 }
let locationInSelf = panGestureRecognizer.location(in: self)
let waveformFrameOrigin = CGPoint(x: leadingInset + toggleContainerSize + Values.smallSpacing, y: vMargin)
let waveformFrameSize = CGSize(width: width() - leadingInset - toggleContainerSize - durationLabel.width() - 2 * Values.smallSpacing,
height: height() - 2 * vMargin)
let waveformFrame = CGRect(origin: waveformFrameOrigin, size: waveformFrameSize)
guard waveformFrame.contains(locationInSelf) else { return 0 }
let fraction = (locationInSelf.x - waveformFrame.minX) / (waveformFrame.maxX - waveformFrame.minX)
return Double(fraction) * Double(duration)
}
}
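For context, a minimal sketch of how a call site might configure this view. It mirrors the Objective-C usage added to OWSMessageBubbleView later in this diff, but the helper function and its parameter handling are illustrative assumptions rather than part of the commit:

// Sketch only (assumed call site, not part of this commit).
func makeVoiceMessageView(for attachment: TSAttachment, viewItem: ConversationViewItem, isOutgoing: Bool) -> VoiceMessageView {
    let result = VoiceMessageView(voiceMessage: attachment, isOutgoing: isOutgoing)
    result.duration = Int(viewItem.audioDurationSeconds)
    if viewItem.audioDurationSeconds > 0 {
        result.progress = CGFloat(viewItem.audioProgressSeconds / viewItem.audioDurationSeconds)
    }
    result.initialize() // Builds the view hierarchy and kicks off waveform sampling/caching
    return result
}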

@ -0,0 +1,17 @@
extension Storage {

    static let volumeSamplesCollection = "LokiVolumeSamplesCollection"

    static func getVolumeSamples(for attachment: String) -> [Float]? {
        var result: [Float]?
        read { transaction in
            result = transaction.object(forKey: attachment, inCollection: volumeSamplesCollection) as? [Float]
        }
        return result
    }

    static func setVolumeSamples(for attachment: String, to volumeSamples: [Float], using transaction: YapDatabaseReadWriteTransaction) {
        transaction.setObject(volumeSamples, forKey: attachment, inCollection: volumeSamplesCollection)
    }
}
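A rough usage sketch of this cache, following the read/write pattern VoiceMessageView uses above; the attachment ID and sample values are placeholders:

// Sketch only: round-trip through the volume-sample cache, keyed by attachment ID.
let attachmentID = "attachment-unique-id" // placeholder
if let cachedVolumeSamples = Storage.getVolumeSamples(for: attachmentID) {
    print("Found \(cachedVolumeSamples.count) cached samples")
} else {
    let volumeSamples: [Float] = (0..<48).map { _ in Float.random(in: 0...1) } // placeholder data
    Storage.write { transaction in
        Storage.setVolumeSamples(for: attachmentID, to: volumeSamples, using: transaction)
    }
}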

@ -0,0 +1,190 @@
import Accelerate
import PromiseKit
enum AudioUtilities {
private static let noiseFloor: Float = -80
private struct FileInfo {
let sampleCount: Int
let asset: AVAsset
let track: AVAssetTrack
}
enum Error : LocalizedError {
case noAudioTrack
case noAudioFormatDescription
case loadingFailed
case parsingFailed
var errorDescription: String? {
switch self {
case .noAudioTrack: return "No audio track."
case .noAudioFormatDescription: return "No audio format description."
case .loadingFailed: return "Couldn't load asset."
case .parsingFailed: return "Couldn't parse asset."
}
}
}
static func getVolumeSamples(for audioFileURL: URL, targetSampleCount: Int) -> Promise<[Float]> {
return loadFile(audioFileURL).then { fileInfo in
AudioUtilities.parseSamples(from: fileInfo, with: targetSampleCount)
}
}
private static func loadFile(_ audioFileURL: URL, isRetry: Bool = false) -> Promise<FileInfo> {
let asset = AVURLAsset(url: audioFileURL)
guard let track = asset.tracks(withMediaType: AVMediaType.audio).first else {
if isRetry {
return Promise(error: Error.loadingFailed)
} else {
// Workaround for an issue where MP3 files sent by Android get saved as M4A
var newAudioFileURL = audioFileURL.deletingPathExtension()
let fileName = newAudioFileURL.lastPathComponent
newAudioFileURL = newAudioFileURL.deletingLastPathComponent()
newAudioFileURL = newAudioFileURL.appendingPathComponent("\(fileName).mp3")
let fileManager = FileManager.default
if fileManager.fileExists(atPath: newAudioFileURL.path) {
return loadFile(newAudioFileURL, isRetry: true)
} else {
do {
try FileManager.default.copyItem(at: audioFileURL, to: newAudioFileURL)
} catch {
return Promise(error: Error.loadingFailed)
}
return loadFile(newAudioFileURL, isRetry: true)
}
}
}
let (promise, seal) = Promise<FileInfo>.pending()
asset.loadValuesAsynchronously(forKeys: [ #keyPath(AVAsset.duration) ]) {
var nsError: NSError?
let status = asset.statusOfValue(forKey: #keyPath(AVAsset.duration), error: &nsError)
switch status {
case .loaded:
guard let formatDescriptions = track.formatDescriptions as? [CMAudioFormatDescription],
let audioFormatDescription = formatDescriptions.first,
let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDescription)
else { return seal.reject(Error.noAudioFormatDescription) }
let sampleCount = Int((asbd.pointee.mSampleRate) * Float64(asset.duration.value) / Float64(asset.duration.timescale))
let fileInfo = FileInfo(sampleCount: sampleCount, asset: asset, track: track)
seal.fulfill(fileInfo)
default:
print("Couldn't load asset due to error: \(nsError?.localizedDescription ?? "no description provided").")
seal.reject(Error.loadingFailed)
}
}
return promise
}
private static func parseSamples(from fileInfo: FileInfo, with targetSampleCount: Int) -> Promise<[Float]> {
// Prepare the reader
guard let reader = try? AVAssetReader(asset: fileInfo.asset) else { return Promise(error: Error.parsingFailed) }
let range = 0..<fileInfo.sampleCount
reader.timeRange = CMTimeRange(start: CMTime(value: Int64(range.lowerBound), timescale: fileInfo.asset.duration.timescale),
duration: CMTime(value: Int64(range.count), timescale: fileInfo.asset.duration.timescale))
let outputSettings: [String:Any] = [
AVFormatIDKey : Int(kAudioFormatLinearPCM),
AVLinearPCMBitDepthKey : 16,
AVLinearPCMIsBigEndianKey : false,
AVLinearPCMIsFloatKey : false,
AVLinearPCMIsNonInterleaved : false
]
let output = AVAssetReaderTrackOutput(track: fileInfo.track, outputSettings: outputSettings)
output.alwaysCopiesSampleData = false
reader.add(output)
var channelCount = 1
let formatDescriptions = fileInfo.track.formatDescriptions as! [CMAudioFormatDescription]
for audioFormatDescription in formatDescriptions {
guard let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDescription) else {
return Promise(error: Error.parsingFailed)
}
channelCount = Int(asbd.pointee.mChannelsPerFrame)
}
let samplesPerPixel = max(1, channelCount * range.count / targetSampleCount)
let filter = [Float](repeating: 1 / Float(samplesPerPixel), count: samplesPerPixel)
var result = [Float]()
var sampleBuffer = Data()
// Read the file
reader.startReading()
defer { reader.cancelReading() }
while reader.status == .reading {
guard let readSampleBuffer = output.copyNextSampleBuffer(),
let readBuffer = CMSampleBufferGetDataBuffer(readSampleBuffer) else { break }
var readBufferLength = 0
var readBufferPointer: UnsafeMutablePointer<Int8>?
CMBlockBufferGetDataPointer(readBuffer,
atOffset: 0,
lengthAtOffsetOut: &readBufferLength,
totalLengthOut: nil,
dataPointerOut: &readBufferPointer)
sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength))
CMSampleBufferInvalidate(readSampleBuffer)
let sampleCount = sampleBuffer.count / MemoryLayout<Int16>.size
let downSampledLength = sampleCount / samplesPerPixel
let samplesToProcess = downSampledLength * samplesPerPixel
guard samplesToProcess > 0 else { continue }
processSamples(from: &sampleBuffer,
outputSamples: &result,
samplesToProcess: samplesToProcess,
downSampledLength: downSampledLength,
samplesPerPixel: samplesPerPixel,
filter: filter)
}
// Process any remaining samples
let samplesToProcess = sampleBuffer.count / MemoryLayout<Int16>.size
if samplesToProcess > 0 {
let downSampledLength = 1
let samplesPerPixel = samplesToProcess
let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel)
processSamples(from: &sampleBuffer,
outputSamples: &result,
samplesToProcess: samplesToProcess,
downSampledLength: downSampledLength,
samplesPerPixel: samplesPerPixel,
filter: filter)
}
guard reader.status == .completed else { return Promise(error: Error.parsingFailed) }
// Return
return Promise { $0.fulfill(result) }
}
private static func processSamples(from sampleBuffer: inout Data, outputSamples: inout [Float], samplesToProcess: Int,
downSampledLength: Int, samplesPerPixel: Int, filter: [Float]) {
sampleBuffer.withUnsafeBytes { (samples: UnsafeRawBufferPointer) in
var processingBuffer = [Float](repeating: 0, count: samplesToProcess)
let sampleCount = vDSP_Length(samplesToProcess)
// Create an UnsafePointer<Int16> from the samples
let unsafeBufferPointer = samples.bindMemory(to: Int16.self)
let unsafePointer = unsafeBufferPointer.baseAddress!
// Convert 16 bit int samples to floats
vDSP_vflt16(unsafePointer, 1, &processingBuffer, 1, sampleCount)
// Take the absolute values to get the amplitude
vDSP_vabs(processingBuffer, 1, &processingBuffer, 1, sampleCount)
// Get the corresponding dB values and clip the results
getdB(from: &processingBuffer)
// Downsample and average
var downSampledData = [Float](repeating: 0, count: downSampledLength)
vDSP_desamp(processingBuffer,
vDSP_Stride(samplesPerPixel),
filter,
&downSampledData,
vDSP_Length(downSampledLength),
vDSP_Length(samplesPerPixel))
// Remove the processed samples
sampleBuffer.removeFirst(samplesToProcess * MemoryLayout<Int16>.size)
// Update the output samples
outputSamples += downSampledData
}
}
static func getdB(from normalizedSamples: inout [Float]) {
// Convert samples to a log scale, using the Int16 full-scale value (32768) as the 0 dB reference
var zero: Float = 32768.0
vDSP_vdbcon(normalizedSamples, 1, &zero, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count), 1)
// Clip to [noiseFloor, 0]
var ceil: Float = 0.0
var noiseFloorMutable = AudioUtilities.noiseFloor
vDSP_vclip(normalizedSamples, 1, &noiseFloorMutable, &ceil, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count))
}
}
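And a hedged sketch of consuming the sampler, matching the PromiseKit call chain VoiceMessageView uses above; the file URL is a placeholder:

// Sketch only: fetch 48 averaged, dB-scaled volume samples for a local audio file.
let audioFileURL = URL(fileURLWithPath: "/path/to/voice-message.m4a") // placeholder
AudioUtilities.getVolumeSamples(for: audioFileURL, targetSampleCount: 48).done(on: DispatchQueue.main) { volumeSamples in
    // Values are clipped to [noiseFloor, 0] dB; normalize against min/max before drawing, as VoiceMessageView does
    print("Got \(volumeSamples.count) volume samples")
}.catch(on: DispatchQueue.main) { error in
    print("Couldn't sample audio file due to error: \(error).")
}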

@ -10,7 +10,7 @@ extension UIView {
let opacity: Float
let radius: CGFloat
init(size: CGFloat, color: UIColor, isAnimated: Bool, animationDuration: TimeInterval = 0.25, offset: CGSize = CGSize(width: 0, height: 0.8), opacity: Float = isLightMode ? 0.4 : 1, radius: CGFloat) {
init(size: CGFloat, color: UIColor, isAnimated: Bool = false, animationDuration: TimeInterval = 0.25, offset: CGSize = CGSize(width: 0, height: 0.8), opacity: Float = isLightMode ? 0.4 : 1, radius: CGFloat) {
self.size = size
self.color = color
self.isAnimated = isAnimated

@ -22,6 +22,14 @@ final class EditClosedGroupVC : BaseVC, UITableViewDataSource, UITableViewDelega
return result
}()
private lazy var addMembersButton: Button = {
let result = Button(style: .prominentOutline, size: .large)
result.setTitle("Add Members", for: UIControl.State.normal)
result.addTarget(self, action: #selector(addMembers), for: UIControl.Event.touchUpInside)
result.contentEdgeInsets = UIEdgeInsets(top: 0, leading: Values.mediumSpacing, bottom: 0, trailing: Values.mediumSpacing)
return result
}()
@objc private lazy var tableView: UITableView = {
let result = UITableView()
result.dataSource = self
@ -56,13 +64,13 @@ final class EditClosedGroupVC : BaseVC, UITableViewDataSource, UITableViewDelega
let backButton = UIBarButtonItem(title: "Back", style: .plain, target: nil, action: nil)
backButton.tintColor = Colors.text
navigationItem.backBarButtonItem = backButton
setUpViewHierarchy()
updateNavigationBarButtons()
name = thread.groupModel.groupName!
func getDisplayName(for publicKey: String) -> String {
return UserDisplayNameUtilities.getPrivateChatDisplayName(for: publicKey) ?? publicKey
}
members = GroupUtilities.getClosedGroupMembers(thread).sorted { getDisplayName(for: $0) < getDisplayName(for: $1) }
setUpViewHierarchy()
updateNavigationBarButtons()
name = thread.groupModel.groupName!
}
private func setUpViewHierarchy() {
@ -88,11 +96,8 @@ final class EditClosedGroupVC : BaseVC, UITableViewDataSource, UITableViewDelega
membersLabel.font = .systemFont(ofSize: Values.mediumFontSize)
membersLabel.text = "Members"
// Add members button
let addMembersButton = Button(style: .prominentOutline, size: .large)
addMembersButton.setTitle("Add Members", for: UIControl.State.normal)
addMembersButton.addTarget(self, action: #selector(addMembers), for: UIControl.Event.touchUpInside)
addMembersButton.contentEdgeInsets = UIEdgeInsets(top: 0, leading: Values.mediumSpacing, bottom: 0, trailing: Values.mediumSpacing)
if (Set(ContactUtilities.getAllContacts()).subtracting(members).isEmpty) {
let hasContactsToAdd = !Set(ContactUtilities.getAllContacts()).subtracting(self.members).isEmpty
if (!hasContactsToAdd) {
addMembersButton.isUserInteractionEnabled = false
let disabledColor = Colors.text.withAlphaComponent(Values.unimportantElementOpacity)
addMembersButton.layer.borderColor = disabledColor.cgColor
@ -222,6 +227,11 @@ final class EditClosedGroupVC : BaseVC, UITableViewDataSource, UITableViewDelega
return UserDisplayNameUtilities.getPrivateChatDisplayName(for: publicKey) ?? publicKey
}
self.members = members.sorted { getDisplayName(for: $0) < getDisplayName(for: $1) }
let hasContactsToAdd = !Set(ContactUtilities.getAllContacts()).subtracting(self.members).isEmpty
self.addMembersButton.isUserInteractionEnabled = hasContactsToAdd
let color = hasContactsToAdd ? Colors.accent : Colors.text.withAlphaComponent(Values.unimportantElementOpacity)
self.addMembersButton.layer.borderColor = color.cgColor
self.addMembersButton.setTitleColor(color, for: UIControl.State.normal)
}
navigationController!.pushViewController(userSelectionVC, animated: true, completion: nil)
}
@ -241,13 +251,17 @@ final class EditClosedGroupVC : BaseVC, UITableViewDataSource, UITableViewDelega
guard members != Set(thread.groupModel.groupMemberIds) || name != thread.groupModel.groupName else {
return popToConversationVC(self)
}
try! Storage.writeSync { [weak self] transaction in
ClosedGroupsProtocol.update(groupPublicKey, with: members, name: name, transaction: transaction).done(on: DispatchQueue.main) {
guard let self = self else { return }
popToConversationVC(self)
}.catch(on: DispatchQueue.main) { error in
guard let self = self else { return }
self.showError(title: "Couldn't Update Group", message: "Please check your internet connection and try again.")
ModalActivityIndicatorViewController.present(fromViewController: navigationController!, canCancel: false) { [weak self] _ in
try! Storage.writeSync { [weak self] transaction in
ClosedGroupsProtocol.update(groupPublicKey, with: members, name: name, transaction: transaction).done(on: DispatchQueue.main) {
guard let self = self else { return }
self.dismiss(animated: true, completion: nil) // Dismiss the loader
popToConversationVC(self)
}.catch(on: DispatchQueue.main) { error in
guard let self = self else { return }
self.dismiss(animated: true, completion: nil) // Dismiss the loader
self.showError(title: "Couldn't Update Group", message: "Please check your internet connection and try again.")
}
}
}
}

@ -7,8 +7,8 @@ final class RestoreVC : BaseVC {
private var bottomConstraint: NSLayoutConstraint!
// MARK: Components
private lazy var mnemonicTextField: TextField = {
let result = TextField(placeholder: NSLocalizedString("vc_restore_seed_text_field_hint", comment: ""))
private lazy var mnemonicTextView: TextView = {
let result = TextView(placeholder: NSLocalizedString("vc_restore_seed_text_field_hint", comment: ""))
result.layer.borderColor = Colors.text.cgColor
return result
}()
@ -77,7 +77,7 @@ final class RestoreVC : BaseVC {
restoreButtonContainer.pin(.trailing, to: .trailing, of: restoreButton, withInset: Values.massiveSpacing)
restoreButtonContainer.pin(.bottom, to: .bottom, of: restoreButton)
// Set up top stack view
let topStackView = UIStackView(arrangedSubviews: [ titleLabel, spacer1, explanationLabel, spacer2, mnemonicTextField, spacer3, legalLabel ])
let topStackView = UIStackView(arrangedSubviews: [ titleLabel, spacer1, explanationLabel, spacer2, mnemonicTextView, spacer3, legalLabel ])
topStackView.axis = .vertical
topStackView.alignment = .fill
// Set up top stack view container
@ -111,7 +111,7 @@ final class RestoreVC : BaseVC {
// On small screens we hide the legal label when the keyboard is up, but it's important that the user sees it, so
// in those instances we don't make the keyboard come up automatically
if !isIPhone5OrSmaller {
mnemonicTextField.becomeFirstResponder()
mnemonicTextView.becomeFirstResponder()
}
}
@ -121,7 +121,7 @@ final class RestoreVC : BaseVC {
// MARK: General
@objc private func dismissKeyboard() {
mnemonicTextField.resignFirstResponder()
mnemonicTextView.resignFirstResponder()
}
// MARK: Updating
@ -159,7 +159,7 @@ final class RestoreVC : BaseVC {
alert.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: ""), style: .default, handler: nil))
presentAlert(alert)
}
let mnemonic = mnemonicTextField.text!
let mnemonic = mnemonicTextView.text!
do {
let hexEncodedSeed = try Mnemonic.decode(mnemonic: mnemonic)
let seed = Data(hex: hexEncodedSeed)
@ -171,7 +171,7 @@ final class RestoreVC : BaseVC {
TSAccountManager.sharedInstance().phoneNumberAwaitingVerification = keyPair.hexEncodedPublicKey
OWSPrimaryStorage.shared().setRestorationTime(Date().timeIntervalSince1970)
UserDefaults.standard[.hasViewedSeed] = true
mnemonicTextField.resignFirstResponder()
mnemonicTextView.resignFirstResponder()
Timer.scheduledTimer(withTimeInterval: 0.25, repeats: false) { _ in
let displayNameVC = DisplayNameVC()
self.navigationController!.pushViewController(displayNameVC, animated: true)

@ -22,7 +22,6 @@
#import "NotificationSettingsViewController.h"
#import "OWSAddToContactViewController.h"
#import "OWSAnyTouchGestureRecognizer.h"
#import "OWSAudioMessageView.h"
#import "OWSAudioPlayer.h"
#import "OWSBackup.h"
#import "OWSBackupIO.h"

@ -228,6 +228,7 @@ class ColorPickerView: UIView, ColorViewDelegate {
}
private func updateMockConversationView() {
/*
conversationStyle.viewWidth = max(bounds.size.width, kMinimumConversationWidth)
mockConversationView.subviews.forEach { $0.removeFromSuperview() }
@ -275,6 +276,7 @@ class ColorPickerView: UIView, ColorViewDelegate {
mockConversationView.addSubview(messagesStackView)
messagesStackView.autoPinEdgesToSuperviewMargins()
*/
}
private func buildPaletteView(colorViews: [ColorView]) -> UIView {
@ -303,6 +305,7 @@ class ColorPickerView: UIView, ColorViewDelegate {
// MARK: Mock Classes for rendering demo conversation
/*
@objc
private class MockConversationViewItem: NSObject, ConversationViewItem {
var userCanDeleteGroupMessage: Bool = false
@ -444,6 +447,7 @@ private class MockConversationViewItem: NSObject, ConversationViewItem {
return false
}
}
*/
private class MockIncomingMessage: TSIncomingMessage {
init(messageBody: String) {

@ -170,9 +170,17 @@ public class ConversationMediaView: UIView {
}
backgroundColor = (Theme.isDarkThemeEnabled ? .ows_gray90 : .ows_gray05)
let progressView = MediaDownloadView(attachmentId: attachmentId, radius: maxMessageWidth * 0.1)
self.addSubview(progressView)
progressView.autoPinEdgesToSuperviewEdges()
let view: UIView
if isOnionRouted { // Loki: Due to the way onion routing works we can't get upload progress for those attachments
let activityIndicatorView = UIActivityIndicatorView(style: .white)
activityIndicatorView.isHidden = false
activityIndicatorView.startAnimating()
view = activityIndicatorView
} else {
view = MediaDownloadView(attachmentId: attachmentId, radius: maxMessageWidth * 0.1)
}
addSubview(view)
view.autoPinEdgesToSuperviewEdges()
}
private func addUploadProgressIfNecessary(_ subview: UIView) -> Bool {

@ -1,27 +0,0 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
NS_ASSUME_NONNULL_BEGIN
@class ConversationStyle;
@class TSAttachment;
@protocol ConversationViewItem;
@interface OWSAudioMessageView : UIStackView
- (instancetype)initWithAttachment:(TSAttachment *)attachment
isIncoming:(BOOL)isIncoming
viewItem:(id<ConversationViewItem>)viewItem
conversationStyle:(ConversationStyle *)conversationStyle;
- (void)createContents;
+ (CGFloat)bubbleHeight;
- (void)updateContents;
@end
NS_ASSUME_NONNULL_END

@ -1,305 +0,0 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
#import "OWSAudioMessageView.h"
#import "ConversationViewItem.h"
#import "Session-Swift.h"
#import "UIColor+OWS.h"
#import "ViewControllerUtils.h"
#import <SignalMessaging/OWSFormat.h>
#import <SignalMessaging/UIColor+OWS.h>
#import <SessionServiceKit/MIMETypeUtil.h>
NS_ASSUME_NONNULL_BEGIN
@interface OWSAudioMessageView ()
@property (nonatomic) TSAttachment *attachment;
@property (nonatomic, nullable) TSAttachmentStream *attachmentStream;
@property (nonatomic) BOOL isIncoming;
@property (nonatomic, weak) id<ConversationViewItem> viewItem;
@property (nonatomic, readonly) ConversationStyle *conversationStyle;
@property (nonatomic, nullable) UIButton *audioPlayPauseButton;
@property (nonatomic, nullable) UILabel *audioBottomLabel;
@property (nonatomic, nullable) AudioProgressView *audioProgressView;
@end
#pragma mark -
@implementation OWSAudioMessageView
- (instancetype)initWithAttachment:(TSAttachment *)attachment
isIncoming:(BOOL)isIncoming
viewItem:(id<ConversationViewItem>)viewItem
conversationStyle:(ConversationStyle *)conversationStyle
{
self = [super init];
if (self) {
_attachment = attachment;
if ([attachment isKindOfClass:[TSAttachmentStream class]]) {
_attachmentStream = (TSAttachmentStream *)attachment;
}
_isIncoming = isIncoming;
_viewItem = viewItem;
_conversationStyle = conversationStyle;
}
return self;
}
- (void)updateContents
{
[self updateAudioProgressView];
[self updateAudioBottomLabel];
if (self.audioPlaybackState == AudioPlaybackState_Playing) {
[self setAudioIconToPause];
} else {
[self setAudioIconToPlay];
}
}
- (CGFloat)audioProgressSeconds
{
return [self.viewItem audioProgressSeconds];
}
- (CGFloat)audioDurationSeconds
{
return self.viewItem.audioDurationSeconds;
}
- (AudioPlaybackState)audioPlaybackState
{
return [self.viewItem audioPlaybackState];
}
- (BOOL)isAudioPlaying
{
return self.audioPlaybackState == AudioPlaybackState_Playing;
}
- (void)updateAudioBottomLabel
{
if (self.isAudioPlaying && self.audioProgressSeconds > 0 && self.audioDurationSeconds > 0) {
self.audioBottomLabel.text =
[NSString stringWithFormat:@"%@ / %@",
[OWSFormat formatDurationSeconds:(long)round(self.audioProgressSeconds)],
[OWSFormat formatDurationSeconds:(long)round(self.audioDurationSeconds)]];
} else {
self.audioBottomLabel.text =
[NSString stringWithFormat:@"%@", [OWSFormat formatDurationSeconds:(long)round(self.audioDurationSeconds)]];
}
}
- (void)setAudioIcon:(UIImage *)icon
{
icon = [icon resizedImageToSize:CGSizeMake(self.iconSize, self.iconSize)];
[_audioPlayPauseButton setImage:icon forState:UIControlStateNormal];
[_audioPlayPauseButton setImage:icon forState:UIControlStateDisabled];
}
- (void)setAudioIconToPlay
{
[self setAudioIcon:[UIImage imageNamed:@"CirclePlay"]];
}
- (void)setAudioIconToPause
{
[self setAudioIcon:[UIImage imageNamed:@"CirclePause"]];
}
- (void)updateAudioProgressView
{
[self.audioProgressView
setProgress:(self.audioDurationSeconds > 0 ? self.audioProgressSeconds / self.audioDurationSeconds : 0.f)];
UIColor *progressColor = [self.conversationStyle bubbleSecondaryTextColorWithIsIncoming:self.isIncoming];
self.audioProgressView.horizontalBarColor = progressColor;
self.audioProgressView.progressColor = progressColor;
}
- (void)replaceIconWithDownloadProgressIfNecessary:(UIView *)iconView
{
if (!self.viewItem.attachmentPointer) {
return;
}
switch (self.viewItem.attachmentPointer.state) {
case TSAttachmentPointerStateFailed:
// We don't need to handle the "tap to retry" state here,
// only download progress.
return;
case TSAttachmentPointerStateEnqueued:
case TSAttachmentPointerStateDownloading:
break;
}
switch (self.viewItem.attachmentPointer.pointerType) {
case TSAttachmentPointerTypeRestoring:
// TODO: Show "restoring" indicator and possibly progress.
return;
case TSAttachmentPointerTypeUnknown:
case TSAttachmentPointerTypeIncoming:
break;
}
NSString *_Nullable uniqueId = self.viewItem.attachmentPointer.uniqueId;
if (uniqueId.length < 1) {
OWSFailDebug(@"Missing uniqueId.");
return;
}
CGFloat downloadViewSize = self.iconSize;
MediaDownloadView *downloadView =
[[MediaDownloadView alloc] initWithAttachmentId:uniqueId radius:downloadViewSize * 0.5f];
iconView.layer.opacity = 0.01f;
[self addSubview:downloadView];
[downloadView autoSetDimensionsToSize:CGSizeMake(downloadViewSize, downloadViewSize)];
[downloadView autoAlignAxis:ALAxisHorizontal toSameAxisOfView:iconView];
[downloadView autoAlignAxis:ALAxisVertical toSameAxisOfView:iconView];
}
#pragma mark -
- (CGFloat)hMargin
{
return 0.f;
}
- (CGFloat)hSpacing
{
return 8.f;
}
+ (CGFloat)vMargin
{
return 0.f;
}
- (CGFloat)vMargin
{
return [OWSAudioMessageView vMargin];
}
+ (CGFloat)bubbleHeight
{
CGFloat iconHeight = self.iconSize;
CGFloat labelsHeight = ([OWSAudioMessageView labelFont].lineHeight * 2 +
[OWSAudioMessageView audioProgressViewHeight] + [OWSAudioMessageView labelVSpacing] * 2);
CGFloat contentHeight = MAX(iconHeight, labelsHeight);
return contentHeight + self.vMargin * 2;
}
- (CGFloat)bubbleHeight
{
return [OWSAudioMessageView bubbleHeight];
}
+ (CGFloat)iconSize
{
return 72.f;
}
- (CGFloat)iconSize
{
return [OWSAudioMessageView iconSize];
}
- (BOOL)isVoiceMessage
{
return self.attachment.isVoiceMessage;
}
- (void)createContents
{
self.axis = UILayoutConstraintAxisHorizontal;
self.alignment = UIStackViewAlignmentCenter;
self.spacing = self.hSpacing;
self.layoutMarginsRelativeArrangement = YES;
self.layoutMargins = UIEdgeInsetsMake(self.vMargin, 0, self.vMargin, 0);
_audioPlayPauseButton = [UIButton buttonWithType:UIButtonTypeCustom];
self.audioPlayPauseButton.enabled = NO;
[self addArrangedSubview:self.audioPlayPauseButton];
self.audioPlayPauseButton.imageView.contentMode = UIViewContentModeCenter;
[self.audioPlayPauseButton autoSetDimension:ALDimensionWidth toSize:56.f];
[self.audioPlayPauseButton autoSetDimension:ALDimensionHeight toSize:56.f];
self.audioPlayPauseButton.imageView.clipsToBounds = NO;
self.audioPlayPauseButton.clipsToBounds = NO;
self.clipsToBounds = NO;
[self replaceIconWithDownloadProgressIfNecessary:self.audioPlayPauseButton];
NSString *_Nullable filename = self.attachment.sourceFilename;
if (filename.length < 1) {
filename = [self.attachmentStream.originalFilePath lastPathComponent];
}
NSString *topText = [[filename stringByDeletingPathExtension] ows_stripped];
if (topText.length < 1) {
topText = [MIMETypeUtil fileExtensionForMIMEType:self.attachment.contentType].localizedUppercaseString;
}
if (topText.length < 1) {
topText = NSLocalizedString(@"GENERIC_ATTACHMENT_LABEL", @"A label for generic attachments.");
}
if (self.isVoiceMessage) {
topText = nil;
}
UILabel *topLabel = [UILabel new];
topLabel.text = topText;
topLabel.textColor = [self.conversationStyle bubbleTextColorWithIsIncoming:self.isIncoming];
topLabel.lineBreakMode = NSLineBreakByTruncatingMiddle;
topLabel.font = [OWSAudioMessageView labelFont];
AudioProgressView *audioProgressView = [AudioProgressView new];
self.audioProgressView = audioProgressView;
[self updateAudioProgressView];
[audioProgressView autoSetDimension:ALDimensionHeight toSize:[OWSAudioMessageView audioProgressViewHeight]];
UILabel *bottomLabel = [UILabel new];
self.audioBottomLabel = bottomLabel;
[self updateAudioBottomLabel];
bottomLabel.textColor = [self.conversationStyle bubbleSecondaryTextColorWithIsIncoming:self.isIncoming];
bottomLabel.lineBreakMode = NSLineBreakByTruncatingMiddle;
bottomLabel.font = [OWSAudioMessageView labelFont];
UIStackView *labelsView = [UIStackView new];
labelsView.axis = UILayoutConstraintAxisVertical;
labelsView.spacing = [OWSAudioMessageView labelVSpacing];
[labelsView addArrangedSubview:topLabel];
[labelsView addArrangedSubview:audioProgressView];
[labelsView addArrangedSubview:bottomLabel];
// Ensure the "audio progress" and "play button" are v-center-aligned using a container.
UIView *labelsContainerView = [UIView containerView];
[self addArrangedSubview:labelsContainerView];
[labelsContainerView addSubview:labelsView];
[labelsView autoPinWidthToSuperview];
[labelsView autoPinEdgeToSuperviewMargin:ALEdgeTop relation:NSLayoutRelationGreaterThanOrEqual];
[labelsView autoPinEdgeToSuperviewMargin:ALEdgeBottom relation:NSLayoutRelationGreaterThanOrEqual];
[audioProgressView autoAlignAxis:ALAxisHorizontal toSameAxisOfView:self.audioPlayPauseButton];
[self updateContents];
}
+ (CGFloat)audioProgressViewHeight
{
return 12.f;
}
+ (UIFont *)labelFont
{
return [UIFont ows_dynamicTypeCaption2Font];
}
+ (CGFloat)labelVSpacing
{
return 2.f;
}
@end
NS_ASSUME_NONNULL_END

@ -36,6 +36,7 @@ typedef NS_ENUM(NSUInteger, OWSMessageGestureLocation) {
imageView:(UIView *)imageView;
- (void)didTapAudioViewItem:(id<ConversationViewItem>)viewItem attachmentStream:(TSAttachmentStream *)attachmentStream;
- (void)didPanAudioViewItemToCurrentTime:(NSTimeInterval)currentTime;
- (void)didTapTruncatedTextMessage:(id<ConversationViewItem>)conversationItem;
@ -102,6 +103,7 @@ typedef NS_ENUM(NSUInteger, OWSMessageGestureLocation) {
- (void)addTapGestureHandler;
- (void)handleTapGesture:(UITapGestureRecognizer *)sender;
- (void)handlePanGesture:(UIPanGestureRecognizer *)sender;
@end

@ -5,7 +5,6 @@
#import "OWSMessageBubbleView.h"
#import "AttachmentUploadView.h"
#import "ConversationViewItem.h"
#import "OWSAudioMessageView.h"
#import "OWSBubbleShapeView.h"
#import "OWSBubbleView.h"
#import "OWSContactShareButtonsView.h"
@ -840,13 +839,12 @@ NS_ASSUME_NONNULL_BEGIN
OWSAssertDebug(attachment);
OWSAssertDebug([attachment isAudio]);
OWSAudioMessageView *audioMessageView = [[OWSAudioMessageView alloc] initWithAttachment:attachment
isIncoming:self.isIncoming
viewItem:self.viewItem
conversationStyle:self.conversationStyle];
self.viewItem.lastAudioMessageView = audioMessageView;
[audioMessageView createContents];
[self addProgressViewsIfNecessary:audioMessageView shouldShowDownloadProgress:NO];
LKVoiceMessageView *voiceMessageView = [[LKVoiceMessageView alloc] initWithVoiceMessage:attachment isOutgoing:self.isOutgoing];
[voiceMessageView setDuration:(int)self.viewItem.audioDurationSeconds];
[voiceMessageView setProgress:self.viewItem.audioProgressSeconds / self.viewItem.audioDurationSeconds];
[voiceMessageView initialize];
self.viewItem.lastAudioMessageView = voiceMessageView;
self.loadCellContentBlock = ^{
// Do nothing.
@ -855,7 +853,7 @@ NS_ASSUME_NONNULL_BEGIN
// Do nothing.
};
return audioMessageView;
return voiceMessageView;
}
- (UIView *)loadViewForGenericAttachment
@ -1068,7 +1066,7 @@ NS_ASSUME_NONNULL_BEGIN
return nil;
}
case OWSMessageCellType_Audio:
result = CGSizeMake(maxMessageWidth, OWSAudioMessageView.bubbleHeight);
result = CGSizeMake(maxMessageWidth, LKVoiceMessageView.contentHeight);
break;
case OWSMessageCellType_GenericAttachment: {
TSAttachment *attachment = (self.viewItem.attachmentStream ?: self.viewItem.attachmentPointer);
@ -1534,6 +1532,22 @@ NS_ASSUME_NONNULL_BEGIN
}
}
- (void)handlePanGesture:(UIPanGestureRecognizer *)sender
{
switch (self.cellType) {
case OWSMessageCellType_Audio: {
LKVoiceMessageView *voiceMessageView = self.viewItem.lastAudioMessageView;
NSTimeInterval currentTime = [voiceMessageView getCurrentTime:sender];
[self.viewItem setAudioProgress:((CGFloat)currentTime) duration:self.viewItem.audioDurationSeconds];
CGFloat progress = self.viewItem.audioProgressSeconds / self.viewItem.audioDurationSeconds;
[voiceMessageView setProgress:progress];
[self.delegate didPanAudioViewItemToCurrentTime:currentTime];
return;
}
default: return;
}
}
- (OWSMessageGestureLocation)gestureLocationForLocation:(CGPoint)locationInMessageBubble
{
if (self.quotedMessageView) {

@ -11,7 +11,7 @@
NS_ASSUME_NONNULL_BEGIN
@interface OWSMessageCell ()
@interface OWSMessageCell () <UIGestureRecognizerDelegate>
// The nullable properties are created as needed.
// The non-nullable properties are so frequently used that it's easier
@ -78,6 +78,11 @@ NS_ASSUME_NONNULL_BEGIN
UILongPressGestureRecognizer *longPress =
[[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleLongPressGesture:)];
[self.contentView addGestureRecognizer:longPress];
UIPanGestureRecognizer *pan =
[[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePanGesture:)];
pan.delegate = self;
[self.contentView addGestureRecognizer:pan];
}
- (void)dealloc
@ -488,6 +493,22 @@ NS_ASSUME_NONNULL_BEGIN
}
}
- (void)handlePanGesture:(UIPanGestureRecognizer *)sender
{
[self.messageBubbleView handlePanGesture:sender];
}
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer
{
LKVoiceMessageView *voiceMessageView = self.viewItem.lastAudioMessageView;
if (![gestureRecognizer isKindOfClass:UIPanGestureRecognizer.class] || voiceMessageView == nil) { return NO; }
UIPanGestureRecognizer *panGestureRecognizer = (UIPanGestureRecognizer *)gestureRecognizer;
CGPoint location = [panGestureRecognizer locationInView:voiceMessageView];
if (!CGRectContainsPoint(voiceMessageView.bounds, location)) { return NO; }
CGPoint velocity = [panGestureRecognizer velocityInView:voiceMessageView];
return fabs(velocity.x) > fabs(velocity.y);
}
- (BOOL)isGestureInCellHeader:(UIGestureRecognizer *)sender
{
OWSAssertDebug(self.viewItem);

@ -2430,7 +2430,7 @@ typedef enum : NSUInteger {
NSFileManager *fileManager = [NSFileManager defaultManager];
if (![fileManager fileExistsAtPath:attachmentStream.originalFilePath]) {
OWSFailDebug(@"Missing video file: %@", attachmentStream.originalMediaURL);
OWSFailDebug(@"Missing audio file: %@", attachmentStream.originalMediaURL);
}
[self dismissKeyBoard];
@ -2452,6 +2452,12 @@ typedef enum : NSUInteger {
// Associate the player with this media adapter.
self.audioAttachmentPlayer.owner = viewItem;
[self.audioAttachmentPlayer play];
[self.audioAttachmentPlayer setCurrentTime:viewItem.audioProgressSeconds];
}
- (void)didPanAudioViewItemToCurrentTime:(NSTimeInterval)currentTime
{
[self.audioAttachmentPlayer setCurrentTime:currentTime];
}
- (void)didTapTruncatedTextMessage:(id<ConversationViewItem>)conversationItem
@ -5398,13 +5404,13 @@ typedef enum : NSUInteger {
}
dispatch_async(dispatch_get_main_queue(), ^{
__block TSInteraction *targetInteraction;
[LKStorage writeSyncWithBlock:^(YapDatabaseReadWriteTransaction *transaction) {
[LKStorage readWithBlock:^(YapDatabaseReadTransaction *transaction) {
[self.thread enumerateInteractionsWithTransaction:transaction usingBlock:^(TSInteraction *interaction, YapDatabaseReadTransaction *t) {
if (interaction.timestampForUI == timestamp.unsignedLongLongValue) {
targetInteraction = interaction;
}
}];
} error:nil];
}];
if (targetInteraction == nil || targetInteraction.interactionType != OWSInteractionType_OutgoingMessage) { return; }
NSString *hexEncodedPublicKey = targetInteraction.thread.contactIdentifier;
if (hexEncodedPublicKey == nil) { return; }

@ -24,7 +24,7 @@ NSString *NSStringForOWSMessageCellType(OWSMessageCellType cellType);
@class ContactShareViewModel;
@class ConversationViewCell;
@class DisplayableText;
@class OWSAudioMessageView;
@class LKVoiceMessageView;
@class OWSLinkPreview;
@class OWSQuotedReplyModel;
@class OWSUnreadIndicator;
@ -99,7 +99,7 @@ NSString *NSStringForOWSMessageCellType(OWSMessageCellType cellType);
#pragma mark - Audio Playback
@property (nonatomic, weak) OWSAudioMessageView *lastAudioMessageView;
@property (nonatomic, weak) LKVoiceMessageView *lastAudioMessageView;
@property (nonatomic, readonly) CGFloat audioDurationSeconds;
@property (nonatomic, readonly) CGFloat audioProgressSeconds;

@ -3,7 +3,6 @@
//
#import "ConversationViewItem.h"
#import "OWSAudioMessageView.h"
#import "OWSContactOffersCell.h"
#import "OWSMessageCell.h"
#import "OWSMessageHeaderView.h"
@ -476,7 +475,8 @@ NSString *NSStringForOWSMessageCellType(OWSMessageCellType cellType)
{
_audioPlaybackState = audioPlaybackState;
[self.lastAudioMessageView updateContents];
BOOL isPlaying = (audioPlaybackState == AudioPlaybackState_Playing);
[self.lastAudioMessageView setIsPlaying:isPlaying];
}
- (void)setAudioProgress:(CGFloat)progress duration:(CGFloat)duration
@ -485,7 +485,7 @@ NSString *NSStringForOWSMessageCellType(OWSMessageCellType cellType)
self.audioProgressSeconds = progress;
[self.lastAudioMessageView updateContents];
[self.lastAudioMessageView setProgress:progress / duration];
}
#pragma mark - Displayable Text

@ -669,6 +669,10 @@ class MessageDetailViewController: OWSViewController, MediaGalleryDataSourceDele
audioAttachmentPlayer.play()
}
func didPanAudioViewItem(toCurrentTime currentTime: TimeInterval) {
// TODO: Implement
}
func didTapTruncatedTextMessage(_ conversationItem: ConversationViewItem) {
guard let navigationController = self.navigationController else {
owsFailDebug("navigationController was unexpectedly nil")

@ -64,8 +64,8 @@ public final class Values : NSObject {
@objc public static let onboardingButtonBottomOffset = isIPhone5OrSmaller ? CGFloat(52) : CGFloat(72)
// MARK: - Animation Values
@objc public static let fakeChatStartDelay: TimeInterval = 1.5
@objc public static let fakeChatStartDelay: TimeInterval = 1
@objc public static let fakeChatAnimationDuration: TimeInterval = 0.4
@objc public static let fakeChatDelay: TimeInterval = 2
@objc public static let fakeChatDelay: TimeInterval = 1.5
@objc public static let fakeChatMessagePopAnimationStartScale: CGFloat = 0.6
}

@ -1157,43 +1157,13 @@ typedef void (^ProfileManagerFailureBlock)(NSError *error);
OWSLogVerbose(@"downloading profile avatar: %@", userProfile.uniqueId);
NSString *tempDirectory = OWSTemporaryDirectory();
NSString *tempFilePath = [tempDirectory stringByAppendingPathComponent:fileName];
NSString *profilePictureURL = userProfile.avatarUrlPath;
NSError *serializationError;
NSMutableURLRequest *request =
[self.avatarHTTPManager.requestSerializer requestWithMethod:@"GET"
URLString:profilePictureURL
parameters:nil
error:&serializationError];
if (serializationError) {
OWSFailDebug(@"serializationError: %@", serializationError);
return;
}
NSURLSession* session = [NSURLSession sharedSession];
NSURLSessionTask* downloadTask = [session downloadTaskWithRequest:request completionHandler:^(NSURL *location, NSURLResponse *response, NSError *error) {
[[LKFileServerAPI downloadAttachmentFrom:profilePictureURL].then(^(NSData *data) {
@synchronized(self.currentAvatarDownloads)
{
[self.currentAvatarDownloads removeObject:userProfile.recipientId];
}
if (error) {
OWSLogError(@"Dowload failed: %@", error);
return;
}
NSFileManager *fileManager = [NSFileManager defaultManager];
NSURL *tempFileUrl = [NSURL fileURLWithPath:tempFilePath];
NSError *moveError;
if (![fileManager moveItemAtURL:location toURL:tempFileUrl error:&moveError]) {
OWSLogError(@"MoveItemAtURL for avatar failed: %@", moveError);
return;
}
NSData *_Nullable encryptedData = (error ? nil : [NSData dataWithContentsOfFile:tempFilePath]);
NSData *_Nullable encryptedData = data;
NSData *_Nullable decryptedData = [self decryptProfileData:encryptedData profileKey:profileKeyAtStart];
UIImage *_Nullable image = nil;
if (decryptedData) {
@ -1213,19 +1183,12 @@ typedef void (^ProfileManagerFailureBlock)(NSError *error);
if (latestUserProfile.avatarUrlPath.length > 0) {
[self downloadAvatarForUserProfile:latestUserProfile];
}
} else if (error) {
if ([response isKindOfClass:NSHTTPURLResponse.class]
&& ((NSHTTPURLResponse *)response).statusCode == 403) {
OWSLogInfo(@"no avatar for: %@", userProfile.recipientId);
} else {
OWSLogError(@"avatar download for %@ failed with error: %@", userProfile.recipientId, error);
}
} else if (!encryptedData) {
OWSLogError(@"avatar encrypted data for %@ could not be read.", userProfile.recipientId);
} else if (!decryptedData) {
OWSLogError(@"avatar data for %@ could not be decrypted.", userProfile.recipientId);
} else if (!image) {
OWSLogError(@"avatar image for %@ could not be loaded with error: %@", userProfile.recipientId, error);
OWSLogError(@"avatar image for %@ could not be loaded.", userProfile.recipientId);
} else {
[self updateProfileAvatarCache:image filename:fileName];
@ -1248,9 +1211,7 @@ typedef void (^ProfileManagerFailureBlock)(NSError *error);
OWSAssertDebug(backgroundTask);
backgroundTask = nil;
}];
[downloadTask resume];
}) retainUntilComplete];
});
}

@ -46,6 +46,7 @@ typedef NS_ENUM(NSUInteger, OWSAudioBehavior) {
delegate:(id<OWSAudioPlayerDelegate>)delegate;
- (void)play;
- (void)setCurrentTime:(NSTimeInterval)currentTime;
- (void)pause;
- (void)stop;
- (void)togglePlayState;

@ -156,6 +156,11 @@ NS_ASSUME_NONNULL_BEGIN
[DeviceSleepManager.sharedInstance addBlockWithBlockObject:self];
}
- (void)setCurrentTime:(NSTimeInterval)currentTime
{
[self.audioPlayer setCurrentTime:currentTime];
}
- (void)pause
{
OWSAssertIsOnMainThread();

@ -252,7 +252,6 @@ message DataMessage {
INFO = 1; // groupPublicKey, name, senderKeys, members, admins
SENDER_KEY_REQUEST = 2; // groupPublicKey
SENDER_KEY = 3; // groupPublicKey, senderKeys
}
message SenderKey {

@ -101,6 +101,39 @@ public class DotNetAPI : NSObject {
}
// MARK: Public API
@objc(downloadAttachmentFrom:)
public static func objc_downloadAttachment(from url: String) -> AnyPromise {
return AnyPromise.from(downloadAttachment(from: url))
}
public static func downloadAttachment(from url: String) -> Promise<Data> {
var error: NSError?
var host = "https://\(URL(string: url)!.host!)"
let sanitizedURL: String
if FileServerAPI.fileStorageBucketURL.contains(host) {
sanitizedURL = url.replacingOccurrences(of: FileServerAPI.fileStorageBucketURL, with: "\(FileServerAPI.server)/loki/v1")
host = FileServerAPI.server
} else {
sanitizedURL = url.replacingOccurrences(of: host, with: "\(host)/loki/v1")
}
let request = AFHTTPRequestSerializer().request(withMethod: "GET", urlString: sanitizedURL, parameters: nil, error: &error)
if let error = error {
print("[Loki] Couldn't download attachment due to error: \(error).")
return Promise(error: error)
}
let serverPublicKeyPromise = FileServerAPI.server.contains(host) ? Promise.value(FileServerAPI.fileServerPublicKey)
: PublicChatAPI.getOpenGroupServerPublicKey(for: host)
return serverPublicKeyPromise.then2 { serverPublicKey in
return OnionRequestAPI.sendOnionRequest(request, to: host, using: serverPublicKey, isJSONRequired: false).map2 { json in
guard let body = json["body"] as? JSON, let data = body["data"] as? [UInt8] else {
print("[Loki] Couldn't parse attachment from: \(json).")
throw DotNetAPIError.parsingFailed
}
return Data(data)
}
}
}
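// Illustrative sketch, not part of this change (the URL and file ID are hypothetical): what the
// sanitization above produces before the onion request is sent.
//
//     let exampleURL = "https://file-static.lokinet.org/1234"
//     exampleURL.replacingOccurrences(of: FileServerAPI.fileStorageBucketURL,
//         with: "\(FileServerAPI.server)/loki/v1")
//     // -> "https://file.getsession.org/loki/v1/1234"
//
// For any other server, "/loki/v1" is simply inserted right after the host instead.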
@objc(uploadAttachment:withID:toServer:)
public static func objc_uploadAttachment(_ attachment: TSAttachmentStream, with attachmentID: String, to server: String) -> AnyPromise {
return AnyPromise.from(uploadAttachment(attachment, with: attachmentID, to: server))

@ -11,13 +11,15 @@ public final class FileServerAPI : DotNetAPI {
public static let maxFileSize = 10_000_000 // 10 MB
/// The file server has a file size limit of `maxFileSize`, which the Service Nodes try to enforce as well. However, the limit applied by the Service Nodes
/// is on the **HTTP request** and not the file size. Because of onion request encryption, a file that's about 8 MB will result in a request that's about 10 MB.
/// On average the multiplier appears to be about 1.25, so when checking whether the file will exceed the file size limit when uploading a file we just divide
/// the size of the file by this number. The alternative would be to actually check the size of the HTTP request but that's only possible after proof of work
/// has been calculated and the onion request encryption has happened, which takes several seconds.
public static let fileSizeORMultiplier: Double = 1.25
/// is on the **HTTP request** and not the actual file size. Because the file server expects the file data to be base64 encoded, the HTTP request for a file of
/// `n` bytes will be at least `ceil(n / 3) * 4` bytes; that's only a lower bound because the request may contain other parameters as well. In practice the
/// multiplier appears to average about 1.5, so when checking whether a file will exceed the size limit before uploading it we simply divide the file size by
/// this number. The alternative would be to measure the size of the actual HTTP request, but that's only possible after proof of work has been calculated and
/// the onion request encryption has happened, which takes several seconds.
public static let fileSizeORMultiplier: Double = 1.5
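// Illustrative sketch, not part of this change (the 8 MB file size is hypothetical): the base64
// expansion the comment above describes, which is where the request-size multiplier comes from.
//
//     let n = 8_000_000                                // file size in bytes
//     let base64Bytes = (n + 2) / 3 * 4                // integer form of ceil(n / 3) * 4 = 10_666_668
//     let expansion = Double(base64Bytes) / Double(n)  // ≈ 1.33 before any other request parameters
//
// The remaining overhead (other parameters in the request) pushes the observed average up to roughly 1.5.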
@objc public static let server = "https://file.getsession.org"
@objc public static let fileStorageBucketURL = "https://file-static.lokinet.org"
// MARK: Storage
override internal class var authTokenCollection: String { return "LokiStorageAuthTokenCollection" }

@ -48,7 +48,7 @@ extension OnionRequestAPI {
guard let snodeED25519PublicKey = snode.publicKeySet?.ed25519Key else { return seal.reject(Error.snodePublicKeySetMissing) }
parameters = [ "destination" : snodeED25519PublicKey ]
case .server(let host, _):
parameters = [ "host" : host, "target" : "/loki/v1/lsrpc", "method" : "POST" ]
parameters = [ "host" : host, "target" : "/loki/v2/lsrpc", "method" : "POST" ]
}
parameters["ephemeral_key"] = previousEncryptionResult.ephemeralPublicKey.toHexString()
let x25519PublicKey: String

@ -264,7 +264,68 @@ public enum OnionRequestAPI {
}.map2 { _ in (guardSnode, encryptionResult, targetSnodeSymmetricKey) }
}
private static func sendOnionRequest(with payload: JSON, to destination: Destination, isJSONRequired: Bool = true) -> Promise<JSON> {
// MARK: Internal API
/// Sends an onion request to `snode`. Builds new paths as needed.
internal static func sendOnionRequest(to snode: Snode, invoking method: Snode.Method, with parameters: JSON, associatedWith publicKey: String) -> Promise<JSON> {
let payload: JSON = [ "method" : method.rawValue, "params" : parameters ]
return sendOnionRequest(with: payload, to: Destination.snode(snode)).recover2 { error -> Promise<JSON> in
guard case OnionRequestAPI.Error.httpRequestFailedAtDestination(let statusCode, let json) = error else { throw error }
throw SnodeAPI.handleError(withStatusCode: statusCode, json: json, forSnode: snode, associatedWith: publicKey) ?? error
}
}
/// Sends an onion request to `server`. Builds new paths as needed.
internal static func sendOnionRequest(_ request: NSURLRequest, to server: String, using x25519PublicKey: String, isJSONRequired: Bool = true) -> Promise<JSON> {
let rawHeaders = request.allHTTPHeaderFields ?? [:]
var headers: JSON = rawHeaders.mapValues { value in
switch value.lowercased() {
case "true": return true
case "false": return false
default: return value
}
}
guard let url = request.url?.absoluteString, let host = request.url?.host else { return Promise(error: Error.invalidURL) }
var endpoint = ""
if server.count < url.count {
guard let serverEndIndex = url.range(of: server)?.upperBound else { return Promise(error: Error.invalidURL) }
let endpointStartIndex = url.index(after: serverEndIndex)
endpoint = String(url[endpointStartIndex..<url.endIndex])
}
let parametersAsString: String
if let tsRequest = request as? TSRequest {
headers["Content-Type"] = "application/json"
let tsRequestParameters = tsRequest.parameters
if !tsRequestParameters.isEmpty {
guard let parameters = try? JSONSerialization.data(withJSONObject: tsRequestParameters, options: [ .fragmentsAllowed ]) else {
return Promise(error: HTTP.Error.invalidJSON)
}
parametersAsString = String(bytes: parameters, encoding: .utf8) ?? "null"
} else {
parametersAsString = "null"
}
} else {
headers["Content-Type"] = request.allHTTPHeaderFields!["Content-Type"]
if let parametersAsInputStream = request.httpBodyStream, let parameters = try? Data(from: parametersAsInputStream) {
parametersAsString = "{ \"fileUpload\" : \"\(String(data: parameters.base64EncodedData(), encoding: .utf8) ?? "null")\" }"
} else {
parametersAsString = "null"
}
}
let payload: JSON = [
"body" : parametersAsString,
"endpoint": endpoint,
"method" : request.httpMethod,
"headers" : headers
]
let destination = Destination.server(host: host, x25519PublicKey: x25519PublicKey)
let promise = sendOnionRequest(with: payload, to: destination, isJSONRequired: isJSONRequired)
promise.catch2 { error in
print("[Loki] [Onion Request API] Couldn't reach server: \(url) due to error: \(error).")
}
return promise
}
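// Illustrative sketch, not part of this change (the URL is hypothetical): the payload built above for a
// plain GET to "https://file.getsession.org/loki/v1/1234", sent with server "https://file.getsession.org",
// would look roughly like:
//
//     [ "body"     : "null",           // not a TSRequest and no body stream
//       "endpoint" : "loki/v1/1234",   // everything after the server prefix
//       "method"   : "GET",
//       "headers"  : [ /* headers carried by the original request */ ] ]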
internal static func sendOnionRequest(with payload: JSON, to destination: Destination, isJSONRequired: Bool = true) -> Promise<JSON> {
let (promise, seal) = Promise<JSON>.pending()
var guardSnode: Snode!
SnodeAPI.workQueue.async { // Avoid race conditions on `guardSnodes` and `paths`
@ -357,65 +418,4 @@ public enum OnionRequestAPI {
}
return promise
}
// MARK: Internal API
/// Sends an onion request to `snode`. Builds new paths as needed.
internal static func sendOnionRequest(to snode: Snode, invoking method: Snode.Method, with parameters: JSON, associatedWith publicKey: String) -> Promise<JSON> {
let payload: JSON = [ "method" : method.rawValue, "params" : parameters ]
return sendOnionRequest(with: payload, to: Destination.snode(snode)).recover2 { error -> Promise<JSON> in
guard case OnionRequestAPI.Error.httpRequestFailedAtDestination(let statusCode, let json) = error else { throw error }
throw SnodeAPI.handleError(withStatusCode: statusCode, json: json, forSnode: snode, associatedWith: publicKey) ?? error
}
}
/// Sends an onion request to `server`. Builds new paths as needed.
internal static func sendOnionRequest(_ request: NSURLRequest, to server: String, using x25519PublicKey: String, isJSONRequired: Bool = true) -> Promise<JSON> {
let rawHeaders = request.allHTTPHeaderFields ?? [:]
var headers: JSON = rawHeaders.mapValues { value in
switch value.lowercased() {
case "true": return true
case "false": return false
default: return value
}
}
guard let url = request.url?.absoluteString, let host = request.url?.host else { return Promise(error: Error.invalidURL) }
var endpoint = ""
if server.count < url.count {
guard let serverEndIndex = url.range(of: server)?.upperBound else { return Promise(error: Error.invalidURL) }
let endpointStartIndex = url.index(after: serverEndIndex)
endpoint = String(url[endpointStartIndex..<url.endIndex])
}
let parametersAsString: String
if let tsRequest = request as? TSRequest {
headers["Content-Type"] = "application/json"
let tsRequestParameters = tsRequest.parameters
if !tsRequestParameters.isEmpty {
guard let parameters = try? JSONSerialization.data(withJSONObject: tsRequestParameters, options: [ .fragmentsAllowed ]) else {
return Promise(error: HTTP.Error.invalidJSON)
}
parametersAsString = String(bytes: parameters, encoding: .utf8) ?? "null"
} else {
parametersAsString = "null"
}
} else {
headers["Content-Type"] = request.allHTTPHeaderFields!["Content-Type"]
if let parametersAsInputStream = request.httpBodyStream, let parameters = try? Data(from: parametersAsInputStream) {
parametersAsString = "{ \"fileUpload\" : \"\(String(data: parameters.base64EncodedData(), encoding: .utf8) ?? "null")\" }"
} else {
parametersAsString = "null"
}
}
let payload: JSON = [
"body" : parametersAsString,
"endpoint": endpoint,
"method" : request.httpMethod,
"headers" : headers
]
let destination = Destination.server(host: host, x25519PublicKey: x25519PublicKey)
let promise = sendOnionRequest(with: payload, to: destination, isJSONRequired: isJSONRequired)
promise.catch2 { error in
print("[Loki] [Onion Request API] Couldn't reach server: \(url) due to error: \(error).")
}
return promise
}
}

@ -388,27 +388,11 @@ public final class PublicChatAPI : DotNetAPI {
if oldProfilePictureURL != info.profilePictureURL || groupModel.groupImage == nil {
storage.setProfilePictureURL(info.profilePictureURL, forPublicChatWithID: publicChatID, in: transaction)
if let profilePictureURL = info.profilePictureURL {
let configuration = URLSessionConfiguration.default
let manager = AFURLSessionManager.init(sessionConfiguration: configuration)
let url = URL(string: "\(server)\(profilePictureURL)")!
let request = URLRequest(url: url)
let task = manager.downloadTask(with: request, progress: nil,
destination: { (targetPath: URL, response: URLResponse) -> URL in
let tempFilePath = URL(fileURLWithPath: OWSTemporaryDirectoryAccessibleAfterFirstAuth()).appendingPathComponent(UUID().uuidString)
return tempFilePath
},
completionHandler: { (response: URLResponse, filePath: URL?, error: Error?) in
if let error = error {
print("[Loki] Couldn't download profile picture for public chat channel with ID: \(channel) on server: \(server).")
return
}
if let filePath = filePath, let avatarData = try? Data.init(contentsOf: filePath) {
let attachmentStream = TSAttachmentStream(contentType: OWSMimeTypeImageJpeg, byteCount: UInt32(avatarData.count), sourceFilename: nil, caption: nil, albumMessageId: nil)
try! attachmentStream.write(avatarData)
groupThread.updateAvatar(with: attachmentStream)
}
})
task.resume()
FileServerAPI.downloadAttachment(from: "\(server)\(profilePictureURL)").map2 { data in
let attachmentStream = TSAttachmentStream(contentType: OWSMimeTypeImageJpeg, byteCount: UInt32(data.count), sourceFilename: nil, caption: nil, albumMessageId: nil)
try attachmentStream.write(data)
groupThread.updateAvatar(with: attachmentStream)
}
}
}
}

@ -89,7 +89,11 @@ public final class SnodeAPI : NSObject {
return Snode(address: "https://\(address)", port: UInt16(port), publicKeySet: Snode.KeySet(ed25519Key: ed25519PublicKey, x25519Key: x25519PublicKey))
})
// randomElement() uses the system's default random generator, which is cryptographically secure
return snodePool.randomElement()!
if !snodePool.isEmpty {
return snodePool.randomElement()!
} else {
throw SnodeAPIError.randomSnodePoolUpdatingFailed
}
}
}.done2 { snode in
seal.fulfill(snode)

@ -10,6 +10,7 @@ public final class ClosedGroupUtilities : NSObject {
@objc public static let invalidGroupPublicKey = SSKDecryptionError(domain: "SSKErrorDomain", code: 1, userInfo: [ NSLocalizedDescriptionKey : "Invalid group public key." ])
@objc public static let noData = SSKDecryptionError(domain: "SSKErrorDomain", code: 2, userInfo: [ NSLocalizedDescriptionKey : "Received an empty envelope." ])
@objc public static let noGroupPrivateKey = SSKDecryptionError(domain: "SSKErrorDomain", code: 3, userInfo: [ NSLocalizedDescriptionKey : "Missing group private key." ])
@objc public static let selfSend = SSKDecryptionError(domain: "SSKErrorDomain", code: 4, userInfo: [ NSLocalizedDescriptionKey : "Message addressed to self." ])
}
@objc(encryptData:usingGroupPublicKey:transaction:error:)
@ -59,6 +60,7 @@ public final class ClosedGroupUtilities : NSObject {
// 4. ) Parse the closed group ciphertext message
let closedGroupCiphertextMessage = ClosedGroupCiphertextMessage(_throws_with: closedGroupCiphertextMessageAsData)
let senderPublicKey = closedGroupCiphertextMessage.senderPublicKey.toHexString()
guard senderPublicKey != getUserHexEncodedPublicKey() else { throw SSKDecryptionError.selfSend }
// 5. ) Use the info inside the closed group ciphertext message to decrypt the actual message content
let plaintext = try SharedSenderKeysImplementation.shared.decrypt(closedGroupCiphertextMessage.ivAndCiphertext, forGroupWithPublicKey: groupPublicKey,
senderPublicKey: senderPublicKey, keyIndex: UInt(closedGroupCiphertextMessage.keyIndex), protocolContext: transaction)

@ -118,13 +118,25 @@ public final class ClosedGroupsProtocol : NSObject {
print("[Loki] Can't remove self and others simultaneously.")
return Promise(error: Error.invalidUpdate)
}
// Send the update to the group (don't include new ratchets as everyone should regenerate new ratchets individually)
let closedGroupUpdateMessageKind = ClosedGroupUpdateMessage.Kind.info(groupPublicKey: Data(hex: groupPublicKey), name: name, senderKeys: [],
members: membersAsData, admins: adminsAsData)
let closedGroupUpdateMessage = ClosedGroupUpdateMessage(thread: thread, kind: closedGroupUpdateMessageKind)
SSKEnvironment.shared.messageSender.send(closedGroupUpdateMessage, success: { seal.fulfill(()) }, failure: { seal.reject($0) })
// Establish sessions if needed
establishSessionsIfNeeded(with: [String](members), using: transaction)
// Send the update to the existing members using established channels (don't include new ratchets as everyone should regenerate new ratchets individually)
let promises: [Promise<Void>] = oldMembers.map { member in
let thread = TSContactThread.getOrCreateThread(withContactId: member, transaction: transaction)
thread.save(with: transaction)
let closedGroupUpdateMessageKind = ClosedGroupUpdateMessage.Kind.info(groupPublicKey: Data(hex: groupPublicKey), name: name, senderKeys: [],
members: membersAsData, admins: adminsAsData)
let closedGroupUpdateMessage = ClosedGroupUpdateMessage(thread: thread, kind: closedGroupUpdateMessageKind)
return SSKEnvironment.shared.messageSender.sendPromise(message: closedGroupUpdateMessage)
}
when(resolved: promises).done2 { _ in seal.fulfill(()) }.catch2 { seal.reject($0) }
promise.done {
try! Storage.writeSync { transaction in
let allOldRatchets = Storage.getAllClosedGroupRatchets(for: groupPublicKey)
for (senderPublicKey, oldRatchet) in allOldRatchets {
let collection = Storage.ClosedGroupRatchetCollectionType.old
Storage.setClosedGroupRatchet(for: groupPublicKey, senderPublicKey: senderPublicKey, ratchet: oldRatchet, in: collection, using: transaction)
}
// Delete all ratchets (it's important that this happens *after* sending out the update)
Storage.removeAllClosedGroupRatchets(for: groupPublicKey, using: transaction)
// Remove the group from the user's set of public keys to poll for if the user is leaving. Otherwise generate a new ratchet and
@ -134,8 +146,6 @@ public final class ClosedGroupsProtocol : NSObject {
// Notify the PN server
LokiPushNotificationManager.performOperation(.unsubscribe, for: groupPublicKey, publicKey: userPublicKey)
} else {
// Establish sessions if needed
establishSessionsIfNeeded(with: [String](members), using: transaction)
// Send closed group update messages to any new members using established channels
for member in newMembers {
let thread = TSContactThread.getOrCreateThread(withContactId: member, transaction: transaction)
@ -203,13 +213,13 @@ public final class ClosedGroupsProtocol : NSObject {
return promise
}
/// The returned promise is fulfilled when the message has been sent **to the group**. It doesn't wait for the user's new ratchet to be distributed.
/// The returned promise is fulfilled when the group update message has been sent. It doesn't wait for the user's new ratchet to be distributed.
@objc(leaveGroupWithPublicKey:transaction:)
public static func objc_leave(_ groupPublicKey: String, using transaction: YapDatabaseReadWriteTransaction) -> AnyPromise {
return AnyPromise.from(leave(groupPublicKey, using: transaction))
}
/// The returned promise is fulfilled when the message has been sent **to the group**. It doesn't wait for the user's new ratchet to be distributed.
/// The returned promise is fulfilled when the group update message has been sent. It doesn't wait for the user's new ratchet to be distributed.
public static func leave(_ groupPublicKey: String, using transaction: YapDatabaseReadWriteTransaction) -> Promise<Void> {
let userPublicKey = UserDefaults.standard[.masterHexEncodedPublicKey] ?? getUserHexEncodedPublicKey()
let groupID = LKGroupUtilities.getEncodedClosedGroupIDAsData(groupPublicKey)
@ -358,6 +368,11 @@ public final class ClosedGroupsProtocol : NSObject {
let userPublicKey = UserDefaults.standard[.masterHexEncodedPublicKey] ?? getUserHexEncodedPublicKey()
let wasUserRemoved = !members.contains(userPublicKey)
if Set(members).intersection(oldMembers) != Set(oldMembers) {
let allOldRatchets = Storage.getAllClosedGroupRatchets(for: groupPublicKey)
for (senderPublicKey, oldRatchet) in allOldRatchets {
let collection = Storage.ClosedGroupRatchetCollectionType.old
Storage.setClosedGroupRatchet(for: groupPublicKey, senderPublicKey: senderPublicKey, ratchet: oldRatchet, in: collection, using: transaction)
}
Storage.removeAllClosedGroupRatchets(for: groupPublicKey, using: transaction)
if wasUserRemoved {
Storage.removeClosedGroupPrivateKey(for: groupPublicKey, using: transaction)

@ -40,11 +40,13 @@ public final class SharedSenderKeysImplementation : NSObject {
public enum RatchetingError : LocalizedError {
case loadingFailed(groupPublicKey: String, senderPublicKey: String)
case messageKeyMissing(targetKeyIndex: UInt, groupPublicKey: String, senderPublicKey: String)
case generic
public var errorDescription: String? {
switch self {
case .loadingFailed(let groupPublicKey, let senderPublicKey): return "Couldn't get ratchet for closed group with public key: \(groupPublicKey), sender public key: \(senderPublicKey)."
case .messageKeyMissing(let targetKeyIndex, let groupPublicKey, let senderPublicKey): return "Couldn't find message key for old key index: \(targetKeyIndex), public key: \(groupPublicKey), sender public key: \(senderPublicKey)."
case .generic: return "An error occurred."
}
}
}
@ -66,7 +68,8 @@ public final class SharedSenderKeysImplementation : NSObject {
let nextMessageKey = try HMAC(key: Data(hex: ratchet.chainKey).bytes, variant: .sha256).authenticate([ UInt8(1) ])
let nextChainKey = try HMAC(key: Data(hex: ratchet.chainKey).bytes, variant: .sha256).authenticate([ UInt8(2) ])
let nextKeyIndex = ratchet.keyIndex + 1
return ClosedGroupRatchet(chainKey: nextChainKey.toHexString(), keyIndex: nextKeyIndex, messageKeys: [ nextMessageKey.toHexString() ])
let messageKeys = ratchet.messageKeys + [ nextMessageKey.toHexString() ]
return ClosedGroupRatchet(chainKey: nextChainKey.toHexString(), keyIndex: nextKeyIndex, messageKeys: messageKeys)
}
/// - Note: Sync. Don't call from the main thread.
@ -90,11 +93,12 @@ public final class SharedSenderKeysImplementation : NSObject {
}
/// - Note: Sync. Don't call from the main thread.
private func stepRatchet(for groupPublicKey: String, senderPublicKey: String, until targetKeyIndex: UInt, using transaction: YapDatabaseReadWriteTransaction) throws -> ClosedGroupRatchet {
private func stepRatchet(for groupPublicKey: String, senderPublicKey: String, until targetKeyIndex: UInt, using transaction: YapDatabaseReadWriteTransaction, isRetry: Bool = false) throws -> ClosedGroupRatchet {
#if DEBUG
assert(!Thread.isMainThread)
#endif
guard let ratchet = Storage.getClosedGroupRatchet(for: groupPublicKey, senderPublicKey: senderPublicKey) else {
let collection: Storage.ClosedGroupRatchetCollectionType = (isRetry) ? .old : .current
guard let ratchet = Storage.getClosedGroupRatchet(for: groupPublicKey, senderPublicKey: senderPublicKey, from: collection) else {
let error = RatchetingError.loadingFailed(groupPublicKey: groupPublicKey, senderPublicKey: senderPublicKey)
print("[Loki] \(error.errorDescription!)")
throw error
@ -109,20 +113,18 @@ public final class SharedSenderKeysImplementation : NSObject {
return ratchet
} else {
var currentKeyIndex = ratchet.keyIndex
var current = ratchet
var messageKeys: [String] = []
var result = ratchet
while currentKeyIndex < targetKeyIndex {
do {
current = try step(current)
messageKeys += current.messageKeys
currentKeyIndex = current.keyIndex
result = try step(result)
currentKeyIndex = result.keyIndex
} catch {
print("[Loki] Couldn't step ratchet due to error: \(error).")
throw error
}
}
let result = ClosedGroupRatchet(chainKey: current.chainKey, keyIndex: current.keyIndex, messageKeys: messageKeys) // Includes any skipped message keys
Storage.setClosedGroupRatchet(for: groupPublicKey, senderPublicKey: senderPublicKey, ratchet: result, using: transaction)
let collection: Storage.ClosedGroupRatchetCollectionType = (isRetry) ? .old : .current
Storage.setClosedGroupRatchet(for: groupPublicKey, senderPublicKey: senderPublicKey, ratchet: result, in: collection, using: transaction)
return result
}
}
@ -161,30 +163,49 @@ public final class SharedSenderKeysImplementation : NSObject {
return try decrypt(ivAndCiphertext, for: groupPublicKey, senderPublicKey: senderPublicKey, keyIndex: keyIndex, using: transaction)
}
public func decrypt(_ ivAndCiphertext: Data, for groupPublicKey: String, senderPublicKey: String, keyIndex: UInt, using transaction: YapDatabaseReadWriteTransaction) throws -> Data {
public func decrypt(_ ivAndCiphertext: Data, for groupPublicKey: String, senderPublicKey: String, keyIndex: UInt, using transaction: YapDatabaseReadWriteTransaction, isRetry: Bool = false) throws -> Data {
let ratchet: ClosedGroupRatchet
do {
ratchet = try stepRatchet(for: groupPublicKey, senderPublicKey: senderPublicKey, until: keyIndex, using: transaction)
ratchet = try stepRatchet(for: groupPublicKey, senderPublicKey: senderPublicKey, until: keyIndex, using: transaction, isRetry: isRetry)
} catch {
// FIXME: It'd be cleaner to handle this in OWSMessageDecrypter (where all the other decryption errors are handled), but this was a lot more
// convenient because there's an easy way to get the sender public key from here.
if case RatchetingError.loadingFailed(_, _) = error {
ClosedGroupsProtocol.requestSenderKey(for: groupPublicKey, senderPublicKey: senderPublicKey, using: transaction)
if !isRetry {
return try decrypt(ivAndCiphertext, for: groupPublicKey, senderPublicKey: senderPublicKey, keyIndex: keyIndex, using: transaction, isRetry: true)
} else {
// FIXME: It'd be cleaner to handle this in OWSMessageDecrypter (where all the other decryption errors are handled), but this was a lot more
// convenient because there's an easy way to get the sender public key from here.
if case RatchetingError.loadingFailed(_, _) = error {
ClosedGroupsProtocol.requestSenderKey(for: groupPublicKey, senderPublicKey: senderPublicKey, using: transaction)
}
throw error
}
throw error
}
let iv = ivAndCiphertext[0..<Int(SharedSenderKeysImplementation.ivSize)]
let ciphertext = ivAndCiphertext[Int(SharedSenderKeysImplementation.ivSize)...]
let gcm = GCM(iv: iv.bytes, tagLength: Int(SharedSenderKeysImplementation.gcmTagSize), mode: .combined)
guard let messageKey = ratchet.messageKeys.last else {
let messageKeys = ratchet.messageKeys
let lastNMessageKeys: [String]
if messageKeys.count > 16 { // Pick an arbitrary number of message keys to try; this helps resolve issues caused by messages arriving out of order
lastNMessageKeys = [String](messageKeys[messageKeys.index(messageKeys.endIndex, offsetBy: -16)..<messageKeys.endIndex])
} else {
lastNMessageKeys = messageKeys
}
guard !lastNMessageKeys.isEmpty else {
throw RatchetingError.messageKeyMissing(targetKeyIndex: keyIndex, groupPublicKey: groupPublicKey, senderPublicKey: senderPublicKey)
}
let aes = try AES(key: Data(hex: messageKey).bytes, blockMode: gcm, padding: .noPadding)
do {
return Data(try aes.decrypt(ciphertext.bytes))
} catch {
var error: Error?
for messageKey in lastNMessageKeys.reversed() { // Reversed because most likely the last one is the one we need
let aes = try AES(key: Data(hex: messageKey).bytes, blockMode: gcm, padding: .noPadding)
do {
return Data(try aes.decrypt(ciphertext.bytes))
} catch (let e) {
error = e
}
}
if !isRetry {
return try decrypt(ivAndCiphertext, for: groupPublicKey, senderPublicKey: senderPublicKey, keyIndex: keyIndex, using: transaction, isRetry: true)
} else {
ClosedGroupsProtocol.requestSenderKey(for: groupPublicKey, senderPublicKey: senderPublicKey, using: transaction)
throw error
throw error ?? RatchetingError.generic
}
}

@ -1,13 +1,20 @@
public extension Storage {
internal enum ClosedGroupRatchetCollectionType {
case old, current
}
// MARK: Ratchets
internal static func getClosedGroupRatchetCollection(for groupPublicKey: String) -> String {
return "LokiClosedGroupRatchetCollection.\(groupPublicKey)"
internal static func getClosedGroupRatchetCollection(_ collection: ClosedGroupRatchetCollectionType, for groupPublicKey: String) -> String {
switch collection {
case .old: return "LokiOldClosedGroupRatchetCollection.\(groupPublicKey)"
case .current: return "LokiClosedGroupRatchetCollection.\(groupPublicKey)"
}
}
internal static func getClosedGroupRatchet(for groupPublicKey: String, senderPublicKey: String) -> ClosedGroupRatchet? {
let collection = getClosedGroupRatchetCollection(for: groupPublicKey)
internal static func getClosedGroupRatchet(for groupPublicKey: String, senderPublicKey: String, from collection: ClosedGroupRatchetCollectionType = .current) -> ClosedGroupRatchet? {
let collection = getClosedGroupRatchetCollection(collection, for: groupPublicKey)
var result: ClosedGroupRatchet?
read { transaction in
result = transaction.object(forKey: senderPublicKey, inCollection: collection) as? ClosedGroupRatchet
@ -15,26 +22,31 @@ public extension Storage {
return result
}
internal static func setClosedGroupRatchet(for groupPublicKey: String, senderPublicKey: String, ratchet: ClosedGroupRatchet, using transaction: YapDatabaseReadWriteTransaction) {
let collection = getClosedGroupRatchetCollection(for: groupPublicKey)
internal static func setClosedGroupRatchet(for groupPublicKey: String, senderPublicKey: String, ratchet: ClosedGroupRatchet, in collection: ClosedGroupRatchetCollectionType = .current, using transaction: YapDatabaseReadWriteTransaction) {
let collection = getClosedGroupRatchetCollection(collection, for: groupPublicKey)
transaction.setObject(ratchet, forKey: senderPublicKey, inCollection: collection)
}
internal static func getAllClosedGroupSenderKeys(for groupPublicKey: String) -> Set<ClosedGroupSenderKey> {
let collection = getClosedGroupRatchetCollection(for: groupPublicKey)
var result: Set<ClosedGroupSenderKey> = []
internal static func getAllClosedGroupRatchets(for groupPublicKey: String, from collection: ClosedGroupRatchetCollectionType = .current) -> [(senderPublicKey: String, ratchet: ClosedGroupRatchet)] {
let collection = getClosedGroupRatchetCollection(collection, for: groupPublicKey)
var result: [(senderPublicKey: String, ratchet: ClosedGroupRatchet)] = []
read { transaction in
transaction.enumerateRows(inCollection: collection) { key, object, _, _ in
guard let publicKey = key as? String, let ratchet = object as? ClosedGroupRatchet else { return }
let senderKey = ClosedGroupSenderKey(chainKey: Data(hex: ratchet.chainKey), keyIndex: ratchet.keyIndex, publicKey: Data(hex: publicKey))
result.insert(senderKey)
guard let senderPublicKey = key as? String, let ratchet = object as? ClosedGroupRatchet else { return }
result.append((senderPublicKey: senderPublicKey, ratchet: ratchet))
}
}
return result
}
internal static func removeAllClosedGroupRatchets(for groupPublicKey: String, using transaction: YapDatabaseReadWriteTransaction) {
let collection = getClosedGroupRatchetCollection(for: groupPublicKey)
internal static func getAllClosedGroupSenderKeys(for groupPublicKey: String, from collection: ClosedGroupRatchetCollectionType = .current) -> Set<ClosedGroupSenderKey> {
return Set(getAllClosedGroupRatchets(for: groupPublicKey, from: collection).map { senderPublicKey, ratchet in
ClosedGroupSenderKey(chainKey: Data(hex: ratchet.chainKey), keyIndex: ratchet.keyIndex, publicKey: Data(hex: senderPublicKey))
})
}
internal static func removeAllClosedGroupRatchets(for groupPublicKey: String, from collection: ClosedGroupRatchetCollectionType = .current, using transaction: YapDatabaseReadWriteTransaction) {
let collection = getClosedGroupRatchetCollection(collection, for: groupPublicKey)
transaction.removeAllObjects(inCollection: collection)
}
}

@ -500,12 +500,6 @@ typedef void (^AttachmentDownloadFailure)(NSError *error);
OWSAssertDebug(job);
TSAttachmentPointer *attachmentPointer = job.attachmentPointer;
AFHTTPSessionManager *manager = [AFHTTPSessionManager manager];
manager.requestSerializer = [AFHTTPRequestSerializer serializer];
[manager.requestSerializer setValue:OWSMimeTypeApplicationOctetStream forHTTPHeaderField:@"Content-Type"];
manager.responseSerializer = [AFHTTPResponseSerializer serializer];
manager.completionQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
// We want to avoid large downloads from a compromised or buggy service.
const long kMaxDownloadSize = 10 * 1024 * 1024;
__block BOOL hasCheckedContentLength = NO;
@ -513,7 +507,6 @@ typedef void (^AttachmentDownloadFailure)(NSError *error);
NSString *tempFilePath =
[OWSTemporaryDirectoryAccessibleAfterFirstAuth() stringByAppendingPathComponent:[NSUUID UUID].UUIDString];
NSURL *tempFileURL = [NSURL fileURLWithPath:tempFilePath];
__block NSURLSessionDownloadTask *task;
void (^failureHandler)(NSError *) = ^(NSError *error) {
OWSLogError(@"Failed to download attachment with error: %@", error.description);
@ -524,125 +517,27 @@ typedef void (^AttachmentDownloadFailure)(NSError *error);
failureHandlerParam(task, error);
};
[[LKFileServerAPI downloadAttachmentFrom:location].then(^(NSData *data) {
BOOL success = [data writeToFile:tempFilePath atomically:YES];
if (success) {
successHandler(tempFilePath);
}
NSString *method = @"GET";
NSError *serializationError = nil;
NSMutableURLRequest *request = [manager.requestSerializer requestWithMethod:method
URLString:location
parameters:nil
error:&serializationError];
if (serializationError) {
return failureHandler(serializationError);
}
task = [manager downloadTaskWithRequest:request
progress:^(NSProgress *progress) {
OWSAssertDebug(progress != nil);
// Don't do anything until we've received at least one byte of data.
if (progress.completedUnitCount < 1) {
return;
}
void (^abortDownload)(void) = ^{
OWSFailDebug(@"Download aborted.");
[task cancel];
};
if (progress.totalUnitCount > kMaxDownloadSize || progress.completedUnitCount > kMaxDownloadSize) {
// A malicious service might send a misleading content length header,
// so....
//
// If the current downloaded bytes or the expected total bytes
// exceed the max download size, abort the download.
OWSLogError(@"Attachment download exceed expected content length: %lld, %lld.",
(long long)progress.totalUnitCount,
(long long)progress.completedUnitCount);
abortDownload();
return;
}
job.progress = progress.fractionCompleted;
[self fireProgressNotification:MAX(kAttachmentDownloadProgressTheta, progress.fractionCompleted)
attachmentId:attachmentPointer.uniqueId];
// We only need to check the content length header once.
if (hasCheckedContentLength) {
return;
}
// Once we've received some bytes of the download, check the content length
// header for the download.
//
// If the task doesn't exist, or doesn't have a response, or is missing
// the expected headers, or has an invalid or oversize content length, etc.,
// abort the download.
NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)task.response;
if (![httpResponse isKindOfClass:[NSHTTPURLResponse class]]) {
OWSLogError(@"Attachment download has missing or invalid response.");
abortDownload();
return;
}
NSDictionary *headers = [httpResponse allHeaderFields];
if (![headers isKindOfClass:[NSDictionary class]]) {
OWSLogError(@"Attachment download invalid headers.");
abortDownload();
return;
}
NSString *contentLength = headers[@"Content-Length"];
if (![contentLength isKindOfClass:[NSString class]]) {
OWSLogError(@"Attachment download missing or invalid content length.");
abortDownload();
return;
}
if (contentLength.longLongValue > kMaxDownloadSize) {
OWSLogError(@"Attachment download content length exceeds max download size.");
abortDownload();
return;
}
// This response has a valid content length that is less
// than our max download size. Proceed with the download.
hasCheckedContentLength = YES;
NSNumber *_Nullable fileSize = [OWSFileSystem fileSizeOfPath:tempFilePath];
if (!fileSize) {
OWSLogError(@"Could not determine attachment file size.");
NSError *error = OWSErrorWithCodeDescription(
OWSErrorCodeInvalidMessage, NSLocalizedString(@"ERROR_MESSAGE_INVALID_MESSAGE", @""));
return failureHandler(error);
}
destination:^(NSURL *targetPath, NSURLResponse *response) {
return tempFileURL;
if (fileSize.unsignedIntegerValue > kMaxDownloadSize) {
OWSLogError(@"Attachment download length exceeds max size.");
NSError *error = OWSErrorWithCodeDescription(
OWSErrorCodeInvalidMessage, NSLocalizedString(@"ERROR_MESSAGE_INVALID_MESSAGE", @""));
return failureHandler(error);
}
completionHandler:^(NSURLResponse *response, NSURL *_Nullable filePath, NSError *_Nullable error) {
if (error) {
failureHandler(error);
return;
}
if (![tempFileURL isEqual:filePath]) {
OWSLogError(@"Unexpected temp file path.");
NSError *error = OWSErrorWithCodeDescription(
OWSErrorCodeInvalidMessage, NSLocalizedString(@"ERROR_MESSAGE_INVALID_MESSAGE", @""));
return failureHandler(error);
}
NSNumber *_Nullable fileSize = [OWSFileSystem fileSizeOfPath:tempFilePath];
if (!fileSize) {
OWSLogError(@"Could not determine attachment file size.");
NSError *error = OWSErrorWithCodeDescription(
OWSErrorCodeInvalidMessage, NSLocalizedString(@"ERROR_MESSAGE_INVALID_MESSAGE", @""));
return failureHandler(error);
}
if (fileSize.unsignedIntegerValue > kMaxDownloadSize) {
OWSLogError(@"Attachment download length exceeds max size.");
NSError *error = OWSErrorWithCodeDescription(
OWSErrorCodeInvalidMessage, NSLocalizedString(@"ERROR_MESSAGE_INVALID_MESSAGE", @""));
return failureHandler(error);
}
successHandler(tempFilePath);
}];
[task resume];
}) retainUntilComplete];
}
- (void)fireProgressNotification:(CGFloat)progress attachmentId:(NSString *)attachmentId
