Audio farts.

slight change

modified pbxproj to clean up resources

reset DevelopmentTeam in pbxproj back to its previous value

deleted one line
pull/1/head
Joyce Yan 10 years ago committed by Frederic Jacobs
parent ccdc4b5d17
commit b494b71dbc

@ -287,6 +287,10 @@
AA0C8E498E2046B0B81EEE6E /* libPods.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 8313AE91B4954215858A5662 /* libPods.a */; };
AD41D7B51A6F6F0600241130 /* play_button.png in Resources */ = {isa = PBXBuildFile; fileRef = AD41D7B31A6F6F0600241130 /* play_button.png */; };
AD41D7B61A6F6F0600241130 /* play_button@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = AD41D7B41A6F6F0600241130 /* play_button@2x.png */; };
B10C9B5F1A7049EC00ECA2BF /* pause_icon.png in Resources */ = {isa = PBXBuildFile; fileRef = B10C9B5B1A7049EC00ECA2BF /* pause_icon.png */; };
B10C9B601A7049EC00ECA2BF /* pause_icon@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B10C9B5C1A7049EC00ECA2BF /* pause_icon@2x.png */; };
B10C9B611A7049EC00ECA2BF /* play_icon.png in Resources */ = {isa = PBXBuildFile; fileRef = B10C9B5D1A7049EC00ECA2BF /* play_icon.png */; };
B10C9B621A7049EC00ECA2BF /* play_icon@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B10C9B5E1A7049EC00ECA2BF /* play_icon@2x.png */; };
B6019E971A2492AB001118DF /* NSDate+millisecondTimeStamp.mm in Sources */ = {isa = PBXBuildFile; fileRef = B6019E961A2492AB001118DF /* NSDate+millisecondTimeStamp.mm */; };
B60C16651988999D00E97A6C /* VersionMigrations.m in Sources */ = {isa = PBXBuildFile; fileRef = B60C16641988999D00E97A6C /* VersionMigrations.m */; };
B60EDE041A05A01700D73516 /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B60EDE031A05A01700D73516 /* AudioToolbox.framework */; };
@ -881,6 +885,10 @@
A5E9D4BA1A65FAD800E4481C /* TSVideoAttachmentAdapter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TSVideoAttachmentAdapter.h; sourceTree = "<group>"; };
AD41D7B31A6F6F0600241130 /* play_button.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = play_button.png; sourceTree = "<group>"; };
AD41D7B41A6F6F0600241130 /* play_button@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "play_button@2x.png"; sourceTree = "<group>"; };
B10C9B5B1A7049EC00ECA2BF /* pause_icon.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = pause_icon.png; sourceTree = "<group>"; };
B10C9B5C1A7049EC00ECA2BF /* pause_icon@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "pause_icon@2x.png"; sourceTree = "<group>"; };
B10C9B5D1A7049EC00ECA2BF /* play_icon.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = play_icon.png; sourceTree = "<group>"; };
B10C9B5E1A7049EC00ECA2BF /* play_icon@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "play_icon@2x.png"; sourceTree = "<group>"; };
B6019E951A2492AB001118DF /* NSDate+millisecondTimeStamp.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "NSDate+millisecondTimeStamp.h"; sourceTree = "<group>"; };
B6019E961A2492AB001118DF /* NSDate+millisecondTimeStamp.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = "NSDate+millisecondTimeStamp.mm"; sourceTree = "<group>"; };
B60C16631988999D00E97A6C /* VersionMigrations.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = VersionMigrations.h; sourceTree = "<group>"; };
@ -2128,6 +2136,10 @@
B633C4FD1A1D190B0059AC12 /* Images */ = {
isa = PBXGroup;
children = (
B10C9B5B1A7049EC00ECA2BF /* pause_icon.png */,
B10C9B5C1A7049EC00ECA2BF /* pause_icon@2x.png */,
B10C9B5D1A7049EC00ECA2BF /* play_icon.png */,
B10C9B5E1A7049EC00ECA2BF /* play_icon@2x.png */,
AD41D7B31A6F6F0600241130 /* play_button.png */,
AD41D7B41A6F6F0600241130 /* play_button@2x.png */,
FC3BD9851A30A62D005B96BB /* twitter@2x.png */,
@ -2917,6 +2929,7 @@
FC5CDF391A3393DD00B47253 /* error_white@2x.png in Resources */,
B633C5851A1D190B0059AC12 /* blue-archive@2x.png in Resources */,
B633C5D21A1D190B0059AC12 /* savephoto@2x.png in Resources */,
B10C9B611A7049EC00ECA2BF /* play_icon.png in Resources */,
B633C5921A1D190B0059AC12 /* contacts_tab@2x.png in Resources */,
B6416FB8199A0478003C5699 /* Localizable.strings in Resources */,
FCB626A51A3B00FA00FDB504 /* info@2x.png in Resources */,
@ -2945,12 +2958,14 @@
FCA52B071A2BBAE400CCADFA /* call_tab@2x.png in Resources */,
B633C58D1A1D190B0059AC12 /* contact_default_feed.png in Resources */,
FC3BD97C1A2CD385005B96BB /* signal_dotted@2x.png in Resources */,
B10C9B621A7049EC00ECA2BF /* play_icon@2x.png in Resources */,
B633C5CD1A1D190B0059AC12 /* photo@2x.png in Resources */,
A507A3B01A6C60E300BEED0D /* ContactTableViewCell.xib in Resources */,
B633C5861A1D190B0059AC12 /* call@2x.png in Resources */,
B67EBF5D19194AC60084CCFD /* Settings.bundle in Resources */,
E1370BE418A0686C00826894 /* outring.mp3 in Resources */,
B633C5841A1D190B0059AC12 /* backspace@2x.png in Resources */,
B10C9B601A7049EC00ECA2BF /* pause_icon@2x.png in Resources */,
B6C6AE551A305ED1006BAF8F /* redphone.cer in Resources */,
B633C5B71A1D190B0059AC12 /* logo_intro@2x.png in Resources */,
E1370BE518A0686C00826894 /* r.caf in Resources */,
@ -2962,6 +2977,7 @@
B633C58F1A1D190B0059AC12 /* contacts@2x.png in Resources */,
E1370BE618A0686C00826894 /* sonarping.mp3 in Resources */,
B633C5961A1D190B0059AC12 /* DefaultContactImage.png in Resources */,
B10C9B5F1A7049EC00ECA2BF /* pause_icon.png in Resources */,
E148751218A06AFD002CC4F3 /* HelveticaNeueLTStd-Bd.otf in Resources */,
FCA52AE61A2B676C00CCADFA /* call_canceled@2x.png in Resources */,
FC3BD9861A30A62D005B96BB /* twitter@2x.png in Resources */,

Binary file not shown.

After

Width:  |  Height:  |  Size: 941 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 955 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

@ -21,7 +21,7 @@
- (BOOL)isImage;
- (BOOL)isVideo;
-(NSURL*)videoURL;
-(NSURL*)mediaURL;
+ (void)deleteAttachments;
@end

@ -62,7 +62,7 @@ NSString * const TSAttachementFileRelationshipEdge = @"TSAttachementFileEdge";
}
}
-(NSURL*) videoURL {
-(NSURL*) mediaURL {
return [NSURL fileURLWithPath:[self filePath]];
}

@ -48,7 +48,7 @@ dispatch_queue_t attachmentsQueue() {
for (PushMessageContentAttachmentPointer *pointer in attachmentsToRetrieve) {
TSAttachmentPointer *attachmentPointer = (content.group != nil && (content.group.type == PushMessageContentGroupContextTypeUpdate)) ? [[TSAttachmentPointer alloc] initWithIdentifier:pointer.id key:pointer.key contentType:pointer.contentType relay:message.relay avatarOfGroupId:content.group.id] : [[TSAttachmentPointer alloc] initWithIdentifier:pointer.id key:pointer.key contentType:pointer.contentType relay:message.relay];
if ([attachmentPointer.contentType hasPrefix:@"image/"]||[attachmentPointer.contentType hasPrefix:@"video/"]) {
if ([attachmentPointer.contentType hasPrefix:@"image/"]||[attachmentPointer.contentType hasPrefix:@"video/"] || [attachmentPointer.contentType hasPrefix:@"audio/"]) {
[attachmentPointer saveWithTransaction:transaction];
dispatch_async(attachmentsQueue(), ^{

@ -85,6 +85,7 @@ typedef enum : NSUInteger {
@property (nonatomic, retain) JSQMessagesBubbleImage *outgoingBubbleImageData;
@property (nonatomic, retain) JSQMessagesBubbleImage *incomingBubbleImageData;
@property (nonatomic, retain) JSQMessagesBubbleImage *outgoingMessageFailedImageData;
@property (nonatomic, strong) NSTimer *audioPlayerPoller;
@property (nonatomic, retain) NSTimer *readTimer;
@ -105,7 +106,7 @@ typedef enum : NSUInteger {
- (void)setupWithTSGroup:(TSGroupModel*)model {
[self.editingDatabaseConnection readWriteWithBlock:^(YapDatabaseReadWriteTransaction *transaction) {
self.thread = [TSGroupThread getOrCreateThreadWithGroupModel:model transaction:transaction];
TSOutgoingMessage *message = [[TSOutgoingMessage alloc] initWithTimestamp:[NSDate ows_millisecondTimeStamp] inThread:self.thread messageBody:@"" attachments:[[NSMutableArray alloc] init]];
message.groupMetaMessage = TSGroupMessageNew;
if(model.groupImage!=nil) {
@ -131,30 +132,32 @@ typedef enum : NSUInteger {
}
}
- (void)viewDidLoad {
[super viewDidLoad];
[super viewDidLoad];
[self.navigationController.navigationBar setTranslucent:NO];
[super viewDidLoad];
[self markAllMessagesAsRead];
[self initializeBubbles];
[self initializeTextView];
self.messageMappings = [[YapDatabaseViewMappings alloc] initWithGroups:@[self.thread.uniqueId]
view:TSMessageDatabaseViewExtensionName];
self.page = 0;
[self updateRangeOptionsForPage:self.page];
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction) {
[self.messageMappings updateWithTransaction:transaction];
}];
[self initializeToolbars];
[self initializeCollectionViewLayout];
self.senderId = ME_MESSAGE_IDENTIFIER
self.senderDisplayName = ME_MESSAGE_IDENTIFIER
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(startReadTimer)
name:UIApplicationWillEnterForegroundNotification object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(cancelReadTimer)
@ -170,9 +173,9 @@ typedef enum : NSUInteger {
-(void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
NSInteger numberOfMessages = (NSInteger)[self.messageMappings numberOfItemsInGroup:self.thread.uniqueId];
if (numberOfMessages > 0) {
NSIndexPath * lastCellIndexPath = [NSIndexPath indexPathForRow:numberOfMessages-1 inSection:0];
[self.collectionView scrollToItemAtIndexPath:lastCellIndexPath atScrollPosition:UICollectionViewScrollPositionBottom animated:NO];
@ -218,7 +221,7 @@ typedef enum : NSUInteger {
}
}
}
[self cancelReadTimer];
}
@ -230,11 +233,11 @@ typedef enum : NSUInteger {
- (IBAction)didSelectShow:(id)sender {
UIBarButtonItem *spaceEdge = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFixedSpace target:nil action:nil];
spaceEdge.width = 40;
UIBarButtonItem *spaceMiddleIcons = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFixedSpace target:nil action:nil];
spaceMiddleIcons.width = 61;
@ -242,12 +245,12 @@ typedef enum : NSUInteger {
if (!isGroupConversation) {
//UIBarButtonItem* contactAddOrLaunch = [[UIBarButtonItem alloc] initWithImage:[[UIImage imageNamed:@"contact-add@1x"] imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal] style:UIBarButtonItemStylePlain target:self action:nil];
UIBarButtonItem* contactSecurity = [[UIBarButtonItem alloc]initWithImage:[[UIImage imageNamed:@"contact-security@1x"] imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal] style:UIBarButtonItemStylePlain target:self action:@selector(showFingerprint)];
if ([self isRedPhoneReachable] && ![((TSContactThread*)_thread).contactIdentifier isEqualToString:[SignalKeyingStorage.localNumber toE164]]) {
UIBarButtonItem * callButton = [[UIBarButtonItem alloc] initWithImage:[[UIImage imageNamed:@"contact-call@1x"] imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal] style:UIBarButtonItemStylePlain target:self action:@selector(callAction)];
self.navController.dropDownToolbar.items = @[spaceEdge, callButton,spaceMiddleWords, contactSecurity, spaceEdge];
@ -259,9 +262,9 @@ typedef enum : NSUInteger {
else {
UIBarButtonItem *groupUpdateButton = [[UIBarButtonItem alloc] initWithTitle:@"Update" style:UIBarButtonItemStylePlain target:self action:@selector(updateGroup)];
UIBarButtonItem *groupLeaveButton = [[UIBarButtonItem alloc] initWithTitle:@"Leave" style:UIBarButtonItemStylePlain target:self action:@selector(leaveGroup)];
UIBarButtonItem *showGroupMembersButton = [[UIBarButtonItem alloc] initWithTitle:@"Members" style:UIBarButtonItemStylePlain target:self action:@selector(showGroupMembers)];
self.navController.dropDownToolbar.items =@[spaceEdge, groupUpdateButton, spaceMiddleWords, groupLeaveButton, spaceMiddleWords, showGroupMembersButton, spaceEdge];
}
for(UIButton *button in self.navController.dropDownToolbar.items) {
@ -285,7 +288,7 @@ typedef enum : NSUInteger {
}
-(void)initializeToolbars {
self.navController = (APNavigationController*)self.navigationController;
//self.navController.activeBarButtonTitle = @"Hide";
[self setNavigationTitle];
@ -295,23 +298,22 @@ typedef enum : NSUInteger {
-(void)initializeBubbles
{
// Build the three JSQ bubble images once: outgoing (blue), incoming (light gray),
// and a faded-blue variant used for unsent/failed outgoing messages.
JSQMessagesBubbleImageFactory *factory = [[JSQMessagesBubbleImageFactory alloc] init];
UIColor *outgoingColor = [UIColor ows_materialBlueColor];
UIColor *incomingColor = [UIColor jsq_messageBubbleLightGrayColor];
UIColor *failedColor = [UIColor ows_fadedBlueColor];
self.outgoingBubbleImageData = [factory outgoingMessagesBubbleImageWithColor:outgoingColor];
self.incomingBubbleImageData = [factory incomingMessagesBubbleImageWithColor:incomingColor];
self.outgoingMessageFailedImageData = [factory outgoingMessageFailedBubbleImageWithColor:failedColor];
}
-(void)initializeCollectionViewLayout
{
if (self.collectionView){
[self.collectionView.collectionViewLayout setMessageBubbleFont:[UIFont ows_regularFontWithSize:15.0f]];
self.collectionView.showsVerticalScrollIndicator = NO;
self.collectionView.showsHorizontalScrollIndicator = NO;
[self updateLoadEarlierVisible];
self.collectionView.collectionViewLayout.incomingAvatarViewSize = CGSizeZero;
self.collectionView.collectionViewLayout.outgoingAvatarViewSize = CGSizeZero;
}
@ -350,7 +352,7 @@ typedef enum : NSUInteger {
if ([self isRedPhoneReachable]) {
PhoneNumber *number = [self phoneNumberForThread];
Contact *contact = [[Environment.getCurrent contactsManager] latestContactForPhoneNumber:number];
[Environment.phoneManager initiateOutgoingCallToContact:contact atRemoteNumber:number];
} else {
DDLogWarn(@"Tried to initiate a call but contact has no RedPhone identifier");
@ -367,9 +369,9 @@ typedef enum : NSUInteger {
{
if (text.length > 0) {
[JSQSystemSoundPlayer jsq_playMessageSentSound];
TSOutgoingMessage *message = [[TSOutgoingMessage alloc] initWithTimestamp:[NSDate ows_millisecondTimeStamp] inThread:self.thread messageBody:text attachments:nil];
[[TSMessagesManager sharedManager] sendMessage:message inThread:self.thread];
[self finishSendingMessage];
}
@ -386,14 +388,14 @@ typedef enum : NSUInteger {
- (id<JSQMessageBubbleImageDataSource>)collectionView:(JSQMessagesCollectionView *)collectionView messageBubbleImageDataForItemAtIndexPath:(NSIndexPath *)indexPath
{
id<JSQMessageData> message = [self messageAtIndexPath:indexPath];
if ([message.senderId isEqualToString:self.senderId]) {
if (message.messageState == TSOutgoingMessageStateUnsent || message.messageState == TSOutgoingMessageStateAttemptingOut) {
return self.outgoingMessageFailedImageData;
}
return self.outgoingBubbleImageData;
}
return self.incomingBubbleImageData;
}
@ -407,7 +409,7 @@ typedef enum : NSUInteger {
- (UICollectionViewCell *)collectionView:(JSQMessagesCollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath
{
TSMessageAdapter * msg = [self messageAtIndexPath:indexPath];
switch (msg.messageType) {
case TSIncomingMessageAdapter:
return [self loadIncomingMessageCellForMessage:msg atIndexPath:indexPath];
@ -419,7 +421,7 @@ typedef enum : NSUInteger {
return [self loadInfoMessageCellForMessage:msg atIndexPath:indexPath];
case TSErrorMessageAdapter:
return [self loadErrorMessageCellForMessage:msg atIndexPath:indexPath];
default:
NSLog(@"Something went wrong");
return nil;
@ -437,7 +439,7 @@ typedef enum : NSUInteger {
cell.textView.linkTextAttributes = @{ NSForegroundColorAttributeName : cell.textView.textColor,
NSUnderlineStyleAttributeName : @(NSUnderlineStyleSingle | NSUnderlinePatternSolid) };
}
return cell;
}
@ -451,7 +453,7 @@ typedef enum : NSUInteger {
cell.textView.linkTextAttributes = @{ NSForegroundColorAttributeName : cell.textView.textColor,
NSUnderlineStyleAttributeName : @(NSUnderlineStyleSingle | NSUnderlinePatternSolid) };
}
return cell;
}
@ -481,7 +483,7 @@ typedef enum : NSUInteger {
if ([self showDateAtIndexPath:indexPath]) {
return kJSQMessagesCollectionViewCellLabelHeightDefault;
}
return 0.0f;
}
@ -493,9 +495,9 @@ typedef enum : NSUInteger {
}
else {
TSMessageAdapter *currentMessage = [self messageAtIndexPath:indexPath];
TSMessageAdapter *previousMessage = [self messageAtIndexPath:[NSIndexPath indexPathForItem:indexPath.row-1 inSection:indexPath.section]];
NSTimeInterval timeDifference = [currentMessage.date timeIntervalSinceDate:previousMessage.date];
if (timeDifference > kTSMessageSentDateShowTimeInterval) {
showDate = YES;
@ -506,19 +508,19 @@ typedef enum : NSUInteger {
-(NSAttributedString*)collectionView:(JSQMessagesCollectionView *)collectionView attributedTextForCellTopLabelAtIndexPath:(NSIndexPath *)indexPath
{
// Only cells that begin a new time interval get a timestamp label above them.
if (![self showDateAtIndexPath:indexPath]) {
return nil;
}
TSMessageAdapter *message = [self messageAtIndexPath:indexPath];
return [[JSQMessagesTimestampFormatter sharedFormatter] attributedTimestampForDate:message.date];
}
-(BOOL)shouldShowMessageStatusAtIndexPath:(NSIndexPath*)indexPath
{
TSMessageAdapter *currentMessage = [self messageAtIndexPath:indexPath];
if([self.thread isKindOfClass:[TSGroupThread class]]) {
return currentMessage.messageType == TSIncomingMessageAdapter;
@ -527,11 +529,11 @@ typedef enum : NSUInteger {
if (indexPath.item == [self.collectionView numberOfItemsInSection:indexPath.section]-1) {
return [self isMessageOutgoingAndDelivered:currentMessage];
}
if (![self isMessageOutgoingAndDelivered:currentMessage]) {
return NO;
}
TSMessageAdapter *nextMessage = [self nextOutgoingMessage:indexPath];
return ![self isMessageOutgoingAndDelivered:nextMessage];
}
@ -541,12 +543,12 @@ typedef enum : NSUInteger {
{
TSMessageAdapter * nextMessage = [self messageAtIndexPath:[NSIndexPath indexPathForRow:indexPath.row+1 inSection:indexPath.section]];
int i = 1;
while (indexPath.item+i < [self.collectionView numberOfItemsInSection:indexPath.section]-1 && ![self isMessageOutgoingAndDelivered:nextMessage]) {
i++;
nextMessage = [self messageAtIndexPath:[NSIndexPath indexPathForRow:indexPath.row+i inSection:indexPath.section]];
}
return nextMessage;
}
@ -566,7 +568,7 @@ typedef enum : NSUInteger {
name = name ? name : msg.senderId;
NSMutableAttributedString * attrStr = [[NSMutableAttributedString alloc]initWithString:name];
[attrStr appendAttributedString:[NSAttributedString attributedStringWithAttachment:textAttachment]];
return (NSAttributedString*)attrStr;
}
else {
@ -575,7 +577,7 @@ typedef enum : NSUInteger {
textAttachment.bounds = CGRectMake(0, 0, 11.0f, 10.0f);
NSMutableAttributedString * attrStr = [[NSMutableAttributedString alloc]initWithString:@"Delivered"];
[attrStr appendAttributedString:[NSAttributedString attributedStringWithAttachment:textAttachment]];
return (NSAttributedString*)attrStr;
}
}
@ -594,7 +596,7 @@ typedef enum : NSUInteger {
else if (msg.messageType == TSOutgoingMessageAdapter) {
return 16.0f;
}
return 0.0f;
}
@ -605,20 +607,20 @@ typedef enum : NSUInteger {
{
TSMessageAdapter *messageItem = [collectionView.dataSource collectionView:collectionView messageDataForItemAtIndexPath:indexPath];
TSInteraction *interaction = [self interactionAtIndexPath:indexPath];
switch (messageItem.messageType) {
case TSOutgoingMessageAdapter:
if (messageItem.messageState == TSOutgoingMessageStateUnsent) {
[self handleUnsentMessageTap:(TSOutgoingMessage*)interaction];
}
case TSIncomingMessageAdapter:{
BOOL isMediaMessage = [messageItem isMediaMessage];
if (isMediaMessage) {
if([[messageItem media] isKindOfClass:[TSAttachmentAdapter class]]) {
TSAttachmentAdapter* messageMedia = (TSAttachmentAdapter*)[messageItem media];
if ([messageMedia isImage]) {
tappedImage = ((UIImageView*)[messageMedia mediaView]).image;
CGRect convertedRect = [self.collectionView convertRect:[collectionView cellForItemAtIndexPath:indexPath].frame toView:nil];
@ -626,11 +628,11 @@ typedef enum : NSUInteger {
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction) {
attachment = [TSAttachment fetchObjectWithUniqueID:messageMedia.attachmentId transaction:transaction];
}];
if ([attachment isKindOfClass:[TSAttachmentStream class]]) {
TSAttachmentStream *attStream = (TSAttachmentStream*)attachment;
FullImageViewController * vc = [[FullImageViewController alloc] initWithAttachment:attStream fromRect:convertedRect forInteraction:[self interactionAtIndexPath:indexPath]];
[self presentViewController:vc animated:YES completion:^{
[[UIApplication sharedApplication] setStatusBarStyle:UIStatusBarStyleLightContent];
}];
@ -643,39 +645,84 @@ typedef enum : NSUInteger {
// fileurl disappeared should look up in db as before. will do refactor
// full screen, check this setup with a .mov
TSVideoAttachmentAdapter* messageMedia = (TSVideoAttachmentAdapter*)[messageItem media];
__block TSAttachment *attachment = nil;
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction) {
attachment = [TSAttachment fetchObjectWithUniqueID:messageMedia.attachmentId transaction:transaction];
}];
if ([attachment isKindOfClass:[TSAttachmentStream class]]) {
TSAttachmentStream *attStream = (TSAttachmentStream*)attachment;
NSFileManager *fileManager = [NSFileManager defaultManager];
if([messageMedia isVideo]) {
if ([fileManager fileExistsAtPath:[attStream.videoURL path]]) {
_videoPlayer = [[MPMoviePlayerController alloc] initWithContentURL:attStream.videoURL];
if ([fileManager fileExistsAtPath:[attStream.mediaURL path]]) {
_videoPlayer = [[MPMoviePlayerController alloc] initWithContentURL:attStream.mediaURL];
[_videoPlayer prepareToPlay];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlayBackDidFinish:)
name:MPMoviePlayerPlaybackDidFinishNotification
object: _videoPlayer];
_videoPlayer.controlStyle = MPMovieControlStyleDefault;
_videoPlayer.shouldAutoplay = YES;
[self.view addSubview: _videoPlayer.view];
[_videoPlayer setFullscreen:YES animated:YES];
}
}
else if([messageMedia isAudio]){
DDLogDebug(@"audio location is %@",attStream.videoURL);
NSError *error;
_audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:attStream.videoURL error:&error];
DDLogDebug(@"audio debug is %@",error);
[_audioPlayer prepareToPlay];
[_audioPlayer play];
} else if([messageMedia isAudio]){
if (messageMedia.isAudioPlaying) {
// if you had started playing an audio msg and now you're tapping it to pause
messageMedia.isAudioPlaying = NO;
[_audioPlayer pause];
messageMedia.isPaused = YES;
[_audioPlayerPoller invalidate];
double current = [_audioPlayer currentTime]/[_audioPlayer duration];
[messageMedia setAudioProgressFromFloat:(float)current];
[messageMedia setAudioIconToPlay];
} else {
BOOL isResuming = NO;
[_audioPlayerPoller invalidate];
// loop through all the other bubbles and set their isPlaying to false
NSInteger num_bubbles = [self collectionView:collectionView numberOfItemsInSection:0];
for (NSInteger i=0; i<num_bubbles; i++) {
NSIndexPath *index_path = [NSIndexPath indexPathForRow:i inSection:0];
TSMessageAdapter *msgAdapter = [collectionView.dataSource collectionView:collectionView messageDataForItemAtIndexPath:index_path];
if (msgAdapter.messageType == TSIncomingMessageAdapter && msgAdapter.isMediaMessage) {
TSVideoAttachmentAdapter* msgMedia = (TSVideoAttachmentAdapter*)[msgAdapter media];
if ([msgMedia isAudio]) {
if (msgMedia == messageMedia && messageMedia.isPaused) {
isResuming = YES;
} else {
msgMedia.isAudioPlaying = NO;
msgMedia.isPaused = NO;
[msgMedia setAudioIconToPlay];
[msgMedia setAudioProgressFromFloat:0];
}
}
}
}
if (isResuming) {
// if you had paused an audio msg and now you're tapping to resume
[_audioPlayer prepareToPlay];
[_audioPlayer play];
[messageMedia setAudioIconToPause];
messageMedia.isAudioPlaying = YES;
messageMedia.isPaused = NO;
_audioPlayerPoller = [NSTimer scheduledTimerWithTimeInterval:.01 target:self selector:@selector(audioPlayerUpdated:) userInfo:@{@"adapter": messageMedia} repeats:YES];
} else {
// if you are tapping an audio msg for the first time to play
messageMedia.isAudioPlaying = YES;
NSError *error;
_audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:attStream.mediaURL error:&error];
[_audioPlayer prepareToPlay];
[_audioPlayer play];
[messageMedia setAudioIconToPause];
_audioPlayer.delegate = self;
_audioPlayerPoller = [NSTimer scheduledTimerWithTimeInterval:.01 target:self selector:@selector(audioPlayerUpdated:) userInfo:@{@"adapter": messageMedia} repeats:YES];
}
}
}
}
}
@ -694,7 +741,6 @@ typedef enum : NSUInteger {
}
}
-(NSURL*) changeFile:(NSURL*)originalFile toHaveExtension:(NSString*)extension {
NSFileManager *fileManager = [NSFileManager defaultManager];
NSString* newPath = [[originalFile path] stringByAppendingPathExtension:extension];
@ -714,47 +760,47 @@ typedef enum : NSUInteger {
if ([self shouldShowLoadEarlierMessages]) {
self.page++;
}
NSInteger item = (NSInteger)[self scrollToItem];
[self updateRangeOptionsForPage:self.page];
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction) {
[self.messageMappings updateWithTransaction:transaction];
}];
[self updateLayoutForEarlierMessagesWithOffset:item];
}
-(BOOL)shouldShowLoadEarlierMessages
{
// "Load earlier" is shown while the view mappings hold fewer messages for this
// thread than the database view actually contains.
__block BOOL hasOlderMessages = YES;
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction){
NSUInteger visibleCount = [self.messageMappings numberOfItemsInGroup:self.thread.uniqueId];
NSUInteger totalCount = [[transaction ext:TSMessageDatabaseViewExtensionName] numberOfItemsInGroup:self.thread.uniqueId];
hasOlderMessages = visibleCount < totalCount;
}];
return hasOlderMessages;
}
// Computes the collection-view item index to restore scroll position to after
// loading an earlier page of messages, so the previously-topmost message stays
// in view.
// Returns: the item index (0-based); one less than the number of newly loaded
// items, clamped so 0 maps to 0.
-(NSUInteger)scrollToItem
{
// Default assumes a full page (kYapDatabaseRangeLength) was prepended per page step.
// NOTE(review): if numberOfItemsInGroup ever exceeds kYapDatabaseRangeLength*(page+1)
// this NSUInteger subtraction would wrap — presumably the range options cap it; verify.
__block NSUInteger item = kYapDatabaseRangeLength*(self.page+1) - [self.messageMappings numberOfItemsInGroup:self.thread.uniqueId];
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction) {
NSUInteger numberOfVisibleMessages = [self.messageMappings numberOfItemsInGroup:self.thread.uniqueId] ;
NSUInteger numberOfTotalMessages = [[transaction ext:TSMessageDatabaseViewExtensionName] numberOfItemsInGroup:self.thread.uniqueId] ;
NSUInteger numberOfMessagesToLoad = numberOfTotalMessages - numberOfVisibleMessages ;
// When fewer than a full range remains in the database, only that many
// items were actually loaded — use the smaller offset instead.
BOOL canLoadFullRange = numberOfMessagesToLoad >= kYapDatabaseRangeLength;
if (!canLoadFullRange) {
item = numberOfMessagesToLoad;
}
}];
// Convert a count into a 0-based index, leaving 0 (nothing loaded) unchanged.
return item == 0 ? item : item - 1;
}
@ -767,21 +813,21 @@ typedef enum : NSUInteger {
{
[self.collectionView.collectionViewLayout invalidateLayoutWithContext:[JSQMessagesCollectionViewFlowLayoutInvalidationContext context]];
[self.collectionView reloadData];
[self.collectionView scrollToItemAtIndexPath:[NSIndexPath indexPathForItem:offset inSection:0] atScrollPosition:UICollectionViewScrollPositionTop animated:NO];
[self updateLoadEarlierVisible];
}
-(void)updateRangeOptionsForPage:(NSUInteger)page
{
// Each page widens the flexible range by kYapDatabaseRangeLength, anchored at
// the end of the view (the newest messages).
NSUInteger desiredLength = kYapDatabaseRangeLength * (page + 1);
YapDatabaseViewRangeOptions *options = [YapDatabaseViewRangeOptions flexibleRangeWithLength:desiredLength offset:0 from:YapDatabaseViewEnd];
options.minLength = kYapDatabaseRangeMinLength;
options.maxLength = kYapDatabaseRangeMaxLength;
[self.messageMappings setRangeOptions:options forGroup:self.thread.uniqueId];
}
#pragma mark Bubble User Actions
@ -815,7 +861,7 @@ typedef enum : NSUInteger {
NSString *newKeyFingerprint = [errorMessage newIdentityKey];
NSString *messageString = [NSString stringWithFormat:@"Do you want to accept %@'s new identity key: %@", _thread.name, newKeyFingerprint];
NSArray *actions = @[@"Accept new identity key", @"Copy new identity key to pasteboard"];
[self.inputToolbar.contentView.textView resignFirstResponder];
[DJWActionSheet showInView:self.parentViewController.view withTitle:messageString cancelButtonTitle:@"Cancel" destructiveButtonTitle:@"Delete" otherButtonTitles:actions tapBlock:^(DJWActionSheet *actionSheet, NSInteger tappedButtonIndex) {
@ -844,7 +890,7 @@ typedef enum : NSUInteger {
#pragma mark - Navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
if ([segue.identifier isEqualToString:kFingerprintSegueIdentifier]){
FingerprintViewController *vc = [segue destinationViewController];
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction) {
@ -878,13 +924,13 @@ typedef enum : NSUInteger {
picker.delegate = self;
picker.allowsEditing = NO;
picker.sourceType = UIImagePickerControllerSourceTypeCamera;
if ([UIImagePickerController isSourceTypeAvailable:
UIImagePickerControllerSourceTypeCamera]) {
picker.mediaTypes = @[(NSString*)kUTTypeImage,(NSString*)kUTTypeMovie];
[self presentViewController:picker animated:YES completion:NULL];
}
}
-(void)chooseFromLibrary:(kMediaTypes)mediaType
@ -892,15 +938,15 @@ typedef enum : NSUInteger {
UIImagePickerController *picker = [[UIImagePickerController alloc] init];
picker.delegate = self;
picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypePhotoLibrary])
{
NSArray* pictureTypeArray = [[NSArray alloc] initWithObjects:(NSString *)kUTTypeImage, nil];
NSArray* videoTypeArray = [[NSArray alloc] initWithObjects:(NSString *)kUTTypeMovie, (NSString*)kUTTypeVideo, nil];
picker.mediaTypes = (mediaType == kMediaTypePicture) ? pictureTypeArray : videoTypeArray;
[self presentViewController:picker animated:YES completion:nil];
}
}
@ -926,24 +972,24 @@ typedef enum : NSUInteger {
[self sendQualityAdjustedAttachment:videoURL];
}
else {
UIImage *picture_camera = [[info objectForKey:UIImagePickerControllerOriginalImage] normalizedImage];
if(picture_camera) {
DDLogVerbose(@"Sending picture attachement ...");
[self sendMessageAttachment:[self qualityAdjustedAttachmentForImage:picture_camera] ofType:@"image/jpeg"];
}
}
}
-(void) sendMessageAttachment:(NSData*)attachmentData ofType:(NSString*)attachmentType {
// Wrap the attachment data in a new outgoing message (no text body) and hand
// it to the shared messages manager for upload and delivery.
TSOutgoingMessage *outgoingMessage = [[TSOutgoingMessage alloc] initWithTimestamp:[NSDate ows_millisecondTimeStamp]
                                                                         inThread:self.thread
                                                                      messageBody:nil
                                                                      attachments:[NSMutableArray array]];
[[TSMessagesManager sharedManager] sendAttachment:attachmentData
                                      contentType:attachmentType
                                        inMessage:outgoingMessage
                                           thread:self.thread];
[self finishSendingMessage];
// Dismiss the media picker that initiated this send.
[self dismissViewControllerAnimated:YES completion:nil];
}
-(void)sendQualityAdjustedAttachment:(NSURL*)movieURL {
@ -952,47 +998,47 @@ typedef enum : NSUInteger {
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:video presetName:AVAssetExportPresetMediumQuality];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeMPEG4;
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
basePath = [basePath stringByAppendingPathComponent:@"videos"];
if (![[NSFileManager defaultManager] fileExistsAtPath:basePath]) {
[[NSFileManager defaultManager] createDirectoryAtPath:basePath withIntermediateDirectories:YES attributes:nil error:nil];
}
NSURL *compressedVideoUrl = [NSURL fileURLWithPath:basePath];
long currentTime = [[NSDate date] timeIntervalSince1970];
NSString *strImageName = [NSString stringWithFormat:@"%ld",currentTime];
compressedVideoUrl=[compressedVideoUrl URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4",strImageName]];
exportSession.outputURL = compressedVideoUrl;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
}];
while(exportSession.progress!=1){
}
[self sendMessageAttachment:[NSData dataWithContentsOfURL:compressedVideoUrl] ofType:@"video/mp4"];
#if 0
return [NSData dataWithContentsOfURL:movieURL];
#endif
#if 0
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
// Create the main serialization queue.
self.mainSerializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
NSString *rwAudioSerializationQueueDescription = [NSString stringWithFormat:@"%@ rw audio serialization queue", self];
// Create the serialization queue to use for reading and writing the audio data.
self.rwAudioSerializationQueue = dispatch_queue_create([rwAudioSerializationQueueDescription UTF8String], NULL);
NSString *rwVideoSerializationQueueDescription = [NSString stringWithFormat:@"%@ rw video serialization queue", self];
// Create the serialization queue to use for reading and writing the video data.
self.rwVideoSerializationQueue = dispatch_queue_create([rwVideoSerializationQueueDescription UTF8String], NULL);
int videoWidth = 1920;
int videoHeight = 1920;
int desiredKeyframeInterval = 2;
@ -1004,19 +1050,19 @@ typedef enum : NSUInteger {
error:&error];
NSParameterAssert(videoWriter);
NSDictionary* settings = @{AVVideoCodecKey:AVVideoCodecH264,
AVVideoCompressionPropertiesKey:@{AVVideoAverageBitRateKey:[NSNumber numberWithInt:desiredBitrate],AVVideoProfileLevelKey:AVVideoProfileLevelH264Main31},
AVVideoWidthKey: [NSNumber numberWithInt:videoWidth],
AVVideoHeightKey:[NSNumber numberWithInt:videoHeight]};
AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
#endif
}
@ -1031,7 +1077,7 @@ typedef enum : NSUInteger {
switch ([Environment.preferences imageUploadQuality]) {
case TSImageQualityUncropped:
return image;
case TSImageQualityHigh:
correctedWidth = 2048;
break;
@ -1044,7 +1090,7 @@ typedef enum : NSUInteger {
default:
break;
}
return [self imageScaled:image toMaxSize:correctedWidth];
}
@ -1052,21 +1098,21 @@ typedef enum : NSUInteger {
{
CGFloat scaleFactor;
CGFloat aspectRatio = image.size.height / image.size.width;
if( aspectRatio > 1 ) {
scaleFactor = size / image.size.width;
}
else {
scaleFactor = size / image.size.height;
}
CGSize newSize = CGSizeMake(image.size.width * scaleFactor, image.size.height * scaleFactor);
UIGraphicsBeginImageContext(newSize);
[image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
UIImage* updatedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return updatedImage;
}
@ -1122,20 +1168,20 @@ typedef enum : NSUInteger {
// and get the change-set(s) as applies to my view and mappings configuration.
NSArray *notifications = [self.uiDatabaseConnection beginLongLivedReadTransaction];
NSArray *messageRowChanges = nil;
[[self.uiDatabaseConnection ext:TSMessageDatabaseViewExtensionName] getSectionChanges:nil
rowChanges:&messageRowChanges
forNotifications:notifications
withMappings:self.messageMappings];
__block BOOL scrollToBottom = NO;
if (!messageRowChanges) {
return;
}
[self.collectionView performBatchUpdates:^{
for (YapDatabaseViewRowChange *rowChange in messageRowChanges)
{
switch (rowChange.type)
@ -1164,16 +1210,16 @@ typedef enum : NSUInteger {
case YapDatabaseViewChangeUpdate :
{
NSMutableArray *rowsToUpdate = [@[rowChange.indexPath] mutableCopy];
if (_lastDeliveredMessageIndexPath) {
[rowsToUpdate addObject:_lastDeliveredMessageIndexPath];
}
for (NSIndexPath* indexPath in rowsToUpdate) {
TSInteraction * interaction = [self interactionAtIndexPath:indexPath];
[[TSAdapterCacheManager sharedManager] cacheAdapter:[TSMessageAdapter messageViewDataWithInteraction:interaction inThread:self.thread] forInteractionId:interaction.uniqueId];
}
[self.collectionView reloadItemsAtIndexPaths:rowsToUpdate];
scrollToBottom = YES;
break;
@ -1208,24 +1254,24 @@ typedef enum : NSUInteger {
NSUInteger row = (NSUInteger)indexPath.row;
NSUInteger section = (NSUInteger)indexPath.section;
NSUInteger numberOfItemsInSection = [self.messageMappings numberOfItemsInSection:section];
NSAssert(row < numberOfItemsInSection, @"Cannot fetch message because row %d is >= numberOfItemsInSection %d", (int)row, (int)numberOfItemsInSection);
message = [viewTransaction objectAtRow:row inSection:section withMappings:self.messageMappings];
NSParameterAssert(message != nil);
}];
return message;
}
- (TSMessageAdapter*)messageAtIndexPath:(NSIndexPath *)indexPath {
    // Return the view-model adapter for the interaction at this index path,
    // building and caching one on first access.
    TSInteraction *interaction = [self interactionAtIndexPath:indexPath];
    TSAdapterCacheManager *cache = [TSAdapterCacheManager sharedManager];
    if (![cache containsCacheEntryForInteractionId:interaction.uniqueId]) {
        TSMessageAdapter *adapter = [TSMessageAdapter messageViewDataWithInteraction:interaction
                                                                            inThread:self.thread];
        [cache cacheAdapter:adapter forInteractionId:interaction.uniqueId];
    }
    return [cache adapterForInteractionId:interaction.uniqueId];
}
@ -1233,15 +1279,80 @@ typedef enum : NSUInteger {
#pragma mark group action view
#pragma mark - Audio
-(void)recordAudio {
    // Prepare (but do not start) an AAC recording at
    // <Documents>/<millisecond timestamp>.m4a; the timestamped name avoids
    // file-name collisions between recordings.
    NSArray *pathComponents = @[
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
        [NSString stringWithFormat:@"%lld.m4a", [NSDate ows_millisecondTimeStamp]]
    ];
    NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];

    // Configure the shared audio session for simultaneous playback and recording.
    NSError *sessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    if (![session setCategory:AVAudioSessionCategoryPlayAndRecord error:&sessionError]) {
        NSLog(@"Failed to set audio session category: %@", sessionError);
    }

    // AAC, 44.1 kHz, 2 channels — matches the "audio/m4a" content type sent
    // by -audioRecorderDidFinishRecording:successfully:.
    NSDictionary *recordSetting = @{
        AVFormatIDKey : @(kAudioFormatMPEG4AAC),
        AVSampleRateKey : @44100.0f,
        AVNumberOfChannelsKey : @2,
    };

    // Initiate and prepare the recorder, surfacing (instead of discarding)
    // any initialization error.
    NSError *recorderError = nil;
    _audioRecorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL
                                                 settings:recordSetting
                                                    error:&recorderError];
    if (!_audioRecorder) {
        NSLog(@"Failed to create audio recorder: %@", recorderError);
        return;
    }
    _audioRecorder.delegate = self;
    _audioRecorder.meteringEnabled = YES;
    [_audioRecorder prepareToRecord];
}
- (void)audioPlayerUpdated:(NSTimer*)timer {
    // Timer poll callback: push the current playback fraction and the
    // remaining time into the audio bubble referenced by the timer's userInfo.
    TSVideoAttachmentAdapter *messageMedia = [timer userInfo][@"adapter"];
    NSTimeInterval duration = [_audioPlayer duration];
    NSTimeInterval current = [_audioPlayer currentTime];
    // Guard against a zero-length file: current/duration would otherwise be NaN.
    float progress = (duration > 0) ? (float)(current / duration) : 0.0f;
    [messageMedia setAudioProgressFromFloat:progress];
    [messageMedia setDurationOfAudio:(duration - current)];
}
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag {
    // Playback finished: stop polling for progress updates.
    [_audioPlayerPoller invalidate];

    // Walk every bubble in the conversation and reset incoming audio
    // attachments back to their idle state (progress 0, play icon, no label).
    JSQMessagesCollectionView *collectionView = self.collectionView;
    NSInteger bubbleCount = [self collectionView:collectionView numberOfItemsInSection:0];
    for (NSInteger row = 0; row < bubbleCount; row++) {
        NSIndexPath *indexPath = [NSIndexPath indexPathForRow:row inSection:0];
        TSMessageAdapter *adapter = [collectionView.dataSource collectionView:collectionView
                                               messageDataForItemAtIndexPath:indexPath];
        if (adapter.messageType != TSIncomingMessageAdapter || !adapter.isMediaMessage) {
            continue;
        }
        TSVideoAttachmentAdapter *media = (TSVideoAttachmentAdapter *)[adapter media];
        if ([media isAudio]) {
            [media setAudioProgressFromFloat:0];
            [media setAudioIconToPlay];
            [media removeDurationLabel];
        }
    }
}
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder
                           successfully:(BOOL)flag {
    // Only send the recording as an attachment when it completed successfully.
    if (!flag) {
        return;
    }
    NSData *audioData = [NSData dataWithContentsOfURL:recorder.url];
    [self sendMessageAttachment:audioData ofType:@"audio/m4a"];
}
#pragma mark Accessory View
-(void)didPressAccessoryButton:(UIButton *)sender
{
[self.inputToolbar.contentView.textView resignFirstResponder];
UIView *presenter = self.parentViewController.view;
[DJWActionSheet showInView:presenter
withTitle:nil
cancelButtonTitle:@"Cancel"
@ -1274,39 +1385,6 @@ typedef enum : NSUInteger {
}];
}
// NOTE(review): this is a line-for-line duplicate of the -recordAudio
// implementation that also appears earlier in this file (under
// "#pragma mark - Audio"); one of the two copies should be removed.
-(void)recordAudio {
    // Define the recorder setting
    // Output file: <Documents>/<millisecond timestamp>.m4a — the timestamp
    // keeps recordings from colliding on a file name.
    NSArray *pathComponents = [NSArray arrayWithObjects:
                               [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
                               [NSString stringWithFormat:@"%lld.m4a",[NSDate ows_millisecondTimeStamp]],
                               nil];
    NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];
    // Setup audio session
    // NOTE(review): the category-set error is discarded (error:nil).
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    // AAC at 44.1 kHz, 2 channels — matches the "audio/m4a" content type
    // sent when recording finishes.
    NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
    [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
    [recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
    [recordSetting setValue:[NSNumber numberWithInt: 2] forKey:AVNumberOfChannelsKey];
    // Initiate and prepare the recorder
    // NOTE(review): initialization error is discarded (error:NULL) and the
    // recorder is not nil-checked before being configured.
    _audioRecorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:NULL];
    _audioRecorder.delegate = self;
    _audioRecorder.meteringEnabled = YES;
    [_audioRecorder prepareToRecord];
}
// NOTE(review): duplicate of the -audioRecorderDidFinishRecording:successfully:
// implementation that also appears earlier in this file; one copy should be removed.
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder
                           successfully:(BOOL)flag {
    // Only send the recording as an attachment when it completed successfully.
    if(flag) {
        [self sendMessageAttachment:[NSData dataWithContentsOfURL:recorder.url] ofType:@"audio/m4a"];
    }
}
- (void)markAllMessagesAsRead {
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction) {
YapDatabaseViewTransaction *viewTransaction = [transaction ext:TSUnreadDatabaseViewExtensionName];
@ -1326,7 +1404,7 @@ typedef enum : NSUInteger {
if (action == @selector(delete:)) {
return YES;
}
return [super collectionView:collectionView canPerformAction:action forItemAtIndexPath:indexPath withSender:sender];
}
@ -1376,7 +1454,7 @@ typedef enum : NSUInteger {
else {
[[TSMessagesManager sharedManager] sendMessage:message inThread:gThread];
}
self.thread = gThread;
}];
}

@ -48,6 +48,7 @@
- (UIView *)mediaView
{
NSLog(@"attachment adapter");
if (self.image == nil) {
return nil;
}

@ -14,13 +14,18 @@
@property NSString *attachmentId;
@property (nonatomic,strong) NSString* contentType;
@property (nonatomic) BOOL isAudioPlaying;
@property (nonatomic) BOOL isPaused;
- (instancetype)initWithAttachment:(TSAttachmentStream*)attachment;
- (BOOL)isImage;
- (BOOL)isAudio;
- (BOOL)isVideo;
- (void)setAudioProgressFromFloat:(float)progress;
- (void)setAudioIconToPlay;
- (void)setAudioIconToPause;
- (void)setDurationOfAudio:(NSTimeInterval)duration;
- (void)removeDurationLabel;
@end

@ -16,31 +16,34 @@
#import "TSNetworkManager.h"
#import "UIColor+OWS.h"
#define AUDIO_BAR_HEIGHT 30;
// Private state for TSVideoAttachmentAdapter: the lazily-built video
// thumbnail bubble and the audio progress-bar bubble.
@interface TSVideoAttachmentAdapter ()

// Video thumbnail shown in the bubble (nil for audio attachments).
@property UIImage *image;
// Lazily-built bubble view, created in -mediaView and reused afterwards.
@property (strong, nonatomic) UIImageView *cachedImageView;
// Play-triangle overlay, unhidden once the video has downloaded.
@property (strong, nonatomic) UIImageView *playButton;
// Dimming layer shown while the attachment is still transferring.
@property (strong, nonatomic) CALayer *maskLayer;
// Circular indicator driven by "attachmentUploadProgress" notifications.
@property (strong, nonatomic) FFCircularProgressView *progressView;
// Backing attachment record this adapter renders from.
@property (strong, nonatomic) TSAttachmentStream *attachment;
// NOTE(review): NSString declared strong; copy is the conventional choice.
@property (strong, nonatomic) NSString *videoURL;
// Playback progress bar used for audio attachments.
@property (strong, nonatomic) UIProgressView *audioProgress;
// Play/pause icon overlaid on the audio progress bar.
@property (strong, nonatomic) UIImageView *playPauseButton;
// Remaining-time label ("m:ss") shown during audio playback.
@property (nonatomic) UILabel *durationLabel;

@end
@implementation TSVideoAttachmentAdapter
- (instancetype)initWithAttachment:(TSAttachmentStream*)attachment{
    // Designated initializer: hand the attachment's on-disk media URL to
    // JSQ's media item and cache the metadata this adapter renders from.
    //
    // Fixes diff-merge residue in the previous text: a stale duplicate
    // `[super initWithFileURL:[attachment videoURL] ...]` call alongside the
    // current mediaURL-based one, a stray `;` after `if (self) {`, and a
    // leftover debug NSLog.
    self = [super initWithFileURL:[attachment mediaURL] isReadyToPlay:YES];
    if (self) {
        _image = attachment.image;
        _cachedImageView = nil; // built lazily in -mediaView
        _attachmentId = attachment.uniqueId;
        _contentType = attachment.contentType;
        _attachment = attachment;
    }
    return self;
}
@ -58,53 +61,121 @@
return [_contentType containsString:@"video/"];
}
-(void) setAudioProgressFromFloat:(float)progress {
    // Mirror the playback position onto the bubble's progress bar.
    self.audioProgress.progress = progress;
}
// Shared implementation for the play/pause toggle: replace the icon image
// view that sits on top of the audio progress bar. (Deduplicates the two
// previously copy-pasted method bodies.)
- (void)replaceAudioIconWithImageNamed:(NSString *)imageName {
    [_playPauseButton removeFromSuperview];
    _playPauseButton = [[UIImageView alloc] initWithImage:[UIImage imageNamed:imageName]];
    _playPauseButton.frame = CGRectMake(10, 8, 10, 14);
    [_audioProgress addSubview:_playPauseButton];
}

// Show the pause icon (audio is playing).
-(void) setAudioIconToPause {
    [self replaceAudioIconWithImageNamed:@"pause_icon"];
}

// Show the play icon (audio is stopped or paused).
-(void) setAudioIconToPlay {
    [self replaceAudioIconWithImageNamed:@"play_icon"];
}
-(void) setDurationOfAudio:(NSTimeInterval)duration {
    // Replace the countdown label with one showing the remaining time as "m:ss",
    // right-aligned over the audio progress bar.
    [_durationLabel removeFromSuperview];

    int totalSeconds = (int)duration;
    int minutes = totalSeconds / 60;
    int seconds = totalSeconds % 60;
    NSString *labelText = [NSString stringWithFormat:@"%01d:%02d", minutes, seconds];

    CGSize size = [self mediaViewDisplaySize];
    _durationLabel = [[UILabel alloc] initWithFrame:CGRectMake(size.width - 40, 0, 50, 30)];
    _durationLabel.text = labelText;
    _durationLabel.textColor = [UIColor whiteColor];
    [_audioProgress addSubview:_durationLabel];
}
-(void) removeDurationLabel {
    // Clear the remaining-time label when playback resets.
    [self.durationLabel removeFromSuperview];
}
#pragma mark - JSQMessageMediaData protocol
- (UIView *)mediaView
{
    // Build (or return the cached) bubble content view.
    //
    // Fixes diff-merge residue in the previous text: the old pre-audio
    // implementation was interleaved with the new isVideo/isAudio version
    // (duplicate thumbnail-setup stanza and a duplicate notification
    // registration), leaving the method unbalanced.
    CGSize size = [self mediaViewDisplaySize];

    if ([self isVideo]) {
        // Lazily build the thumbnail bubble with a play button, a dimming
        // mask, and a circular transfer-progress indicator.
        if (self.cachedImageView == nil) {
            UIImageView *imageView = [[UIImageView alloc] initWithImage:self.image];
            imageView.frame = CGRectMake(0.0f, 0.0f, size.width, size.height);
            imageView.contentMode = UIViewContentModeScaleAspectFill;
            imageView.clipsToBounds = YES;
            [JSQMessagesMediaViewBubbleImageMasker applyBubbleImageMaskToMediaView:imageView
                                                                        isOutgoing:self.appliesMediaViewMaskAsOutgoing];
            self.cachedImageView = imageView;

            UIImage *img = [UIImage imageNamed:@"play_button"];
            _playButton = [[UIImageView alloc] initWithImage:img];
            _playButton.frame = CGRectMake((size.width / 2) - 18, (size.height / 2) - 18, 37, 37);
            [self.cachedImageView addSubview:_playButton];
            _playButton.hidden = YES;

            _maskLayer = [CALayer layer];
            [_maskLayer setBackgroundColor:[UIColor blackColor].CGColor];
            [_maskLayer setOpacity:0.4f];
            [_maskLayer setFrame:self.cachedImageView.frame];
            [self.cachedImageView.layer addSublayer:_maskLayer];

            _progressView = [[FFCircularProgressView alloc]
                initWithFrame:CGRectMake((size.width / 2) - 18, (size.height / 2) - 18, 37, 37)];
            [_cachedImageView addSubview:_progressView];

            // Already-downloaded attachments skip the transfer chrome.
            if (_attachment.isDownloaded) {
                _playButton.hidden = NO;
                _maskLayer.hidden = YES;
                _progressView.hidden = YES;
            }
            [[NSNotificationCenter defaultCenter] addObserver:self
                                                     selector:@selector(attachmentUploadProgress:)
                                                         name:@"attachmentUploadProgress"
                                                       object:nil];
        }
    } else if ([self isAudio]) {
        // Audio bubble: a slim progress bar with a play icon overlay.
        // NOTE(review): the previous text also allocated a `backgroundImage`
        // UIImageView here that was never added to any view hierarchy; that
        // dead local has been removed.
        _audioProgress = [[UIProgressView alloc] initWithFrame:CGRectMake(0, 0, size.width, 4)];
        _playPauseButton = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"play_icon"]];
        _playPauseButton.frame = CGRectMake(10, 8, 10, 14);
        [_audioProgress addSubview:_playPauseButton];
        return _audioProgress;
    }
    return self.cachedImageView;
}
- (CGSize)mediaViewDisplaySize
{
    // Videos use the standard JSQ media bubble size; audio bubbles keep the
    // standard width but clamp the height to the fixed audio-bar height.
    //
    // Fix: the previous version left `mediaDisplaySize` uninitialized
    // (undefined behavior) when the attachment was neither video nor audio;
    // it now defaults to the superclass size.
    CGSize mediaDisplaySize = [super mediaViewDisplaySize];
    if ([self isAudio]) {
        mediaDisplaySize.height = AUDIO_BAR_HEIGHT
    }
    return mediaDisplaySize;
}
- (UIView *)mediaPlaceholderView
{
    // While the media is loading, show the same view as the finished state.
    return [self mediaView];
}
- (NSUInteger)hash
{
    // This adapter adds no hashed state; defer entirely to the superclass.
    return [super hash];
}
- (void)attachmentUploadProgress:(NSNotification*)notification {
NSDictionary *userinfo = [notification userInfo];
double progress = [[userinfo objectForKey:@"progress"] doubleValue];
NSString *attachmentID = [userinfo objectForKey:@"attachmentID"];
if ([_attachmentId isEqualToString:attachmentID]) {
NSLog(@"is downloaded: %d", _attachment.isDownloaded);
[_progressView setProgress:progress];
[_progressView setProgress: (float)progress];
if (progress >= 1) {
_maskLayer.hidden = YES;
_progressView.hidden = YES;

Loading…
Cancel
Save