|
|
|
@ -85,6 +85,7 @@ typedef enum : NSUInteger {
|
|
|
|
|
// Bubble image for messages sent by the local user.
// Consistency: use ARC-idiomatic `strong` (as on audioPlayerPoller) instead of legacy `retain`.
@property (nonatomic, strong) JSQMessagesBubbleImage *outgoingBubbleImageData;
|
|
|
|
|
// Bubble image for messages received from the remote party.
// Consistency: use ARC-idiomatic `strong` (as on audioPlayerPoller) instead of legacy `retain`.
@property (nonatomic, strong) JSQMessagesBubbleImage *incomingBubbleImageData;
|
|
|
|
|
// Bubble image shown for outgoing messages that failed to send.
// Consistency: use ARC-idiomatic `strong` (as on audioPlayerPoller) instead of legacy `retain`.
@property (nonatomic, strong) JSQMessagesBubbleImage *outgoingMessageFailedImageData;
|
|
|
|
|
@property (nonatomic, strong) NSTimer *audioPlayerPoller;
|
|
|
|
|
|
|
|
|
|
// Timer used to mark messages as read while the conversation is visible.
// Consistency: use ARC-idiomatic `strong` (as on audioPlayerPoller) instead of legacy `retain`.
// NOTE(review): a scheduled repeating NSTimer retains its target — remember to
// invalidate it before teardown or the controller will never deallocate.
@property (nonatomic, strong) NSTimer *readTimer;
|
|
|
|
|
|
|
|
|
@ -134,6 +135,8 @@ typedef enum : NSUInteger {
|
|
|
|
|
[super viewDidLoad];
|
|
|
|
|
[self.navigationController.navigationBar setTranslucent:NO];
|
|
|
|
|
|
|
|
|
|
[super viewDidLoad];
|
|
|
|
|
|
|
|
|
|
[self markAllMessagesAsRead];
|
|
|
|
|
|
|
|
|
|
[self initializeBubbles];
|
|
|
|
@ -299,7 +302,6 @@ typedef enum : NSUInteger {
|
|
|
|
|
self.outgoingBubbleImageData = [bubbleFactory outgoingMessagesBubbleImageWithColor:[UIColor ows_materialBlueColor]];
|
|
|
|
|
self.incomingBubbleImageData = [bubbleFactory incomingMessagesBubbleImageWithColor:[UIColor jsq_messageBubbleLightGrayColor]];
|
|
|
|
|
self.outgoingMessageFailedImageData = [bubbleFactory outgoingMessageFailedBubbleImageWithColor:[UIColor ows_fadedBlueColor]];
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
-(void)initializeCollectionViewLayout
|
|
|
|
@ -653,8 +655,8 @@ typedef enum : NSUInteger {
|
|
|
|
|
TSAttachmentStream *attStream = (TSAttachmentStream*)attachment;
|
|
|
|
|
NSFileManager *fileManager = [NSFileManager defaultManager];
|
|
|
|
|
if([messageMedia isVideo]) {
|
|
|
|
|
if ([fileManager fileExistsAtPath:[attStream.videoURL path]]) {
|
|
|
|
|
_videoPlayer = [[MPMoviePlayerController alloc] initWithContentURL:attStream.videoURL];
|
|
|
|
|
if ([fileManager fileExistsAtPath:[attStream.mediaURL path]]) {
|
|
|
|
|
_videoPlayer = [[MPMoviePlayerController alloc] initWithContentURL:attStream.mediaURL];
|
|
|
|
|
[_videoPlayer prepareToPlay];
|
|
|
|
|
|
|
|
|
|
[[NSNotificationCenter defaultCenter] addObserver:self
|
|
|
|
@ -667,15 +669,60 @@ typedef enum : NSUInteger {
|
|
|
|
|
[self.view addSubview: _videoPlayer.view];
|
|
|
|
|
[_videoPlayer setFullscreen:YES animated:YES];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
else if([messageMedia isAudio]){
|
|
|
|
|
DDLogDebug(@"audio location is %@",attStream.videoURL);
|
|
|
|
|
NSError *error;
|
|
|
|
|
_audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:attStream.videoURL error:&error];
|
|
|
|
|
DDLogDebug(@"audio debug is %@",error);
|
|
|
|
|
[_audioPlayer prepareToPlay];
|
|
|
|
|
[_audioPlayer play];
|
|
|
|
|
|
|
|
|
|
} else if([messageMedia isAudio]){
|
|
|
|
|
if (messageMedia.isAudioPlaying) {
|
|
|
|
|
// if you had started playing an audio msg and now you're tapping it to pause
|
|
|
|
|
messageMedia.isAudioPlaying = NO;
|
|
|
|
|
[_audioPlayer pause];
|
|
|
|
|
messageMedia.isPaused = YES;
|
|
|
|
|
[_audioPlayerPoller invalidate];
|
|
|
|
|
double current = [_audioPlayer currentTime]/[_audioPlayer duration];
|
|
|
|
|
[messageMedia setAudioProgressFromFloat:(float)current];
|
|
|
|
|
[messageMedia setAudioIconToPlay];
|
|
|
|
|
} else {
|
|
|
|
|
BOOL isResuming = NO;
|
|
|
|
|
[_audioPlayerPoller invalidate];
|
|
|
|
|
|
|
|
|
|
// loop through all the other bubbles and set their isPlaying to false
|
|
|
|
|
NSInteger num_bubbles = [self collectionView:collectionView numberOfItemsInSection:0];
|
|
|
|
|
for (NSInteger i=0; i<num_bubbles; i++) {
|
|
|
|
|
NSIndexPath *index_path = [NSIndexPath indexPathForRow:i inSection:0];
|
|
|
|
|
TSMessageAdapter *msgAdapter = [collectionView.dataSource collectionView:collectionView messageDataForItemAtIndexPath:index_path];
|
|
|
|
|
if (msgAdapter.messageType == TSIncomingMessageAdapter && msgAdapter.isMediaMessage) {
|
|
|
|
|
TSVideoAttachmentAdapter* msgMedia = (TSVideoAttachmentAdapter*)[msgAdapter media];
|
|
|
|
|
if ([msgMedia isAudio]) {
|
|
|
|
|
if (msgMedia == messageMedia && messageMedia.isPaused) {
|
|
|
|
|
isResuming = YES;
|
|
|
|
|
} else {
|
|
|
|
|
msgMedia.isAudioPlaying = NO;
|
|
|
|
|
msgMedia.isPaused = NO;
|
|
|
|
|
[msgMedia setAudioIconToPlay];
|
|
|
|
|
[msgMedia setAudioProgressFromFloat:0];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (isResuming) {
|
|
|
|
|
// if you had paused an audio msg and now you're tapping to resume
|
|
|
|
|
[_audioPlayer prepareToPlay];
|
|
|
|
|
[_audioPlayer play];
|
|
|
|
|
[messageMedia setAudioIconToPause];
|
|
|
|
|
messageMedia.isAudioPlaying = YES;
|
|
|
|
|
messageMedia.isPaused = NO;
|
|
|
|
|
_audioPlayerPoller = [NSTimer scheduledTimerWithTimeInterval:.01 target:self selector:@selector(audioPlayerUpdated:) userInfo:@{@"adapter": messageMedia} repeats:YES];
|
|
|
|
|
} else {
|
|
|
|
|
// if you are tapping an audio msg for the first time to play
|
|
|
|
|
messageMedia.isAudioPlaying = YES;
|
|
|
|
|
NSError *error;
|
|
|
|
|
_audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:attStream.mediaURL error:&error];
|
|
|
|
|
[_audioPlayer prepareToPlay];
|
|
|
|
|
[_audioPlayer play];
|
|
|
|
|
[messageMedia setAudioIconToPause];
|
|
|
|
|
_audioPlayer.delegate = self;
|
|
|
|
|
_audioPlayerPoller = [NSTimer scheduledTimerWithTimeInterval:.01 target:self selector:@selector(audioPlayerUpdated:) userInfo:@{@"adapter": messageMedia} repeats:YES];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
@ -694,7 +741,6 @@ typedef enum : NSUInteger {
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
-(NSURL*) changeFile:(NSURL*)originalFile toHaveExtension:(NSString*)extension {
|
|
|
|
|
NSFileManager *fileManager = [NSFileManager defaultManager];
|
|
|
|
|
NSString* newPath = [[originalFile path] stringByAppendingPathExtension:extension];
|
|
|
|
@ -1233,6 +1279,71 @@ typedef enum : NSUInteger {
|
|
|
|
|
#pragma mark group action view
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#pragma mark - Audio
|
|
|
|
|
|
|
|
|
|
/// Configures `_audioRecorder` to capture a new AAC (.m4a) voice memo into the
/// app's Documents directory. The output filename is the current timestamp in
/// milliseconds, so successive recordings never collide.
/// NOTE(review): recording does not start here — the caller is expected to send
/// `-record` to `_audioRecorder` afterwards.
- (void)recordAudio {
    // Unique destination: <Documents>/<millisecond-timestamp>.m4a
    NSString *documentsPath =
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    NSString *fileName = [NSString stringWithFormat:@"%lld.m4a", [NSDate ows_millisecondTimeStamp]];
    NSURL *outputFileURL = [NSURL fileURLWithPathComponents:@[ documentsPath, fileName ]];

    // The session must allow recording; PlayAndRecord keeps playback working too.
    // BUGFIX: the original discarded the out-error (error:nil); surface failures
    // instead of proceeding with a session that cannot record.
    NSError *sessionError;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    if (![session setCategory:AVAudioSessionCategoryPlayAndRecord error:&sessionError]) {
        NSLog(@"Failed to set audio session category: %@", sessionError);
        return;
    }

    // AAC, 44.1 kHz, 2 channels — identical settings to the original, written as a literal.
    NSDictionary<NSString *, id> *recordSettings = @{
        AVFormatIDKey : @(kAudioFormatMPEG4AAC),
        AVSampleRateKey : @44100.0f,
        AVNumberOfChannelsKey : @2,
    };

    // Initiate and prepare the recorder.
    // BUGFIX: the original passed error:NULL, silently ignoring init failures and
    // leaving _audioRecorder nil without any diagnostic.
    NSError *recorderError;
    _audioRecorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL
                                                 settings:recordSettings
                                                    error:&recorderError];
    if (!_audioRecorder) {
        NSLog(@"Failed to create audio recorder: %@", recorderError);
        return;
    }
    _audioRecorder.delegate = self;
    _audioRecorder.meteringEnabled = YES;
    [_audioRecorder prepareToRecord];
}
|
|
|
|
|
|
|
|
|
|
/// Timer callback that mirrors `_audioPlayer`'s playback position onto the
/// tapped audio bubble: updates its progress bar and remaining-time label.
/// @param timer The polling timer; its userInfo dictionary carries the bubble's
///              adapter under the key @"adapter" (set where the timer is scheduled).
- (void)audioPlayerUpdated:(NSTimer *)timer {
    TSVideoAttachmentAdapter *messageMedia = [timer userInfo][@"adapter"];

    NSTimeInterval duration = [_audioPlayer duration];
    NSTimeInterval currentTime = [_audioPlayer currentTime];

    // BUGFIX: guard against a zero duration — the original divided unconditionally,
    // which produces NaN and corrupts the progress display.
    double progress = (duration > 0) ? (currentTime / duration) : 0;
    [messageMedia setAudioProgressFromFloat:(float)progress];

    // Show time remaining rather than elapsed time.
    [messageMedia setDurationOfAudio:(duration - currentTime)];
}
|
|
|
|
|
|
|
|
|
|
/// AVAudioPlayerDelegate callback: playback reached the end (or failed), so stop
/// the progress poller and return every visible audio bubble — progress bar,
/// play icon, duration label — to its idle state.
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag {
    // Stop polling for playback progress.
    [_audioPlayerPoller invalidate];

    // Walk every bubble in section 0 and reset the audio attachments.
    JSQMessagesCollectionView *messagesView = self.collectionView;
    NSInteger bubbleCount = [self collectionView:messagesView numberOfItemsInSection:0];
    for (NSInteger row = 0; row < bubbleCount; row++) {
        NSIndexPath *path = [NSIndexPath indexPathForRow:row inSection:0];
        TSMessageAdapter *adapter =
            [messagesView.dataSource collectionView:messagesView messageDataForItemAtIndexPath:path];
        // Only incoming media messages can be audio bubbles.
        if (adapter.messageType != TSIncomingMessageAdapter || !adapter.isMediaMessage) {
            continue;
        }
        TSVideoAttachmentAdapter *media = (TSVideoAttachmentAdapter *)[adapter media];
        if (![media isAudio]) {
            continue;
        }
        [media setAudioProgressFromFloat:0];
        [media setAudioIconToPlay];
        [media removeDurationLabel];
    }
}
|
|
|
|
|
|
|
|
|
|
/// AVAudioRecorderDelegate callback: when recording completed successfully,
/// reads the captured .m4a file from disk and sends it as an audio attachment.
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder
                           successfully:(BOOL)flag {
    if (!flag) {
        return;
    }
    NSData *audioData = [NSData dataWithContentsOfURL:recorder.url];
    [self sendMessageAttachment:audioData ofType:@"audio/m4a"];
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#pragma mark Accessory View
|
|
|
|
|
|
|
|
|
@ -1274,39 +1385,6 @@ typedef enum : NSUInteger {
|
|
|
|
|
}];
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// NOTE(review): this method is an exact duplicate of -recordAudio defined
// earlier in this file (under "#pragma mark - Audio"). Two implementations of
// the same selector in one @implementation will not compile; one copy should
// be removed. Comments below flag the same issues as the earlier copy.
-(void)recordAudio {
    // Define the recorder setting
    // Output path: <Documents>/<millisecond-timestamp>.m4a (unique per recording).
    NSArray *pathComponents = [NSArray arrayWithObjects:
                               [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
                               [NSString stringWithFormat:@"%lld.m4a",[NSDate ows_millisecondTimeStamp]],
                               nil];
    NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];

    // Setup audio session
    // NOTE(review): the error out-parameter is discarded (error:nil) — a failure
    // to enter PlayAndRecord goes unnoticed.
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];

    NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];

    // AAC at 44.1 kHz, 2 channels.
    [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
    [recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
    [recordSetting setValue:[NSNumber numberWithInt: 2] forKey:AVNumberOfChannelsKey];

    // Initiate and prepare the recorder
    // NOTE(review): error:NULL ignores initialization failures — _audioRecorder
    // may be nil past this point with no diagnostic.
    _audioRecorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:NULL];
    _audioRecorder.delegate = self;
    _audioRecorder.meteringEnabled = YES;
    [_audioRecorder prepareToRecord];
}
|
|
|
|
|
|
|
|
|
|
// NOTE(review): exact duplicate of -audioRecorderDidFinishRecording:successfully:
// defined earlier in this file; one copy should be removed.
/// AVAudioRecorderDelegate callback: on success, sends the recorded file as an
/// audio/m4a attachment.
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder
successfully:(BOOL)flag {
    if(flag) {
        [self sendMessageAttachment:[NSData dataWithContentsOfURL:recorder.url] ofType:@"audio/m4a"];
    }
}
|
|
|
|
|
|
|
|
|
|
- (void)markAllMessagesAsRead {
|
|
|
|
|
[self.uiDatabaseConnection readWithBlock:^(YapDatabaseReadTransaction *transaction) {
|
|
|
|
|
YapDatabaseViewTransaction *viewTransaction = [transaction ext:TSUnreadDatabaseViewExtensionName];
|
|
|
|
|