Merge branch 'charlesmchen/refineAttachmentApproval'

Matthew Chen 8 years ago
commit 39c6b5fd7b

@@ -0,0 +1,23 @@
+{
+  "images" : [
+    {
+      "idiom" : "universal",
+      "filename" : "cancel-cross-white@1x.png",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "cancel-cross-white@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "cancel-cross-white@3x.png",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}

Binary file added (1.4 KiB)

Binary file added (1.5 KiB)

Binary file added (1.7 KiB)

@@ -57,6 +57,7 @@ enum TSImageQuality {
 class SignalAttachment: NSObject {
     static let TAG = "[SignalAttachment]"
+    let TAG = "[SignalAttachment]"
     // MARK: Properties
@@ -95,7 +96,8 @@ class SignalAttachment: NSObject {
     // To avoid redundant work of repeatedly compressing/uncompressing
     // images, we cache the UIImage associated with this attachment if
     // possible.
-    public var image: UIImage?
+    private var cachedImage: UIImage?
+    private var cachedVideoPreview: UIImage?
     private(set) public var isVoiceMessage = false
@@ -152,6 +154,42 @@ class SignalAttachment: NSObject {
         return SignalAttachmentError.missingData.errorDescription
     }
+
+    public func image() -> UIImage? {
+        if let cachedImage = cachedImage {
+            return cachedImage
+        }
+        guard let image = UIImage(data:dataSource.data()) else {
+            return nil
+        }
+        cachedImage = image
+        return image
+    }
+
+    public func videoPreview() -> UIImage? {
+        if let cachedVideoPreview = cachedVideoPreview {
+            return cachedVideoPreview
+        }
+        guard let mediaUrl = dataUrl else {
+            return nil
+        }
+        do {
+            let asset = AVURLAsset(url:mediaUrl)
+            let generator = AVAssetImageGenerator(asset: asset)
+            generator.appliesPreferredTrackTransform = true
+            let cgImage = try generator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
+            let image = UIImage(cgImage: cgImage)
+            cachedVideoPreview = image
+            return image
+        } catch let error {
+            Logger.verbose("\(TAG) Could not generate video thumbnail: \(error.localizedDescription)")
+            return nil
+        }
+    }
+
     // Returns the MIME type for this attachment or nil if no MIME type
     // can be identified.
     var mimeType: String {
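The new videoPreview() above extracts its thumbnail with AVAssetImageGenerator. As a point of reference, a minimal standalone sketch of the same approach (a hypothetical helper, without SignalAttachment's caching) looks like this in Swift:

    import AVFoundation
    import UIKit

    // Hypothetical helper, not part of this commit: grab a single frame at t = 0,
    // honoring the track's preferred transform so rotated video comes out upright.
    func firstFrame(ofVideoAt url: URL) -> UIImage? {
        let asset = AVURLAsset(url: url)
        let generator = AVAssetImageGenerator(asset: asset)
        generator.appliesPreferredTrackTransform = true
        do {
            let cgImage = try generator.copyCGImage(at: CMTime(value: 0, timescale: 1), actualTime: nil)
            return UIImage(cgImage: cgImage)
        } catch {
            return nil
        }
    }

The commit caches the result on the attachment so repeated preview renders do not re-decode the video.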
@@ -454,7 +492,7 @@ class SignalAttachment: NSObject {
             attachment.error = .couldNotParseImage
             return attachment
         }
-        attachment.image = image
+        attachment.cachedImage = image
         if isInputImageValidOutputImage(image: image, dataSource: dataSource, dataUTI: dataUTI) {
             Logger.verbose("\(TAG) Sending raw \(attachment.mimeType)")
@@ -513,7 +551,7 @@ class SignalAttachment: NSObject {
         let dataSource = DataSourceValue.emptyDataSource()
         dataSource.sourceFilename = filename
         let attachment = SignalAttachment(dataSource : dataSource, dataUTI: dataUTI)
-        attachment.image = image
+        attachment.cachedImage = image
         Logger.verbose("\(TAG) Writing \(attachment.mimeType) as image/jpeg")
         return compressImageAsJPEG(image : image, attachment : attachment, filename:filename)
@@ -545,7 +583,7 @@ class SignalAttachment: NSObject {
         if UInt(jpgImageData.count) <= kMaxFileSizeImage {
             let recompressedAttachment = SignalAttachment(dataSource : dataSource, dataUTI: kUTTypeJPEG as String)
-            recompressedAttachment.image = dstImage
+            recompressedAttachment.cachedImage = dstImage
             return recompressedAttachment
         }

@@ -4,6 +4,7 @@
 #import "ConversationInputToolbar.h"
 #import "ConversationInputTextView.h"
+#import "OWSMath.h"
 #import "Signal-Swift.h"
 #import "UIColor+OWS.h"
 #import "UIFont+OWS.h"
@@ -42,8 +43,8 @@ static void *kConversationInputTextViewObservingContext = &kConversationInputTex
 #pragma mark - Attachment Approval
-@property (nonatomic) UIView *attachmentApprovalView;
 @property (nonatomic, nullable) MediaMessageView *attachmentView;
+@property (nonatomic, nullable) UIView *cancelAttachmentWrapper;
 @property (nonatomic, nullable) SignalAttachment *attachmentToApprove;
 @end
@@ -132,10 +133,6 @@ static void *kConversationInputTextViewObservingContext = &kConversationInputTex
     self.voiceMemoButton.imageView.tintColor = [UIColor ows_materialBlueColor];
     [self.rightButtonWrapper addSubview:self.voiceMemoButton];
-    _attachmentApprovalView = [UIView containerView];
-    [self addSubview:self.attachmentApprovalView];
-    [self.attachmentApprovalView autoPinToSuperviewEdges];
     // We want to be permissive about the voice message gesture, so we hang
     // the long press GR on the button's wrapper, not the button itself.
     UILongPressGestureRecognizer *longPressGestureRecognizer =
@@ -215,12 +212,51 @@ static void *kConversationInputTextViewObservingContext = &kConversationInputTex
 {
     [NSLayoutConstraint deactivateConstraints:self.contentContraints];

+    const int textViewVInset = 5;
+    const int contentHInset = 6;
+    const int contentHSpacing = 6;
+
+    // We want to grow the text input area to fit its content within reason.
+    const CGFloat kMinTextViewHeight = ceil(self.inputTextView.font.lineHeight
+        + self.inputTextView.textContainerInset.top + self.inputTextView.textContainerInset.bottom
+        + self.inputTextView.contentInset.top + self.inputTextView.contentInset.bottom);
+    const CGFloat kMaxTextViewHeight = 100.f;
+    const CGFloat textViewDesiredHeight = (self.inputTextView.contentSize.height + self.inputTextView.contentInset.top
+        + self.inputTextView.contentInset.bottom);
+    const CGFloat textViewHeight = ceil(Clamp(textViewDesiredHeight, kMinTextViewHeight, kMaxTextViewHeight));
+    const CGFloat kMinContentHeight = kMinTextViewHeight + textViewVInset * 2;
+
     if (self.attachmentToApprove) {
-        self.contentView.hidden = YES;
-        self.attachmentApprovalView.hidden = NO;
+        OWSAssert(self.attachmentView);
+
+        self.leftButtonWrapper.hidden = YES;
+        self.inputTextView.hidden = YES;
+        self.voiceMemoButton.hidden = YES;
+        UIButton *rightButton = self.sendButton;
+        rightButton.enabled = YES;
+        rightButton.hidden = NO;
+
+        [rightButton setContentHuggingHigh];
+        [rightButton setCompressionResistanceHigh];
+        [self.attachmentView setContentHuggingLow];
+
+        OWSAssert(rightButton.superview == self.rightButtonWrapper);

         self.contentContraints = @[
-            [self.attachmentApprovalView autoSetDimension:ALDimensionHeight toSize:300.f],
+            [self.attachmentView autoPinEdgeToSuperviewEdge:ALEdgeTop withInset:textViewVInset],
+            [self.attachmentView autoPinEdgeToSuperviewEdge:ALEdgeBottom withInset:textViewVInset],
+            [self.attachmentView autoPinEdgeToSuperviewEdge:ALEdgeLeft withInset:contentHInset],
+            [self.attachmentView autoSetDimension:ALDimensionHeight toSize:300.f],
+
+            [self.rightButtonWrapper autoPinEdge:ALEdgeLeft toEdge:ALEdgeRight ofView:self.attachmentView],
+            [self.rightButtonWrapper autoPinEdgeToSuperviewEdge:ALEdgeRight],
+            [self.rightButtonWrapper autoPinEdgeToSuperviewEdge:ALEdgeTop],
+            [self.rightButtonWrapper autoPinEdgeToSuperviewEdge:ALEdgeBottom],
+
+            [rightButton autoSetDimension:ALDimensionHeight toSize:kMinContentHeight],
+            [rightButton autoPinLeadingToSuperviewWithMargin:contentHSpacing],
+            [rightButton autoPinTrailingToSuperviewWithMargin:contentHInset],
+            [rightButton autoPinEdgeToSuperviewEdge:ALEdgeBottom],
         ];

         [self setNeedsLayout];
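The textViewHeight calculation above uses Clamp from the newly imported OWSMath.h. Assuming it has the same semantics as the MAX/MIN expression it replaces later in this diff, a Swift equivalent would be:

    import CoreGraphics

    // Assumed semantics, inferred from the MAX(kMin, MIN(kMax, desired)) expression
    // that Clamp replaces in this commit; not copied from OWSMath.h itself.
    func clamp(_ value: CGFloat, _ minValue: CGFloat, _ maxValue: CGFloat) -> CGFloat {
        return max(minValue, min(maxValue, value))
    }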
@@ -235,26 +271,13 @@ static void *kConversationInputTextViewObservingContext = &kConversationInputTex
         return;
     }

-    self.contentView.hidden = NO;
-    self.attachmentApprovalView.hidden = YES;
+    self.leftButtonWrapper.hidden = NO;
+    self.inputTextView.hidden = NO;
+    self.voiceMemoButton.hidden = NO;

     [self.attachmentView removeFromSuperview];
     self.attachmentView = nil;
-    for (UIView *subview in self.attachmentApprovalView.subviews) {
-        [subview removeFromSuperview];
-    }
-
-    const int textViewVInset = 5;
-    const int contentHInset = 6;
-    const int contentHSpacing = 6;
-
-    // We want to grow the text input area to fit its content within reason.
-    const CGFloat kMinTextViewHeight = ceil(self.inputTextView.font.lineHeight
-        + self.inputTextView.textContainerInset.top + self.inputTextView.textContainerInset.bottom
-        + self.inputTextView.contentInset.top + self.inputTextView.contentInset.bottom);
-    const CGFloat kMaxTextViewHeight = 100.f;
-    const CGFloat textViewDesiredHeight = (self.inputTextView.contentSize.height + self.inputTextView.contentInset.top
-        + self.inputTextView.contentInset.bottom);
-    const CGFloat textViewHeight = ceil(MAX(kMinTextViewHeight, MIN(kMaxTextViewHeight, textViewDesiredHeight)));
-    const CGFloat kMinContentHeight = kMinTextViewHeight + textViewVInset * 2;
+    [self.cancelAttachmentWrapper removeFromSuperview];
+    self.cancelAttachmentWrapper = nil;

     UIButton *leftButton = self.attachmentButton;
     UIButton *rightButton = (self.shouldShowVoiceMemoButton ? self.voiceMemoButton : self.sendButton);
@@ -321,7 +344,7 @@ static void *kConversationInputTextViewObservingContext = &kConversationInputTex
 - (void)ensureShouldShowVoiceMemoButton
 {
-    self.shouldShowVoiceMemoButton = self.inputTextView.trimmedText.length < 1;
+    self.shouldShowVoiceMemoButton = (self.attachmentToApprove == nil && self.inputTextView.trimmedText.length < 1);
 }

 - (void)handleLongPress:(UIGestureRecognizer *)sender
@@ -619,7 +642,11 @@ static void *kConversationInputTextViewObservingContext = &kConversationInputTex
 {
     OWSAssert(self.inputToolbarDelegate);
-    [self.inputToolbarDelegate sendButtonPressed];
+    if (self.attachmentToApprove) {
+        [self attachmentApprovalSendPressed];
+    } else {
+        [self.inputToolbarDelegate sendButtonPressed];
+    }
 }

 - (void)attachmentButtonPressed
@@ -699,58 +726,60 @@ static void *kConversationInputTextViewObservingContext = &kConversationInputTex
     MediaMessageView *attachmentView = [[MediaMessageView alloc] initWithAttachment:attachment];
     self.attachmentView = attachmentView;
-    [self.attachmentApprovalView addSubview:attachmentView];
-    [attachmentView autoPinEdgeToSuperviewEdge:ALEdgeTop withInset:10];
-    [attachmentView autoPinWidthToSuperviewWithMargin:20];
-
-    UIView *buttonRow = [UIView containerView];
-    [self.attachmentApprovalView addSubview:buttonRow];
-    [buttonRow autoPinWidthToSuperviewWithMargin:20];
-    [buttonRow autoPinEdge:ALEdgeTop toEdge:ALEdgeBottom ofView:attachmentView withOffset:10];
-    [buttonRow autoPinEdgeToSuperviewEdge:ALEdgeBottom withInset:10];
-
-    // We use this invisible subview to ensure that the buttons are centered
-    // horizontally.
-    UIView *buttonSpacer = [UIView new];
-    [buttonRow addSubview:buttonSpacer];
-    // Vertical positioning of this view doesn't matter.
-    [buttonSpacer autoPinEdgeToSuperviewEdge:ALEdgeTop];
-    [buttonSpacer autoSetDimension:ALDimensionWidth toSize:ScaleFromIPhone5To7Plus(20, 30)];
-    [buttonSpacer autoSetDimension:ALDimensionHeight toSize:0];
-    [buttonSpacer autoHCenterInSuperview];
-
-    UIView *cancelButton = [self createAttachmentApprovalButton:[CommonStrings cancelButton]
-                                                           color:[UIColor ows_destructiveRedColor]
-                                                        selector:@selector(attachmentApprovalCancelPressed)];
-    [buttonRow addSubview:cancelButton];
-    [cancelButton autoPinHeightToSuperview];
-    [cancelButton autoPinEdge:ALEdgeRight toEdge:ALEdgeLeft ofView:buttonSpacer];
-
-    UIView *sendButton =
-        [self createAttachmentApprovalButton:NSLocalizedString(
-                                                 @"ATTACHMENT_APPROVAL_SEND_BUTTON", comment
-                                                 : @"Label for 'send' button in the 'attachment approval' dialog.")
-                                       color:[UIColor colorWithRGBHex:0x2ecc71]
-                                    selector:@selector(attachmentApprovalSendPressed)];
-    [buttonRow addSubview:sendButton];
-    [sendButton autoPinHeightToSuperview];
-    [sendButton autoPinEdge:ALEdgeLeft toEdge:ALEdgeRight ofView:buttonSpacer];
+    [self.contentView addSubview:attachmentView];
+
+    UIView *cancelAttachmentWrapper = [UIView containerView];
+    self.cancelAttachmentWrapper = cancelAttachmentWrapper;
+    [cancelAttachmentWrapper
+        addGestureRecognizer:[[UITapGestureRecognizer alloc] initWithTarget:self
+                                                                     action:@selector(cancelAttachmentWrapperTapped:)]];
+    UIView *_Nullable attachmentContentView = [self.attachmentView contentView];
+    // Place the cancel button inside the attachment view's content area,
+    // if possible. If not, just place it inside the attachment view.
+    UIView *cancelButtonReferenceView = attachmentContentView;
+    if (attachmentContentView) {
+        attachmentContentView.layer.borderColor = self.inputTextView.layer.borderColor;
+        attachmentContentView.layer.borderWidth = self.inputTextView.layer.borderWidth;
+        attachmentContentView.layer.cornerRadius = self.inputTextView.layer.cornerRadius;
+        attachmentContentView.clipsToBounds = YES;
+    } else {
+        cancelButtonReferenceView = self.attachmentView;
+    }
+    [self.contentView addSubview:cancelAttachmentWrapper];
+    [cancelAttachmentWrapper autoPinEdge:ALEdgeTop toEdge:ALEdgeTop ofView:cancelButtonReferenceView];
+    [cancelAttachmentWrapper autoPinEdge:ALEdgeRight toEdge:ALEdgeRight ofView:cancelButtonReferenceView];
+
+    UIImage *cancelIcon = [UIImage imageNamed:@"cancel-cross-white"];
+    OWSAssert(cancelIcon);
+    UIButton *cancelButton = [UIButton buttonWithType:UIButtonTypeCustom];
+    [cancelButton setImage:cancelIcon forState:UIControlStateNormal];
+    [cancelButton setBackgroundColor:[UIColor ows_materialBlueColor]];
+    OWSAssert(cancelIcon.size.width == cancelIcon.size.height);
+    CGFloat cancelIconSize = MIN(cancelIcon.size.width, cancelIcon.size.height);
+    CGFloat cancelIconInset = round(cancelIconSize * 0.35f);
+    [cancelButton
+        setContentEdgeInsets:UIEdgeInsetsMake(cancelIconInset, cancelIconInset, cancelIconInset, cancelIconInset)];
+    CGFloat cancelButtonRadius = cancelIconInset + cancelIconSize * 0.5f;
+    cancelButton.layer.cornerRadius = cancelButtonRadius;
+    CGFloat cancelButtonInset = 10.f;
+    [cancelButton addTarget:self
+                     action:@selector(attachmentApprovalCancelPressed)
+           forControlEvents:UIControlEventTouchUpInside];
+    [cancelAttachmentWrapper addSubview:cancelButton];
+    [cancelButton autoPinWidthToSuperviewWithMargin:cancelButtonInset];
+    [cancelButton autoPinHeightToSuperviewWithMargin:cancelButtonInset];
+    CGFloat cancelButtonSize = cancelIconSize + 2 * cancelIconInset;
+    [cancelButton autoSetDimension:ALDimensionWidth toSize:cancelButtonSize];
+    [cancelButton autoSetDimension:ALDimensionHeight toSize:cancelButtonSize];

     [self ensureContentConstraints];
 }

-- (UIView *)createAttachmentApprovalButton:(NSString *)title color:(UIColor *)color selector:(SEL)selector
+- (void)cancelAttachmentWrapperTapped:(UIGestureRecognizer *)sender
 {
-    const CGFloat buttonWidth = ScaleFromIPhone5To7Plus(110, 140);
-    const CGFloat buttonHeight = ScaleFromIPhone5To7Plus(35, 45);
-
-    return [OWSFlatButton buttonWithTitle:title
-                               titleColor:[UIColor whiteColor]
-                          backgroundColor:color
-                                    width:buttonWidth
-                                   height:buttonHeight
-                                   target:self
-                                 selector:selector];
+    if (sender.state == UIGestureRecognizerStateRecognized) {
+        [self attachmentApprovalCancelPressed];
+    }
 }

 - (void)attachmentApprovalCancelPressed
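The cancel button sizing above is simple arithmetic on the icon's point size: the content inset is 35% of the icon, the button diameter is the icon plus an inset on each side, and the corner radius is half that diameter, which makes the button circular. A hedged Swift sketch of the same math (hypothetical helper, not part of the commit):

    import UIKit

    // Mirrors the Objective-C sizing above: content inset, overall diameter, and
    // corner radius for a circular button wrapped around a square icon.
    func circularButtonMetrics(iconSize: CGFloat) -> (inset: CGFloat, diameter: CGFloat, cornerRadius: CGFloat) {
        let inset = (iconSize * 0.35).rounded()   // round(cancelIconSize * 0.35f)
        let diameter = iconSize + 2 * inset       // cancelButtonSize
        return (inset, diameter, diameter / 2)    // equals cancelIconInset + cancelIconSize * 0.5
    }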

@@ -2318,15 +2318,13 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
 #pragma mark GifPickerViewControllerDelegate

-- (void)gifPickerWillSend
+- (void)gifPickerDidSelectWithAttachment:(SignalAttachment *)attachment
 {
-    [ThreadUtil addThreadToProfileWhitelistIfEmptyContactThread:self.thread];
-}
-
-- (void)gifPickerDidSendWithOutgoingMessage:(TSOutgoingMessage *)message
-{
-    [self messageWasSent:message];
+    OWSAssert(attachment);
+
+    [self tryToSendAttachmentIfApproved:attachment];

+    [ThreadUtil addThreadToProfileWhitelistIfEmptyContactThread:self.thread];
     [self ensureDynamicInteractions];
 }

@@ -6,8 +6,7 @@ import Foundation
 @objc
 protocol GifPickerViewControllerDelegate: class {
-    func gifPickerWillSend()
-    func gifPickerDidSend(outgoingMessage: TSOutgoingMessage)
+    func gifPickerDidSelect(attachment: SignalAttachment)
 }

 class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollectionViewDataSource, UICollectionViewDelegate, GifPickerLayoutDelegate {
@@ -359,11 +358,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
                 }
                 let attachment = SignalAttachment(dataSource: dataSource, dataUTI: asset.rendition.utiType)
-                strongSelf.delegate?.gifPickerWillSend()
-
-                let outgoingMessage = ThreadUtil.sendMessage(with: attachment, in: strongSelf.thread, messageSender: strongSelf.messageSender)
-
-                strongSelf.delegate?.gifPickerDidSend(outgoingMessage: outgoingMessage)
+                strongSelf.delegate?.gifPickerDidSelect(attachment: attachment)
                 strongSelf.dismiss(animated: true, completion: nil)
             }.catch { [weak self] error in

@@ -256,7 +256,6 @@ typedef NS_ENUM(NSInteger, CellState) { kArchiveState, kInboxState };
     // after mappings have been set up in `showInboxGrouping`
     [self tableViewSetUp];
-
     self.segmentedControl = [[UISegmentedControl alloc] initWithItems:@[
         NSLocalizedString(@"WHISPER_NAV_BAR_TITLE", nil),
         NSLocalizedString(@"ARCHIVE_NAV_BAR_TITLE", nil)

@@ -30,6 +30,8 @@ class MediaMessageView: UIView, OWSAudioAttachmentPlayerDelegate {
     var audioProgressSeconds: CGFloat = 0
     var audioDurationSeconds: CGFloat = 0

+    var contentView: UIView?
+
     // MARK: Initializers

     @available(*, unavailable, message:"use attachment: constructor instead.")
@@ -49,6 +51,10 @@ class MediaMessageView: UIView, OWSAudioAttachmentPlayerDelegate {
         createViews()
     }

+    deinit {
+        NotificationCenter.default.removeObserver(self)
+    }
+
     // MARK: View Lifecycle

     func viewWillAppear(_ animated: Bool) {
@@ -154,19 +160,36 @@ class MediaMessageView: UIView, OWSAudioAttachmentPlayerDelegate {
             createGenericPreview()
             return
         }
+        guard image.size.width > 0 && image.size.height > 0 else {
+            createGenericPreview()
+            return
+        }
         let animatedImageView = YYAnimatedImageView()
         animatedImageView.image = image
-        animatedImageView.contentMode = .scaleAspectFit
-        self.addSubview(animatedImageView)
-        animatedImageView.autoPinToSuperviewEdges()
+        let aspectRatio = image.size.width / image.size.height
+        addSubviewWithScaleAspectFitLayout(view:animatedImageView, aspectRatio:aspectRatio)
+        contentView = animatedImageView
+    }
+
+    private func addSubviewWithScaleAspectFitLayout(view: UIView, aspectRatio: CGFloat) {
+        self.addSubview(view)
+        // This emulates the behavior of contentMode = .scaleAspectFit using
+        // iOS auto layout constraints.
+        //
+        // This allows ConversationInputToolbar to place the "cancel" button
+        // in the upper-right hand corner of the preview content.
+        view.autoCenterInSuperview()
+        view.autoPin(toAspectRatio:aspectRatio)
+        view.autoMatch(.width, to: .width, of: self, withMultiplier: 1.0, relation: .lessThanOrEqual)
+        view.autoMatch(.height, to: .height, of: self, withMultiplier: 1.0, relation: .lessThanOrEqual)
     }

     private func createImagePreview() {
-        var image = attachment.image
-        if image == nil {
-            image = UIImage(data: attachment.data)
-        }
-        guard image != nil else {
+        guard let image = attachment.image() else {
+            createGenericPreview()
+            return
+        }
+        guard image.size.width > 0 && image.size.height > 0 else {
             createGenericPreview()
             return
         }
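addSubviewWithScaleAspectFitLayout above leans on PureLayout helpers (autoCenterInSuperview, autoPin(toAspectRatio:), autoMatch). For readers without PureLayout, a rough plain-UIKit translation of the same constraint set (an assumed equivalent, not the project's code):

    import UIKit

    // Center the view, fix its aspect ratio, and keep it no larger than its
    // superview in either axis, emulating contentMode = .scaleAspectFit.
    func addSubviewWithAspectFit(_ view: UIView, in container: UIView, aspectRatio: CGFloat) {
        view.translatesAutoresizingMaskIntoConstraints = false
        container.addSubview(view)
        NSLayoutConstraint.activate([
            view.centerXAnchor.constraint(equalTo: container.centerXAnchor),
            view.centerYAnchor.constraint(equalTo: container.centerYAnchor),
            view.widthAnchor.constraint(equalTo: view.heightAnchor, multiplier: aspectRatio),
            view.widthAnchor.constraint(lessThanOrEqualTo: container.widthAnchor),
            view.heightAnchor.constraint(lessThanOrEqualTo: container.heightAnchor)
        ])
    }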
@@ -174,28 +197,35 @@ class MediaMessageView: UIView, OWSAudioAttachmentPlayerDelegate {
         let imageView = UIImageView(image: image)
         imageView.layer.minificationFilter = kCAFilterTrilinear
         imageView.layer.magnificationFilter = kCAFilterTrilinear
-        imageView.contentMode = .scaleAspectFit
-        self.addSubview(imageView)
-        imageView.autoPinToSuperviewEdges()
+        let aspectRatio = image.size.width / image.size.height
+        addSubviewWithScaleAspectFitLayout(view:imageView, aspectRatio:aspectRatio)
+        contentView = imageView
     }

     private func createVideoPreview() {
-        guard let dataUrl = attachment.dataUrl else {
+        guard let image = attachment.videoPreview() else {
             createGenericPreview()
             return
         }
-        guard let videoPlayer = MPMoviePlayerController(contentURL: dataUrl) else {
+        guard image.size.width > 0 && image.size.height > 0 else {
             createGenericPreview()
             return
         }
-        videoPlayer.prepareToPlay()
-
-        videoPlayer.controlStyle = .default
-        videoPlayer.shouldAutoplay = false
-
-        self.addSubview(videoPlayer.view)
-        self.videoPlayer = videoPlayer
-        videoPlayer.view.autoPinToSuperviewEdges()
+
+        let imageView = UIImageView(image: image)
+        imageView.layer.minificationFilter = kCAFilterTrilinear
+        imageView.layer.magnificationFilter = kCAFilterTrilinear
+        let aspectRatio = image.size.width / image.size.height
+        addSubviewWithScaleAspectFitLayout(view:imageView, aspectRatio:aspectRatio)
+        contentView = imageView
+
+        let videoPlayIcon = UIImage(named:"play_button")
+        let videoPlayButton = UIImageView(image:videoPlayIcon)
+        imageView.addSubview(videoPlayButton)
+        videoPlayButton.autoCenterInSuperview()
+        imageView.isUserInteractionEnabled = true
+        imageView.addGestureRecognizer(UITapGestureRecognizer(target:self, action:#selector(videoTapped)))
     }
private func createGenericPreview() { private func createGenericPreview() {
@@ -365,4 +395,51 @@ class MediaMessageView: UIView, OWSAudioAttachmentPlayerDelegate {
         audioPlayButton?.setImage(image, for: .normal)
         audioPlayButton?.imageView?.tintColor = UIColor.ows_materialBlue()
     }
+
+    // MARK: - Video Playback
+
+    func videoTapped(sender: UIGestureRecognizer) {
+        guard let dataUrl = attachment.dataUrl else {
+            return
+        }
+        guard sender.state == .recognized else {
+            return
+        }
+        guard let videoPlayer = MPMoviePlayerController(contentURL: dataUrl) else {
+            return
+        }
+        videoPlayer.prepareToPlay()
+
+        NotificationCenter.default.addObserver(forName: .MPMoviePlayerWillExitFullscreen, object: nil, queue: nil) { [weak self] _ in
+            self?.moviePlayerWillExitFullscreen()
+        }
+        NotificationCenter.default.addObserver(forName: .MPMoviePlayerDidExitFullscreen, object: nil, queue: nil) { [weak self] _ in
+            self?.moviePlayerDidExitFullscreen()
+        }
+
+        videoPlayer.controlStyle = .default
+        videoPlayer.shouldAutoplay = true
+
+        self.addSubview(videoPlayer.view)
+        videoPlayer.view.frame = self.bounds
+        self.videoPlayer = videoPlayer
+        videoPlayer.view.autoPinToSuperviewEdges()
+        ViewControllerUtils.setAudioIgnoresHardwareMuteSwitch(true)
+        videoPlayer.setFullscreen(true, animated:false)
+    }
+
+    private func moviePlayerWillExitFullscreen() {
+        clearVideoPlayer()
+    }
+
+    private func moviePlayerDidExitFullscreen() {
+        clearVideoPlayer()
+    }
+
+    private func clearVideoPlayer() {
+        videoPlayer?.stop()
+        videoPlayer?.view.removeFromSuperview()
+        videoPlayer = nil
+
+        ViewControllerUtils.setAudioIgnoresHardwareMuteSwitch(false)
+    }
 }

@@ -297,6 +297,7 @@ NS_ASSUME_NONNULL_BEGIN
     DataSourcePath *instance = [DataSourcePath new];
     instance.filePath = filePath;
+    OWSAssert(!instance.shouldDeleteOnDeallocation);
     return instance;
 }
