Respond to CR.

pull/2/head
Matthew Chen 6 years ago
parent 69c5492fce
commit c0f907c441

@@ -6,10 +6,14 @@
<dict>
<key>CarthageVersion</key>
<string>0.31.2</string>
<key>DateTime</key>
<string>Wed Feb 13 22:13:11 UTC 2019</string>
<key>OSXVersion</key>
<string>10.14.3</string>
<key>WebRTCCommit</key>
<string>55de5593cc261fa9368c5ccde98884ed1e278ba0 M72</string>
<key>XCodeVersion</key>
<string>1000.1010</string>
</dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>

@@ -49,23 +49,34 @@ class ImageEditorModelTest: SignalBaseTest {
}
func testAffineTransformComposition() {
// Documents how classic SRT (scale-rotate-translate) ordering is specified
// in _reverse_ order using CGAffineTransform.
// The transformed origin should reflect ONLY the translation, not scaling or rotation.
XCTAssertEqual(+20.0, CGPoint.zero.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).scale(5)).x, accuracy: 0.1)
XCTAssertEqual(+30.0, CGPoint.zero.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).scale(5)).y, accuracy: 0.1)
// WRONG: the translation is being scaled.
XCTAssertEqual(+100.0, CGPoint.zero.applying(CGAffineTransform.scale(5).translate(CGPoint(x: 20, y: 30))).x, accuracy: 0.1)
XCTAssertEqual(+150.0, CGPoint.zero.applying(CGAffineTransform.scale(5).translate(CGPoint(x: 20, y: 30))).y, accuracy: 0.1)
// The transformed origin should reflect ONLY the translation, not scaling or rotation.
XCTAssertEqual(+20.0, CGPoint.zero.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).rotate(CGFloat.halfPi).scale(5)).x, accuracy: 0.1)
XCTAssertEqual(+30.0, CGPoint.zero.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).rotate(CGFloat.halfPi).scale(5)).y, accuracy: 0.1)
// WRONG: the translation is being rotated and scaled.
XCTAssertEqual(-150.0, CGPoint.zero.applying(CGAffineTransform.scale(5).rotate(CGFloat.halfPi).translate(CGPoint(x: 20, y: 30))).x, accuracy: 0.1)
XCTAssertEqual(+100.0, CGPoint.zero.applying(CGAffineTransform.scale(5).rotate(CGFloat.halfPi).translate(CGPoint(x: 20, y: 30))).y, accuracy: 0.1)
// An arbitrary point one "unit" away from the origin should end up scaled (unit x scaling) + translation.
XCTAssertEqual(+25.0, CGPoint.unit.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).scale(5)).x, accuracy: 0.1)
XCTAssertEqual(+35.0, CGPoint.unit.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).scale(5)).y, accuracy: 0.1)
// WRONG: the translation is being scaled.
XCTAssertEqual(+105.0, CGPoint.unit.applying(CGAffineTransform.scale(5).translate(CGPoint(x: 20, y: 30))).x, accuracy: 0.1)
XCTAssertEqual(+155.0, CGPoint.unit.applying(CGAffineTransform.scale(5).translate(CGPoint(x: 20, y: 30))).y, accuracy: 0.1)
// An arbitrary point one "unit" away from the origin should end up scaled (unit x scaling ... rotated about the origin) + translation.
XCTAssertEqual(+15.0, CGPoint.unit.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).rotate(CGFloat.halfPi).scale(5)).x, accuracy: 0.1)
XCTAssertEqual(+35.0, CGPoint.unit.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).rotate(CGFloat.halfPi).scale(5)).y, accuracy: 0.1)
// WRONG: the translation is being rotated and scaled.
XCTAssertEqual(-155.0, CGPoint.unit.applying(CGAffineTransform.scale(5).rotate(CGFloat.halfPi).translate(CGPoint(x: 20, y: 30))).x, accuracy: 0.1)
XCTAssertEqual(+105.0, CGPoint.unit.applying(CGAffineTransform.scale(5).rotate(CGFloat.halfPi).translate(CGPoint(x: 20, y: 30))).y, accuracy: 0.1)
}
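For reference, the same reverse-ordering rule can be reproduced with the stock CGAffineTransform API. A minimal sketch, assuming the project's translate/rotate/scale extensions used above (not shown in this diff) behave like the standard translatedBy/rotated(by:)/scaledBy(x:y:) calls, and that CGPoint.unit is (1, 1):

import CoreGraphics

// Each chained call prepends its operation, so classic SRT order
// (scale, then rotate, then translate applied to the point) is written
// in reverse: translation first, scale last.
let srt = CGAffineTransform(translationX: 20, y: 30)
    .rotated(by: .pi / 2)
    .scaledBy(x: 5, y: 5)

// The origin is unaffected by scale and rotation, so it lands exactly
// at the translation: (20, 30).
let origin = CGPoint.zero.applying(srt)
assert(abs(origin.x - 20) < 0.1 && abs(origin.y - 30) < 0.1)

// (1, 1) is scaled to (5, 5), rotated a quarter turn to (-5, 5), then
// translated to (15, 35) -- the same values the assertions above expect.
let unit = CGPoint(x: 1, y: 1).applying(srt)
assert(abs(unit.x - 15) < 0.1 && abs(unit.y - 35) < 0.1)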

@@ -143,10 +143,12 @@ public class ImageEditorCanvasView: UIView {
let srcImageUrl = URL(fileURLWithPath: srcImagePath)
srcImageData = try Data(contentsOf: srcImageUrl)
} catch {
Logger.error("Couldn't parse srcImageUrl")
owsFailDebug("Couldn't parse srcImageUrl")
return nil
}
// We use this constructor so that we can specify the scale.
//
// UIImage(contentsOfFile:) will sometimes use device scale.
guard let srcImage = UIImage(data: srcImageData, scale: 1.0) else {
owsFailDebug("Couldn't load background image.")
return nil
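To make the scale pitfall concrete, here is a minimal sketch of the loading approach the comment above describes, using a hypothetical file path (not part of this change):

import UIKit

// UIImage(contentsOfFile:) may infer a scale from the device and any
// @2x/@3x-style filename suffix; UIImage(data:scale:) pins it explicitly.
let srcImagePath = "/tmp/source-image.jpg" // hypothetical path
guard
    let srcImageData = try? Data(contentsOf: URL(fileURLWithPath: srcImagePath)),
    let srcImage = UIImage(data: srcImageData, scale: 1.0)
else {
    fatalError("Couldn't load background image.")
}
// srcImage.scale is now 1.0 regardless of the device, so the editor's
// point geometry maps 1:1 to the bitmap's pixels.
print(srcImage.scale)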
@@ -605,9 +607,9 @@ public class ImageEditorCanvasView: UIView {
for item in model.items() {
guard let layer = layerForItem(item: item,
model: model,
model: model,
viewSize: viewSize) else {
Logger.error("Couldn't create layer for item.")
owsFailDebug("Couldn't create layer for item.")
continue
}
layer.contentsScale = dstScale * transform.scaling * item.outputScale()

@@ -296,18 +296,6 @@ class ImageEditorCropViewController: OWSViewController {
return true
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
_ = self.becomeFirstResponder()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
_ = self.becomeFirstResponder()
}
// MARK: - Pinch Gesture
@objc
@@ -368,6 +356,7 @@ class ImageEditorCropViewController: OWSViewController {
// We could undo an in-progress pinch if the gesture is cancelled, but it seems gratuitous.
// Handle the GR if necessary.
switch gestureRecognizer.state {
case .began:
Logger.verbose("began: \(transform.unitTranslation)")
@@ -385,6 +374,7 @@ class ImageEditorCropViewController: OWSViewController {
break
}
// Reset the GR if necessary.
switch gestureRecognizer.state {
case .ended, .failed, .cancelled, .possible:
if panCropRegion != nil {

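The two switches above follow a common "handle, then reset" shape for gesture recognizers: capture state on .began, apply it while the gesture changes, and clear the in-progress state once the recognizer finishes. A minimal sketch of that shape, with a hypothetical zoom value standing in for the crop transform (not the controller's actual implementation):

import UIKit

final class PinchSketch: NSObject {
    private var currentZoom: CGFloat = 1
    // In-progress state, captured on .began and cleared when the GR resets.
    private var zoomAtPinchStart: CGFloat?

    @objc
    func handlePinch(_ gesture: UIPinchGestureRecognizer) {
        // Handle the GR if necessary.
        switch gesture.state {
        case .began:
            zoomAtPinchStart = currentZoom
        case .changed, .ended:
            if let start = zoomAtPinchStart {
                // UIPinchGestureRecognizer.scale is cumulative since .began.
                currentZoom = start * gesture.scale
            }
        default:
            break
        }
        // Reset the GR state if necessary.
        switch gesture.state {
        case .ended, .failed, .cancelled, .possible:
            zoomAtPinchStart = nil
        default:
            break
        }
    }
}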
@@ -100,10 +100,6 @@ public class ImageEditorView: UIView {
updateGestureState()
DispatchQueue.main.async {
self.presentCropTool()
}
return true
}
