4

I am trying to crop a selected portion of NSImage which is fitted as per ProportionallyUpOrDown(AspectFill) Mode. I am drawing a frame using mouse dragged event like this:

class CropImageView: NSImageView {

    var startPoint: NSPoint!
    var shapeLayer: CAShapeLayer!
    var flagCheck = false
    var finalPoint: NSPoint!

    override init(frame frameRect: NSRect) {
        super.init(frame: frameRect)
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
    }

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)
    }
    
    override var image: NSImage? {
        set {
            self.layer = CALayer()
            self.layer?.contentsGravity = kCAGravityResizeAspectFill
            self.layer?.contents = newValue
            self.wantsLayer = true
            super.image = newValue
        }
        get {
            return super.image
        }
    }

    override func mouseDown(with event: NSEvent) {

        self.startPoint = self.convert(event.locationInWindow, from: nil)
        if self.shapeLayer != nil {
            self.shapeLayer.removeFromSuperlayer()
            self.shapeLayer = nil
        }
        self.flagCheck = true
        var pixelColor: NSColor = NSReadPixel(startPoint) ?? NSColor()
        shapeLayer = CAShapeLayer()
        shapeLayer.lineWidth = 1.0
        shapeLayer.fillColor = NSColor.clear.cgColor
        if pixelColor == NSColor.black {
            pixelColor = NSColor.color_white
        } else {
            pixelColor = NSColor.black
        }
        shapeLayer.strokeColor = pixelColor.cgColor
        shapeLayer.lineDashPattern = [1]
        self.layer?.addSublayer(shapeLayer)

        var dashAnimation = CABasicAnimation()
        dashAnimation = CABasicAnimation(keyPath: "lineDashPhase")
        dashAnimation.duration = 0.75
        dashAnimation.fromValue = 0.0
        dashAnimation.toValue = 15.0
        dashAnimation.repeatCount = 0.0
        shapeLayer.add(dashAnimation, forKey: "linePhase")
    }

    override func mouseDragged(with event: NSEvent) {
        let point: NSPoint = self.convert(event.locationInWindow, from: nil)

        var newPoint: CGPoint = self.startPoint
        
        let xDiff = point.x - self.startPoint.x
        let yDiff = point.y - self.startPoint.y
        let dist = min(abs(xDiff), abs(yDiff))
        newPoint.x += xDiff > 0 ? dist : -dist
        newPoint.y += yDiff > 0 ? dist : -dist
        
        let path = CGMutablePath()
        path.move(to: self.startPoint)
        path.addLine(to: NSPoint(x: self.startPoint.x, y: newPoint.y))
        path.addLine(to: newPoint)
        path.addLine(to: NSPoint(x: newPoint.x, y: self.startPoint.y))
        path.closeSubpath()
        self.shapeLayer.path = path
    }

    override func mouseUp(with event: NSEvent) {
        self.finalPoint = self.convert(event.locationInWindow, from: nil)
    }
}

and selected this area as shown in picture using black dotted line:

enter image description here

My Cropping Code logic is this:

// resize Image Methods
extension CropProfileView {

    func resizeImage(image: NSImage) -> Data {

        var scalingFactor: CGFloat = 0.0
        if image.size.width >= image.size.height {
            scalingFactor = image.size.width/cropImgView.size.width
        } else {
            scalingFactor = image.size.height/cropImgView.size.height
        }
        let width = (self.cropImgView.finalPoint.x - self.cropImgView.startPoint.x) * scalingFactor
        let height = (self.cropImgView.startPoint.y - self.cropImgView.finalPoint.y) * scalingFactor
        let xPos = ((image.size.width/2) - (cropImgView.bounds.midX - self.cropImgView.startPoint.x) * scalingFactor)
        let yPos = ((image.size.height/2) - (cropImgView.bounds.midY - (cropImgView.size.height - self.cropImgView.startPoint.y)) * scalingFactor)
        
        var croppedRect: NSRect = NSRect(x: xPos, y: yPos, width: width, height: height)
        let imageRef = image.cgImage(forProposedRect: &croppedRect, context: nil, hints: nil)
        guard let croppedImage = imageRef?.cropping(to: croppedRect) else {return Data()}
        let imageWithNewSize = NSImage(cgImage: croppedImage, size: NSSize(width: width, height: height))

        guard let data = imageWithNewSize.tiffRepresentation,
              let rep = NSBitmapImageRep(data: data),
            let imgData = rep.representation(using: .png, properties: [.compressionFactor: NSNumber(floatLiteral: 0.25)]) else {
            return imageWithNewSize.tiffRepresentation ?? Data()
        }
        return imgData
    }
}

With this cropping logic i am getting this output:

enter image description here

I think as image is AspectFill thats why its not getting cropped in perfect size as per selected frame. Here if you look at output: xpositon & width & heights are not perfect. Or probably i am not calculating these co-ordinates properly. Let me know the faults probably i am calculating someting wrong.

Shivam Tripathi
  • 1,405
  • 3
  • 19
  • 37
  • https://stackoverflow.com/questions/43720720/how-to-crop-a-uiimageview-to-a-new-uiimage-in-aspect-fill-mode - it is for UIKit but the principle is exactly the same. – matt Mar 25 '21 at 18:53
  • @matt i agree with you and your answer for iOS platform & i am also trying to follow the same in asked question. However we don't have UIGraphicsImageRenderer() and can't use draw function on CGImage as well. I am not dragging the view however it will be same as i am extracting co-ordinates but still i have a doubt that my x & y coordinates are calculated correctly. Also i would request its not right to make close request for this question as its a different question. – Shivam Tripathi Mar 26 '21 at 15:09
  • Are the width and height of the `NSImageView` the same? – Willeke Mar 27 '21 at 07:48
  • No width, height are not same @Willeke – Shivam Tripathi Mar 27 '21 at 12:37
  • When deciding which scaling factor to use you have to take the size of the image view into account. See https://stackoverflow.com/questions/43720720/how-to-crop-a-uiimageview-to-a-new-uiimage-in-aspect-fill-mode – Willeke Mar 27 '21 at 13:49
  • i am already deciding scaling factor using my CropImageView size. @Willeke – Shivam Tripathi Mar 27 '21 at 17:30
  • Could you post the minimal reproducible project to smth like github? – olha Mar 27 '21 at 21:47

1 Answers1

5

Note: the CropImageView class in the question is a subclass of NSImageView but the view is layer-hosting and the image is drawn by the layer, not by NSImageView. imageScaling is not used.

When deciding which scaling factor to use you have to take the size of the image view into account. If the image size is width:120, height:100 and the image view size is width:120, height 80 then image.size.width >= image.size.height is true and image.size.width/cropImgView.size.width is 1 but the image is scaled because image.size.height/cropImgView.size.height is 1.25. Calculate the horizontal and vertical scaling factors and use the largest.

See How to crop a UIImageView to a new UIImage in 'aspect fill' mode?

Here's the calculation of croppedRect assuming cropImgView.size returns self.layer!.bounds.size.

var scalingWidthFactor: CGFloat = image.size.width/cropImgView.size.width
var scalingHeightFactor: CGFloat = image.size.height/cropImgView.size.height
var xOffset: CGFloat = 0
var yOffset: CGFloat = 0
switch cropImgView.layer?.contentsGravity {
    case CALayerContentsGravity.resize: break
    case CALayerContentsGravity.resizeAspect:
        if scalingWidthFactor > scalingHeightFactor {
            scalingHeightFactor = scalingWidthFactor
            yOffset = (cropImgView.size.height - (image.size.height / scalingHeightFactor)) / 2
        }
        else {
            scalingWidthFactor = scalingHeightFactor
            xOffset = (cropImgView.size.width - (image.size.width / scalingWidthFactor)) / 2
        }
    case CALayerContentsGravity.resizeAspectFill:
        if scalingWidthFactor < scalingHeightFactor {
            scalingHeightFactor = scalingWidthFactor
            yOffset = (cropImgView.size.height - (image.size.height / scalingHeightFactor)) / 2
        }
        else {
            scalingWidthFactor = scalingHeightFactor
            xOffset = (cropImgView.size.width - (image.size.width / scalingWidthFactor)) / 2
        }
    default:
        print("contentsGravity \(String(describing: cropImgView.layer?.contentsGravity)) is not supported")
        return nil
}
let width = (self.cropImgView.finalPoint.x - self.cropImgView.startPoint.x) * scalingWidthFactor
let height = (self.cropImgView.startPoint.y - self.cropImgView.finalPoint.y) * scalingHeightFactor
let xPos = (self.cropImgView.startPoint.x - xOffset) * scalingWidthFactor
let yPos = (cropImgView.size.height - self.cropImgView.startPoint.y - yOffset) * scalingHeightFactor

var croppedRect: NSRect = NSRect(x: xPos, y: yPos, width: width, height: height)

Bugfix: cropImgView.finalPoint should be the corner of the selection, not the location of mouseUp. In CropImageView set self.finalPoint = newPoint in mouseDragged instead of mouseUp.

Willeke
  • 14,578
  • 4
  • 19
  • 47
  • @Willleke I really appreciate the time u put to think of something about the question as i am not getting much responses for MacOS, and i tried this solution but it's not making any difference. Because rarely we can find an image which has same width/height property wrt the imageView heights/width for any developer. I already tried a lot from matt's answer which u mentioned and same mentioned in question's comments as well but nothing worked out. After trying those answers this question was put. – Shivam Tripathi Apr 02 '21 at 09:34