Edit part of image and merge it with the original again

Hi guys,


I want to apply a CIFilter to just a selected part of an image. When working with CIFilter, you get an output image. This output image is edited, but it is still only a part of the original image. I want to merge it with the original image again.


At the moment, I have the following code:

let colorCube = CIFilter(name: "CIColorCube")!
colorCube.setValue(size, forKey: "inputCubeDimension")
colorCube.setValue(data, forKey: "inputCubeData")

// overlay is the selected area that should be edited
let partOfImage = self.imageView.image?.crop(size: overlay.frame.size, offset: overlay.frame.origin)

// CIFilter expects a CIImage as input, so convert the cropped UIImage first
colorCube.setValue(CIImage(image: partOfImage!), forKey: kCIInputImageKey)
if let outImage = colorCube.outputImage {
    let context = CIContext(options: nil)
    // applying the CIFilter gives us an output image
    let outputImageRef = context.createCGImage(outImage, from: outImage.extent)
    let editedImagePart = UIImage(cgImage: outputImageRef!)

    // merge the output image back into the original image (imageView.image)
    imageView.image = imageView.image?.drawImageInRect(inputImage: editedImagePart, inRect: overlay.frame)
}
}
...

extension UIImage {
    // draws inputImage over self in imageRect and returns the merged image
    func drawImageInRect(inputImage: UIImage, inRect imageRect: CGRect) -> UIImage {
        // use the image's own scale so Retina images keep their resolution
        UIGraphicsBeginImageContextWithOptions(self.size, false, self.scale)
        self.draw(in: CGRect(x: 0.0, y: 0.0, width: self.size.width, height: self.size.height))
        inputImage.draw(in: imageRect)
        let newImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return newImage!
    }
    
    // function to crop part of the original image
    func crop(size: CGSize, offset: CGPoint, scale: CGFloat = 1.0) -> UIImage? {
        let rect = CGRect(x: offset.x * scale, y: offset.y * scale, width: size.width * scale, height: size.height * scale)
        if let cropped = self.cgImage?.cropping(to: rect) {
            return UIImage(cgImage: cropped)
        }
        return nil
    }
}

Does anyone have an idea how I can do that?

Replies

You can actually achieve all this with pure Core Image by blending the cropped and filtered part over the original again before displaying it:


let original = CIImage(image: self.imageView.image!) // you should probably get that from somewhere else
let croppedImage = original.cropped(to: overlay.frame)

colorCube.setValue(croppedImage, forKey: kCIInputImageKey)
let filteredImage = colorCube.outputImage!

let composition = filteredImage.composited(over: original)

imageView.image = UIImage(ciImage: composition) // this should work, but you can also use a context to create a UIImage
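
If you prefer a CGImage-backed UIImage, a minimal sketch of the context-based variant mentioned above (assuming you reuse a single CIContext instead of creating one per edit) could look like this:

let ciContext = CIContext(options: nil) // ideally create this once and reuse it
if let cgImage = ciContext.createCGImage(composition, from: original.extent) {
    imageView.image = UIImage(cgImage: cgImage)
}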

Hi Frank, thank you very much 🙂
I will try it out. Your reply almost made my day. There is one more question I would like to ask, about the image cropping.

The function setupView() is called within viewDidLoad() and basically initializes some UI components such as my image view.

func setupView() {
     
    let width = self.view.frame.width
    let height = self.view.frame.height
     ..

    imageView.frame = CGRect(x: width * (20 / width), y: navigationBar.frame.maxY, width: width * (335 / width), height: height * (335 / height))
    imageView.contentMode = .scaleToFill
    imageView.clipsToBounds = true


    overlay.layer.borderColor = UIColor.black.cgColor
    overlay.backgroundColor = UIColor.clear.withAlphaComponent(0.5)
    overlay.isHidden = true

     ...
     self.imageView.addSubview(overlay)
     self.view.addSubview(imageView)
}


The frame of my overlay - a rectangle that can be drawn with the fingers - is set in the following methods:

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    let touch = touches.first!
    let location = touch.location(in: self.imageView)

    lastPoint = location
}

override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
    let touch = touches.first!
    let location = touch.location(in: self.imageView)

    let currentPoint = location
    reDrawSelectionArea(fromPoint: lastPoint, toPoint: currentPoint)
}

func reDrawSelectionArea(fromPoint: CGPoint, toPoint: CGPoint) {
    overlay.isHidden = false

    // Calculate rect from the original point and last known point
    let rect = CGRect(x: min(fromPoint.x, toPoint.x), y: min(fromPoint.y, toPoint.y), width: abs(fromPoint.x - toPoint.x), height: abs(fromPoint.y - toPoint.y))

    overlay.frame = rect
}


Now suppose I draw a rectangle on my image view and this selected part should be cropped.

let partOfImage = self.imageView.image!.crop(rect: overlay.frame)

extension UIImage {

    func croppedImage(inRect rect: CGRect) -> UIImage {
        let rad: (Double) -> CGFloat = { deg in
            return CGFloat(deg / 180.0 * .pi)
        }
        var rectTransform: CGAffineTransform
        switch imageOrientation {
        case .left:
            let rotation = CGAffineTransform(rotationAngle: rad(90))
            rectTransform = rotation.translatedBy(x: 0, y: -size.height)
        case .right:
            let rotation = CGAffineTransform(rotationAngle: rad(-90))
            rectTransform = rotation.translatedBy(x: -size.width, y: 0)
        case .down:
            let rotation = CGAffineTransform(rotationAngle: rad(-180))
            rectTransform = rotation.translatedBy(x: -size.width, y: -size.height)
        default:
            rectTransform = .identity
        }
        rectTransform = rectTransform.scaledBy(x: scale, y: scale)
        let transformedRect = rect.applying(rectTransform)
        let imageRef = cgImage!.cropping(to: transformedRect)!
        let result = UIImage(cgImage: imageRef, scale: scale, orientation: imageOrientation)
        return result
    }
    
    func crop(rect: CGRect) -> UIImage? {
        var scaledRect = rect
        scaledRect.origin.x *= scale
        scaledRect.origin.y *= scale
        scaledRect.size.width *= scale
        scaledRect.size.height *= scale
        guard let imageRef: CGImage = cgImage?.cropping(to: scaledRect) else {
            return nil
        }
        return UIImage(cgImage: imageRef, scale: scale, orientation: imageOrientation)
    }
}


I tested both methods in my extension for cropping the image; however, the cropped part does not match the selection area. It lies outside the borders of my rectangle. What could be the reason for this? Is there an easy solution?


This would help me a lot. Thanks in advance!


Best regards,

Nazar Medeiros

You probably have two challenges there.


The first is caused by this line:

imageView.contentMode = .scaleToFill

That means that the image view can scale the image internally when displaying it, so there is a mismatch between the image view's frame (and thus also the overlay's frame) and the actual image dimensions. So you need some way to calculate the overlay's position relative to the image displayed underneath.
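
For illustration, here is a rough sketch of that conversion, assuming .scaleToFill and that the overlay's frame is given in the image view's bounds (the helper name and parameters are just examples, not existing API):

// Sketch: map a rect from the image view's coordinate space into the
// displayed image's point coordinates, assuming contentMode == .scaleToFill.
func imageRect(for overlayFrame: CGRect, in imageView: UIImageView) -> CGRect? {
    guard let image = imageView.image else { return nil }
    let scaleX = image.size.width / imageView.bounds.width
    let scaleY = image.size.height / imageView.bounds.height
    return CGRect(x: overlayFrame.origin.x * scaleX,
                  y: overlayFrame.origin.y * scaleY,
                  width: overlayFrame.width * scaleX,
                  height: overlayFrame.height * scaleY)
}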


The other is the rotation transform you apply to your cropping rect. Applying a rotation like that rotates the rectangle around the origin [0, 0] of your coordinate system. So when your frame, which only has positive values, is rotated by 90°, it's no longer in the first quadrant but in the second (or fourth, depending on the direction), which means it's now outside your image's bounds. Translating it by the negative size isn't enough because the midpoint still won't be the same.


You need to rotate the rect around its midpoint instead. You do so by first translating the rect's midpoint to [0, 0], applying the rotation, and then translating the rect back to its former midpoint. This helper should give you the transform that you need to apply to your rect:

extension UIImage {

    var orientationTransform: CGAffineTransform {
        var angle: CGFloat = 0
        switch self.imageOrientation {
            case .left:
                angle = .pi / 2
            case .right:
                angle = 3 * .pi / 2
            case .down:
                angle = .pi
            default: break
        }
        let rotationTransform = CGAffineTransform(rotationAngle: angle)

        let size = self.size
        let sizeAfterRotation = size.applying(rotationTransform)

        return CGAffineTransform(translationX: -size.width/2, y: -size.height/2)
                  .concatenating(rotationTransform)
                  .concatenating(CGAffineTransform(translationX: abs(sizeAfterRotation.width/2), y: abs(sizeAfterRotation.height/2)))
    }

}
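
As a usage sketch (the crop helper below is only an illustration with assumed names, not part of the code above), you could apply that transform, plus the image's pixel scale, to your rect before cropping:

extension UIImage {
    // Sketch only: crops using orientationTransform from above. Assumes `rect`
    // is given in the upright image's point coordinates (e.g. the converted overlay rect).
    func orientedCrop(to rect: CGRect) -> UIImage? {
        let toPixels = orientationTransform
            .concatenating(CGAffineTransform(scaleX: scale, y: scale))
        guard let cropped = cgImage?.cropping(to: rect.applying(toPixels)) else {
            return nil
        }
        return UIImage(cgImage: cropped, scale: scale, orientation: imageOrientation)
    }
}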