0

There seems to be a problem clicking on a view and getting the right pixel data from an image inside the view.

func handleTap(gestureRecognizer: UIGestureRecognizer) {
    print("You tapped at \(gestureRecognizer.locationInView(self.view))")
     print("You tapped at \(gestureRecognizer.locationInView(view))")
     print(pickerImage.image!.size.height)
     print(pickerImage.image!.size.width)
    //imageViewOutlet.frame = CGRectMake(0 , 0, pickerImage.image!.size.width, pickerImage.image!.size.height)
     let point = gestureRecognizer.locationInView(self.view)
     let xPoint = (point.x * 0.91777)
     let yPoint = point.y
     let image : UIImage = pickerImage.image!;
     let color = image.getPixelColor(CGPointMake(xPoint, yPoint))
     colorText.backgroundColor = color;
     colorText2.text = color.htmlRGBColor
}

What happens is the photo that is inside a container of height 300 doesn't have the same pixel data of "You tapped at (369.0, 214.33332824707)"

Instead there is about 35px of the Top Layout Guide (where the battery bar is) and the x Scale has a difference of .9177 in scale when I click on both sides compared to an image on the screen.

Here is the declaration of the container where a picture is

        if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.SavedPhotosAlbum) {
            print("Button capture")
            imagePicker.delegate = self
            imagePicker.sourceType = UIImagePickerControllerSourceType.SavedPhotosAlbum;
            imagePicker.allowsEditing = false
            self.presentViewController(imagePicker, animated: true, completion: nil)
        }
    }

    func imagePickerController(picker: UIImagePickerController!, didFinishPickingImage image: UIImage!, editingInfo: NSDictionary!){
        self.dismissViewControllerAnimated(true, completion: { () -> Void in

        })

//        imageTemp.image = image
        pickerImage.image = resizeImage(image, newHeight: CGFloat(300))
       // pickerImage.image = image
    }

And here is how the code is extended outside the file

import Foundation
import UIKit

extension UIImage {
    func getPixelColor(pos: CGPoint) -> UIColor {
        let pixelData = CGDataProviderCopyData(CGImageGetDataProvider(self.CGImage))
        let data: UnsafePointer<UInt8> = CFDataGetBytePtr(pixelData)
        let pixelInfo: Int = ((Int(self.size.width) * Int(pos.y)) + Int(pos.x)) * 4
        let r = CGFloat(data[pixelInfo]) / CGFloat(255.0)
        let g = CGFloat(data[pixelInfo+1]) / CGFloat(255.0)
        let b = CGFloat(data[pixelInfo+2]) / CGFloat(255.0)
        let a = CGFloat(data[pixelInfo+3]) / CGFloat(255.0)
        return UIColor(red: r, green: g, blue: b, alpha: a);
    }
}

extension UIColor{
    func rgb() -> Int? {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        if self.getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) {
            let iRed = Int(fRed * 255.0)
            let iGreen = Int(fGreen * 255.0)
            let iBlue = Int(fBlue * 255.0)
            let iAlpha = Int(fAlpha * 255.0)
            //  (Bits 24-31 are alpha, 16-23 are red, 8-15 are green, 0-7 are blue).
            let rgb = (iAlpha << 24) + (iRed << 16) + (iGreen << 8) + iBlue
            return rgb
        } else {
            // Could not extract RGBA components:
            return 4
        }
    }
}

I am also using the extensions from this answer

What I am trying to do is only allow the screen to accept touches inside the container of height 300, and the UIImage is inside that container. When a user gets the coordinates of the container, I fail at connecting those coordinates to the pixel data of the image inside that container.

Attempts: Rescale a larger image to only height 300 with scale of x coordinate Decrease the y-scale by 30( the height of the top bar) Factor the x scale of the image to the size of the coordinates

Community
  • 1
  • 1

0 Answers0