I am working in Swift. My requirement is to create a rectangular area on the camera preview, where I capture only the portion that is inside the rectangle, and the remaining portion should be displayed as blurred.
I tried many links, but most of them are in Objective-C. I know I have to add a UI layer over the AVCapture preview layer. This [Click Here] link helped me, but I could not achieve my objective.
I tried reducing the size of the image view in the storyboard, but in that case the camera fits the whole image into that small image view.
Here is a sample image.
Here is my existing code for the camera:
/// Captures video from the device's back camera and forwards every frame
/// (as a `CMSampleBuffer`) to its delegate on a serial background queue.
class VideoFeedMicr: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
{
    /// Serial queue on which AVFoundation delivers sample buffers.
    let outputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)

    /// The back-facing camera, or nil when none is present.
    let device: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
        // Return the first back camera found; no need to scan the rest.
        for device in devices where device.position == .Back {
            return device
        }
        return nil
    }()

    /// The camera input wired into the session by configure(); nil until then.
    var input: AVCaptureDeviceInput? = nil

    /// Receives one callback per captured frame.
    var delegate: VideoFeedDelegateMicr? = nil

    let session: AVCaptureSession = {
        let session = AVCaptureSession()
        session.sessionPreset = AVCaptureSessionPresetHigh
        return session
    }()

    let videoDataOutput: AVCaptureVideoDataOutput = {
        let output = AVCaptureVideoDataOutput()
        // 32BGRA frames are convenient for CoreGraphics/CoreImage processing.
        output.videoSettings = [ kCVPixelBufferPixelFormatTypeKey: NSNumber(unsignedInt: kCMPixelFormat_32BGRA) ]
        output.alwaysDiscardsLateVideoFrames = true
        return output
    }()

    /// Configures the capture pipeline and starts the session.
    /// - Throws: any error raised by `configure()`.
    func start() throws {
        // The original caught the error into a placeholder "Migrator" NSError
        // and rethrew it; plain propagation is equivalent and clearer.
        try configure()
        session.startRunning()
    }

    /// Stops the capture session.
    func stop() {
        session.stopRunning()
    }

    /// Wires the camera input and the video-data output into the session.
    /// - Throws: an `NSError` describing exactly which stage of setup failed
    ///   (no camera, input rejected, or output rejected), instead of the old
    ///   placeholder "Migrator" error.
    private func configure() throws {
        guard let camera = device else {
            throw NSError(domain: "VideoFeedMicr", code: 1,
                userInfo: [NSLocalizedDescriptionKey: "No back camera available."])
        }
        // AVCaptureDeviceInput(device:) already throws on failure; no need to
        // funnel it through AnyObject and a conditional cast.
        let newInput = try AVCaptureDeviceInput(device: camera)
        guard session.canAddInput(newInput) else {
            print("Video input error. Maybe unauthorised or no camera.")
            throw NSError(domain: "VideoFeedMicr", code: 2,
                userInfo: [NSLocalizedDescriptionKey: "Cannot add video input to the capture session."])
        }
        input = newInput
        session.addInput(newInput)

        videoDataOutput.setSampleBufferDelegate(self, queue: outputQueue)
        guard session.canAddOutput(videoDataOutput) else {
            print("Video output error.")
            throw NSError(domain: "VideoFeedMicr", code: 3,
                userInfo: [NSLocalizedDescriptionKey: "Cannot add video output to the capture session."])
        }
        session.addOutput(videoDataOutput)

        // Landscape-right matches the UI orientation forced by the controller.
        let connection = videoDataOutput.connectionWithMediaType(AVMediaTypeVideo)
        connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
    }

    /// Sample-buffer delegate callback; forwards the frame to our delegate.
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        delegate?.videoFeedMicr(self, didUpdateWithSampleBuffer: sampleBuffer)
    }
}
Here is where I call it:
/// View controller that runs the MICR cheque-scanning camera feed.
/// It owns a `VideoFeedMicr`, receives each captured frame through the
/// `VideoFeedDelegateMicr` callback, and shows the (cropped) result in
/// `chequeimage`.
class ViewMicrScanactivity: UIViewController,VideoFeedDelegateMicr
{
    // @IBOutlet weak var button: UIButton!
    @IBOutlet weak var button: UIButton!
    // @IBOutlet weak var imageView: UIImageView!

    /// Camera feed that delivers sample buffers to this controller.
    let feed: VideoFeedMicr = VideoFeedMicr()

    // Latches to true once a cheque is detected so later frames are skipped.
    // NOTE(review): assigned on the feed's video-output queue but read inside
    // the main-queue block below — confirm this cross-thread access is intended.
    var chequefound :Bool = false;
    var accountnumber = ""
    var amountlimit = ""

    /// Displays the live frame and, once a cheque is found, the cropped strip.
    @IBOutlet weak var chequeimage: UIImageView!

    override func viewDidLoad()
    {
        super.viewDidLoad()
        // Force the interface into landscape-right by setting the device's
        // private "orientation" key via KVC.
        let value = UIInterfaceOrientation.LandscapeRight.rawValue
        UIDevice.currentDevice().setValue(value, forKey: "orientation")
    }

    override func shouldAutorotate() -> Bool {
        return true;
    }

    override func awakeFromNib() {
        super.awakeFromNib()
        // Register for frame callbacks as soon as the nib is loaded.
        feed.delegate = self
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        startVideoFeed()
    }

    override func viewDidDisappear(animated: Bool) {
        super.viewDidDisappear(animated)
        feed.stop()
    }

    /// Starts the capture session; errors are currently swallowed silently.
    func startVideoFeed() {
        do {
            try feed.start()
            print("Video started.")
        }
        catch {
            // alert?
            // need to look into device permissions
        }
    }

    /// `VideoFeedDelegateMicr` callback — invoked once per captured frame on
    /// the feed's background output queue, NOT on the main thread.
    func videoFeedMicr(videoFeed: VideoFeedMicr, didUpdateWithSampleBuffer sampleBuffer: CMSampleBuffer!)
    {
        // FaceObscurationFilterMicr is a project type; presumably process()
        // returns true when a cheque is detected — confirm against its source.
        let filter = FaceObscurationFilterMicr(sampleBuffer: sampleBuffer)
        if(!chequefound)
        {
            chequefound = filter.process()
            // UIKit updates must happen on the main queue.
            dispatch_async(dispatch_get_main_queue(), { () -> Void in
                self.chequeimage.image = filter.inputuiimage!
                if(self.chequefound)
                {
                    // Crop a 60-high strip starting 90 points above the bottom
                    // edge — presumably where the cheque's MICR line sits.
                    filter.cropmicr = filter.cropToBounds(filter.inputuiimage! , X:0.0 , Y:Double(filter.inputuiimage!.size.height) - Double(90.0) ,width:Double(filter.inputuiimage!.size.width) , height:Double(60.0));
                    self.chequeimage.image = filter.cropmicr
                    // let image = UIImage(named: filter.cropmicr )
                    //let scaledImage = scaleImage(image!, maxDimension: 640)
                    self.performImageRecognitionnew(filter.cropmicr!)
                }
                // self.chequeimage.image = filter.cropmicr!
            })
        }
        else
        {
            print("chequefound = true")
        }
    }
}