Hi, I have captured an image using AVFoundation and I am not able to detect whether it is blurry.
I have tried the solution proposed in this link: OpenCV with Laplacian formula to detect image is blur or not in iOS, but it only works on images captured with the default camera picker (UIImagePickerController), not on my AVFoundation captures.
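For context, the image comes from an AVFoundation still-image capture along these lines (a simplified sketch; the session setup is omitted and stillImageOutput / detectBlurInImage: are illustrative names from my code):

#import <AVFoundation/AVFoundation.h>

// Simplified capture path (capture session configuration not shown).
AVCaptureConnection *connection =
    [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection
                                                    completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *error) {
    if (sampleBuffer == NULL) {
        return;
    }
    NSData *imageData =
        [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
    UIImage *image = [UIImage imageWithData:imageData];
    // This UIImage is what is passed into the blur-detection code below.
    [self detectBlurInImage:image];
}];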
Here is the blur-detection code I used:
// In the .mm file, the OpenCV umbrella header is needed:
#import <opencv2/opencv.hpp>

cv::Mat finalImage;
cv::Mat matImage = [self convertUIImageToCVMat:image];

// Convert the RGBA input to single-channel greyscale.
cv::Mat matImageGrey;
cv::cvtColor(matImage, matImageGrey, CV_RGBA2GRAY);
matImage.release();

// Median blur first to suppress noise before taking the Laplacian.
cv::Mat newEX;
const int MEDIAN_BLUR_FILTER_SIZE = 15; // must be an odd number
cv::medianBlur(matImageGrey, newEX, MEDIAN_BLUR_FILTER_SIZE);
matImageGrey.release();

// The Laplacian responds strongly to edges; a sharp image should give high values.
cv::Mat laplacianImage;
cv::Laplacian(newEX, laplacianImage, CV_8U);
newEX.release();

cv::Mat laplacianImage8bit;
laplacianImage.convertTo(laplacianImage8bit, CV_8UC1);
laplacianImage.release();

cv::cvtColor(laplacianImage8bit, finalImage, CV_GRAY2BGRA);
int rows = finalImage.rows;
int cols = finalImage.cols;

// Scan for the maximum Laplacian response over all pixels.
int maxLap = -16777216;
unsigned char *pixels = laplacianImage8bit.data;
for (int i = 0; i < (laplacianImage8bit.elemSize() * laplacianImage8bit.total()); i++) {
    if (pixels[i] > maxLap) {
        maxLap = pixels[i];
    }
}
pixels = NULL;
finalImage.release();

// kBlurThreshhold is a constant defined elsewhere in my code.
// If the strongest edge response is at or below it, the image is treated as blurred.
BOOL isBlur = (maxLap <= kBlurThreshhold) ? YES : NO;
NSLog(@"Max: %d, Cols: %d, Rows: %d", maxLap, cols, rows);
Please note that, as per a requirement, I have to resize the image to 1500 x 1500 before running the blur detection on it; a rough sketch of that step is below.
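The resize is essentially a cv::resize call like this (variable names and the interpolation flag are illustrative):

cv::Mat resized;
cv::resize(matImage, resized, cv::Size(1500, 1500), 0, 0, cv::INTER_AREA);
// resized is then passed through the Laplacian pipeline shown above.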
Any helpful solution will be appreciated. Thanks.