I'm new in OpenCv, so I'm going to try explain my problem. (Sorry for my poor English)
Objective
An Android application with OpenCv that, with an amount of images saved or downloaded(it doesn't matter where it come from), get the application recognize every image in the frame of the camera and draw a rectangle around the image found in that frame.
Something like this example but in my case, if there is more than one 'image match', it would be drawn as well all of them on the same frame or equivalent.
Problem
I was reading many posts and doing the opencv samples I got it works in a exe but when a try to pass it to an Android app it doesn't work properly.
I tried with SURF (It`s compiled the nonfree library in Android), ORB and and FLANN matcher algorithms but I just get different inaccurate lines for every directions or just points.
My try:
Java Activity based on sample 2 of Opencv
package org.opencv.jc.tct;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.WindowManager;
public class Tutorial2Activity extends Activity implements CvCameraViewListener2 {
private static final String TAG = "TCT";
private static final int CORRECT = 0;
private static final int FAILED = 1;
private Mat mRgba;
private Mat mIntermediateMat;
private Mat mGray;
private int res = -1;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("mixed_sample");
//Image_Interface.imageBase(); //TODO Carga los cuadros descargados de internet.
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial2Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial2_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial2_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
Thread th = new Thread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
res = ImageInterface.imageBase();
if(res == CORRECT)
ImageInterface.NativeDataTest();
}
});
th.start();
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mGray = new Mat(height, width, CvType.CV_8UC1);
}
public void onCameraViewStopped() {
mRgba.release();
mGray.release();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
if(res == CORRECT){
ImageInterface.imageFrame(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
}else if(res == FAILED){
Log.i(TAG, "Imagenes base no está lista");
}
return mGray;
}
}
Intermediate class
package org.opencv.jc.tct;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Environment;
import android.util.Log;
public class ImageInterface {
private static final String TAG = "TCT";
private static long[] imaArray;
public static int imageBase(){
int nativeRes = -1;
getBitmapFromURL();
if(imaArray.length > 0 && imaArray != null){
nativeRes = ProcessImagesBase(imaArray);
}
return nativeRes;
}
public static void imageFrame(long matAddrGr, long matAddrRgba){
ProcessImageFrame( matAddrGr, matAddrRgba);
}
public static void NativeDataTest(){
TestData();
}
// download data from internet //
private static void getBitmapFromURL() {
String[] imageUrl = {"http://s7.postimg.org/3yz6bb87f/libro2.jpg"};
imaArray = new long[imageUrl.length];
for(int i=0; i<imageUrl.length; i++){
try {
URL url = new URL(imageUrl[i]);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setDoInput(true);
connection.connect();
InputStream input = connection.getInputStream();
Bitmap myBitmap = BitmapFactory.decodeStream(input);
if(myBitmap != null){
Mat mat = new Mat();
Utils.bitmapToMat(myBitmap, mat);
if(!mat.empty()){
mat.convertTo(mat, CvType.CV_8UC1);
Log.i("ImageInterface", "Image downloaded type: "+mat.type());
}else
Log.i("ImageInterface", "Mat: "+ i + " Vacias");
imaArray[i] = mat.getNativeObjAddr();
Log.i("ImageInterface", "BITMAP: "+ i + "LLENO");
}else
Log.i("ImageInterface", "BITMAP VACIO");
} catch (IOException e) {
e.printStackTrace();
}
}
Log.i("ImageInterface", "Array de direcciones: "+imaArray.length);
}
//Get App Data Folder in Android
public File getDataFolder(Context context) {
File dataDir = null;
if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
dataDir = new File(Environment.getExternalStorageDirectory(), "files");
if(!dataDir.isDirectory()) {
dataDir.mkdirs();
}
}
if(!dataDir.isDirectory()) {
dataDir = context.getFilesDir();
}
return dataDir;
}
public void writeDataFolder(Context context){
String[] pathList = {"http://localhost/libro1.jpg",
"http://localhost/libro2.jpg"};
URL wallpaperURL = null;
for(int i=0; i < pathList.length;i++){
try {
wallpaperURL = new URL(pathList[i]);
} catch (MalformedURLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
InputStream inputStream = null;
try {
URLConnection connection = wallpaperURL.openConnection();
inputStream = new BufferedInputStream(wallpaperURL.openStream(), 10240);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
File cacheDir = context.getCacheDir();
File cacheFile = new File(cacheDir, "localFileName.jpg");
FileOutputStream outputStream = null;
try {
outputStream = new FileOutputStream(cacheFile);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
byte buffer[] = new byte[1024];
int dataSize;
try {
while ((dataSize = inputStream.read(buffer)) != -1) {
outputStream.write(buffer, 0, dataSize);
}
outputStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
private native static int ProcessImagesBase(long[] arrayImage);
private native static void ProcessImageFrame(long matAddrGr, long matAddrRgba);
private native static void TestData();
}
C++ JNI
JNIEXPORT jint JNICALL Java_org_opencv_jc_tct_ImageInterface_ProcessImagesBase(JNIEnv* env, jobject,jlongArray traindataaddr);
JNIEXPORT void JNICALL Java_org_opencv_jc_tct_ImageInterface_ProcessImageFrame(JNIEnv*,
jobject, jlong addrGray, jlong addrRgba);
JNIEXPORT void JNICALL Java_org_opencv_jc_tct_ImageInterface_TestData(JNIEnv*,jobject);
void trainDetector();
void trainExtractor();
void trainMatches(Mat& descriptors_scene,vector<vector<vector<DMatch> > >& matches);
void getGoodMatches(vector<vector<vector<DMatch> > >& matches, vector<vector<DMatch> >& tr_good_matches);
void perspectiveScene(vector<vector<DMatch> >& tr_good_matches, vector<KeyPoint>& keypoints_scene, Mat& img_scene);
const bool testing = false;
const int CORRECT = 0;
const int FAIL = 1;
static vector<vector<KeyPoint> > train_keypoints;
static vector<Mat> train_descriptors;
static vector<Mat> trainImages;
OrbFeatureDetector detector(400);
OrbDescriptorExtractor extractor;
FlannBasedMatcher matcher;
JNIEXPORT jint JNICALL Java_org_opencv_jc_tct_ImageInterface_ProcessImagesBase(JNIEnv* env,jobject, jlongArray traindataaddr) {
jint result = -1;
jsize a_len = env->GetArrayLength(traindataaddr);
jlong *traindata = env->GetLongArrayElements(traindataaddr,0);
#pragma omp parallel for
for(int k=0;k<a_len;k++)
{
Mat & newimage=*(Mat*)traindata[k];
trainImages.push_back(newimage);
}
// do the required manipulation on the images;
env->ReleaseLongArrayElements(traindataaddr,traindata,0);
trainDetector();
trainExtractor();
if (!train_keypoints.empty()){
LOGI("Created Keypoints!!!");
result = CORRECT;
}
else{
LOGE("Error creating the keypoints");
result = FAIL;
}
return result;
}
JNIEXPORT void JNICALL Java_org_opencv_jc_tct_ImageInterface_ProcessImageFrame(JNIEnv*,
jobject, jlong addrGray, jlong addrRgba) {
Mat& img_scene = *(Mat*) addrGray;
Mat& mRgb = *(Mat*) addrRgba;
vector<KeyPoint> keypoints_scene;
detector.detect(img_scene, keypoints_scene);
Mat descriptors_scene;
extractor.compute(img_scene, keypoints_scene, descriptors_scene);
//-- Step 3: Matching descriptor vectors using FLANN matcher
if(!descriptors_scene.empty()){
vector<vector<vector<DMatch> > > matches;
trainMatches(descriptors_scene, matches);
if(!matches.empty()){
LOGI("Matches [0]: %d",matches[0].size());
vector<vector<DMatch> > tr_good_matches;
getGoodMatches( matches, tr_good_matches);
if(!tr_good_matches.empty()){
LOGI("GOOD MATCHES FRAME size %d",tr_good_matches[0].size());
perspectiveScene(tr_good_matches, keypoints_scene, img_scene);
}else{
LOGE("MATCHES FRAME emtpy!");
}
}else
LOGE("MATCHES FRAME empty!");
}else{
LOGE("MAT Descriptor FRAME empty");
}
// Mat img_matches;
// drawMatches( img_object, keypoints_object, img_scene, keypoints_scene,
// good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
// vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );
}
JNIEXPORT void JNICALL Java_org_opencv_jc_tct_ImageInterface_TestData(JNIEnv*,jobject){
if(!trainImages.empty())
LOGI("TEST Trainimages %d",trainImages.size());
else
LOGI("TEST TrainImages empty");
}
void trainDetector() {
LOGI("Train Detector");
#pragma omp parallel for
for (int i = 0; i < trainImages.size(); i++) {
vector<KeyPoint> obj_kp;
detector.detect(trainImages[i], obj_kp);
if (!obj_kp.empty()) {
train_keypoints.push_back(obj_kp);
} else if (testing) {
LOGE("Error: There are not keypoints. Func: trainDetector");
}
}
LOGI("Trainimages size %d ",trainImages.size());
}
void trainExtractor() {
#pragma omp parallel for
for (int i = 0; i < trainImages.size(); i++) {
Mat* obj_desc = new Mat();
extractor.compute(trainImages[i], train_keypoints[i], *obj_desc);
if (!(*obj_desc).empty()) {
train_descriptors.push_back(*obj_desc);
} else if (testing) {
LOGI("Error: Problem with descriptors. Func: trainExtractor");
}
}
LOGE("Train descriptors: %d",train_descriptors.size());
}
void getGoodMatches(vector<vector<vector<DMatch> > >& matches,vector<vector<DMatch> >& tr_good_matches) {
//-- Quick calculation of max and min distances between keypoints
for (int i = 0; i < train_descriptors.size(); i++) {
double max_dist = 0;
double min_dist = 100;
Mat obj_desc = train_descriptors[i];
vector<DMatch> gm;
for (int j = 0; j < min(obj_desc.rows - 1, (int) matches[i].size());j++) {
if (matches[i][j][0].distance < matches[i][j][1].distance
&& ((int) matches[i][j].size() <= 2
&& (int) matches[i][j].size() > 0)) {
gm.push_back(matches[i][j][0]);
}
}
tr_good_matches.push_back(gm);
}
}
void perspectiveScene(vector<vector<DMatch> >& tr_good_matches, vector<KeyPoint>& keypoints_scene, Mat& img_scene) {
LOGI("PERS FUNCTION");
//-- Localize the object
std::vector<Point2f> obj;
std::vector<Point2f> scene;
int R, G, B;
for (int i = 0; i < tr_good_matches.size(); i++) {
LOGI("PF: For train size[%d]: %d",i,tr_good_matches[i].size());
obj.clear();
scene.clear();
if (tr_good_matches[i].size() >= 4) {
for (int j = 0; j < tr_good_matches[i].size(); j++) {
//-- Get the keypoints from the good matches
obj.push_back(train_keypoints[i][tr_good_matches[i][j].queryIdx].pt);
scene.push_back(keypoints_scene[tr_good_matches[i][j].trainIdx].pt);
}
LOGI("Obj size: %d, scene size: %d",obj.size(),scene.size());
if (!obj.empty() && !scene.empty()) {
LOGI("OBJ size: %d, scene size: %d",obj.size(),scene.size());
Mat H = findHomography(obj, scene, CV_RANSAC);
//-- Get the corners from the image_1 ( the object to be "detected" )
vector<Point2f> obj_corners(4);
obj_corners[0] = cvPoint(0, 0);
obj_corners[1] = cvPoint(trainImages[i].cols, 0);
obj_corners[2] = cvPoint(trainImages[i].cols,
trainImages[i].rows);
obj_corners[3] = cvPoint(0, trainImages[i].rows);
vector<Point2f> scene_corners(4);
perspectiveTransform(obj_corners, scene_corners, H);
//getPerspectiveTransform(obj_corners,scene_corners);
R = rand() % 256;
G = rand() % 256;
B = rand() % 256;
//-- Draw lines between the corners (the mapped object in the scene - image_2 )
line(img_scene, scene_corners[0], scene_corners[1],
Scalar(R, G, B), 4);
line(img_scene, scene_corners[1], scene_corners[2],
Scalar(R, G, B), 4);
line(img_scene, scene_corners[2], scene_corners[3],
Scalar(R, G, B), 4);
line(img_scene, scene_corners[3], scene_corners[0],
Scalar(R, G, B), 4);
} else{
LOGE("Error: Problem with goodmatches. Func: perspectiveScene.");
}
}
}
}
void trainMatches( Mat& descriptors_scene, vector<vector<vector<DMatch> > >& matches) {
for (int i = 0; i < train_descriptors.size(); i++) {
vector<vector<DMatch> > obj_matches;
Mat desc = train_descriptors[i];
if(desc.type() != CV_32F){
desc.convertTo(desc,CV_32F);
}
if(descriptors_scene.type() != CV_32F){
descriptors_scene.convertTo(descriptors_scene,CV_32F);
}
matcher.knnMatch(desc, descriptors_scene, obj_matches, 2);
if (!obj_matches.empty()) {
matches.push_back(obj_matches);
} else if (testing) {
LOGE("Error: Problem with matches. Func: trainMatches");
}
}
}
}
Could you say me what is it wrong?
Thanks for any help!