I need to read images from HBase and convert them to an OpenCV Mat for face detection.
My code is as follows:
public static class FaceCountMapper extends TableMapper<Text, Text> {

    private CascadeClassifier faceDetector;

    public void setup(Context context) throws IOException, InterruptedException {
        // Load the Haar cascade file shipped via the distributed cache
        if (context.getCacheFiles() != null && context.getCacheFiles().length > 0) {
            URI mappingFileUri = context.getCacheFiles()[0];
            if (mappingFileUri != null) {
                System.out.println(mappingFileUri);
                faceDetector = new CascadeClassifier(mappingFileUri.toString());
            }
        }
        super.setup(context);
    } // setup()

    public ArrayList<Object> detectFaces(Mat image, String file_name) {
        ArrayList<Object> facemap = new ArrayList<Object>();
        MatOfRect faceDetections = new MatOfRect();
        faceDetector.detectMultiScale(image, faceDetections);
        System.out.println(String.format("Detected %d faces in %s",
                faceDetections.toArray().length, file_name));
        facemap.add(faceDetections.toArray().length);
        return facemap;
    }
    public void map(ImmutableBytesWritable row, Result result, Context context)
            throws InterruptedException, IOException {
        String file_name = Bytes.toString(result.getValue(Bytes.toBytes("Filename"), Bytes.toBytes("data")));
        String mimetype = Bytes.toString(result.getValue(Bytes.toBytes("mime"), Bytes.toBytes("data")));
        byte[] image_data = result.getValue(Bytes.toBytes("Data"), Bytes.toBytes("data"));

        // Decode the stored bytes first, then copy the raw pixel buffer into
        // the Mat (putting the encoded bytes straight in would not decode them;
        // this copy assumes a 3-byte BGR BufferedImage)
        BufferedImage bi = ImageIO.read(new ByteArrayInputStream(image_data));
        Mat mat = new Mat(bi.getHeight(), bi.getWidth(), CvType.CV_8UC3);
        byte[] pixels = ((DataBufferByte) bi.getRaster().getDataBuffer()).getData();
        mat.put(0, 0, pixels);

        detectFaces(mat, file_name);
    }
}
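As far as I understand, CascadeClassifier is a JNI wrapper, so the OpenCV native library has to be loaded once in every mapper JVM before the first OpenCV call. A sketch of what I think that would look like (the static block and the library name are my assumption, not part of the code above):

import org.opencv.core.Core;

public static class FaceCountMapper extends TableMapper<Text, Text> {
    static {
        // Assumption: libopencv_java*.so is visible on java.library.path on
        // every task node; NATIVE_LIBRARY_NAME resolves to e.g. "opencv_java2413"
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    }
    // ... rest of the mapper unchanged
}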
The job configuration is as follows:
Configuration conf = this.getConf();
conf.set("hbase.master", "101.192.0.122:16000");
conf.set("hbase.zookeeper.quorum", "101.192.0.122");
conf.setInt("hbase.zookeeper.property.clientPort", 2181);
conf.set("zookeeper.znode.parent", "/hbase-unsecure");
// Initialize and configure MapReduce job
Job job = Job.getInstance(conf);
job.setJarByClass(FaceCount3.class);
job.setMapperClass(FaceCountMapper.class);
job.getConfiguration().set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
job.getConfiguration().set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
Scan scan = new Scan();
scan.setCaching(500);       // the default of 1 would be bad for MapReduce jobs
scan.setCacheBlocks(false); // don't set to true for MR jobs
TableMapReduceUtil.initTableMapperJob("Image", // input HBase table name
scan, // Scan instance to control CF and attribute selection
FaceCountMapper.class, // mapper
null, // mapper output key
null, // mapper output value
job);
job.setOutputFormatClass(NullOutputFormat.class); // because we aren't emitting anything from the mapper
job.addCacheFile(new URI("/user/hduser/haarcascade_frontalface_alt.xml"));
job.addFileToClassPath(new Path("/user/hduser/hipi-2.1.0.jar"));
job.addFileToClassPath(new Path("/user/hduser/javacpp.jar"));
DistributedCache.addFileToClassPath(new Path("/user/hduser/haarcascade_frontalface_alt.xml"), conf);
conf.set("mapred.job.tracker", "local");
// Execute the MapReduce job and block until it completes
boolean success = job.waitForCompletion(true);
// Return success or failure
return success ? 0 : 1;
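My understanding is that the native .so itself would also need to be shipped to the task nodes, not just the jars; something along these lines (the file name and paths are placeholders I have not verified):

// Hypothetical: ship the JNI library via the distributed cache; the "#" fragment
// symlinks it into the task working directory under the given name
job.addCacheFile(new URI("/user/hduser/libopencv_java2413.so#libopencv_java2413.so"));
// Make the task working directory searchable for JNI libraries
job.getConfiguration().set("mapreduce.map.java.opts", "-Djava.library.path=.");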
When running, I am getting
java.lang.Exception: java.lang.UnsatisfiedLinkError: org.opencv.objdetect.CascadeClassifier.CascadeClassifier_1(Ljava/lang/String;)J
even though opencv.jar is provided in HADOOP_CLASSPATH. Is the problem that the jar only contains the Java wrappers, while the JNI library itself is never loaded in the task JVMs?
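A minimal check I would run outside MapReduce to see whether the native bindings load at all (assuming the OpenCV Java bindings are installed on the machine; the class name is just a placeholder):

import org.opencv.core.Core;

public class OpenCvLoadCheck {
    public static void main(String[] args) {
        // Fails with the same UnsatisfiedLinkError if libopencv_java*.so
        // is not on java.library.path
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        System.out.println("Loaded " + Core.NATIVE_LIBRARY_NAME + " (OpenCV " + Core.VERSION + ")");
    }
}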