Sunday, July 10, 2016

Face Recognition in Video with JavaCV

A few weeks ago, someone asked me whether I could help him do face recognition in a video. I didn't have much free time back then, so I couldn't look into it. This weekend I suddenly remembered it and wanted to give it a try.
There is a lot of room for improvement here (I sketch one such improvement after the listing), but for anyone who just wants to give it a shot, here is the code.

import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_objdetect;
import org.bytedeco.javacv.CanvasFrame;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.bytedeco.javacv.FrameRecorder;
import org.bytedeco.javacv.OpenCVFrameConverter;

import java.util.logging.Level;
import java.util.logging.Logger;

import static org.bytedeco.javacpp.helper.opencv_objdetect.cvHaarDetectObjects;
import static org.bytedeco.javacpp.opencv_core.*;
import static org.bytedeco.javacpp.opencv_imgproc.*;

public class HelloWC {

    // Haar cascade trained for frontal faces (ships with the OpenCV sources)
    private static final String CASCADE_FILE =
            "/home/thusitha/Desktop/opencv/opencv-3.1.0/data/haarcascades_cuda/haarcascade_frontalface_alt.xml";

    public static void main(String[] args) {
        try {
            // Open the input video and grab the first frame to size the preview window
            FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("mytestvideo.mp4"); // your video input file
            grabber.start();
            Frame grabbedImage = grabber.grab();

            CanvasFrame canvasFrame = new CanvasFrame("Video with JavaCV");
            canvasFrame.setCanvasSize(grabbedImage.imageWidth, grabbedImage.imageHeight);

            // Write the annotated frames to a new mp4, keeping the source frame rate
            FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("videowithfacedetection.mp4",
                    grabber.getImageWidth(), grabber.getImageHeight()); // specify your output path
            recorder.setFormat("mp4");
            recorder.setFrameRate(grabber.getFrameRate());
            recorder.setVideoBitrate(10 * 1024 * 1024);
            recorder.start();

            // Load the classifier and allocate working objects once, not once per frame
            opencv_objdetect.CvHaarClassifierCascade cascade =
                    new opencv_objdetect.CvHaarClassifierCascade(cvLoad(CASCADE_FILE));
            opencv_core.CvMemStorage storage = opencv_core.CvMemStorage.create();
            OpenCVFrameConverter.ToIplImage toIplImage = new OpenCVFrameConverter.ToIplImage();

            while (canvasFrame.isVisible() && (grabbedImage = grabber.grab()) != null) {
                IplImage convert = toIplImage.convert(grabbedImage);

                // The Haar detector works on a single-channel grayscale image
                IplImage grayImage = IplImage.create(convert.width(), convert.height(), IPL_DEPTH_8U, 1);
                cvCvtColor(convert, grayImage, CV_BGR2GRAY);

                // Reuse the memory storage for this frame's detections
                cvClearMemStorage(storage);
                opencv_core.CvSeq faces = cvHaarDetectObjects(grayImage, cascade, storage, 1.1, 1, 0);

                // Draw a blue box around every detected face
                for (int i = 0; i < faces.total(); i++) {
                    opencv_core.CvRect r = new opencv_core.CvRect(cvGetSeqElem(faces, i));
                    cvRectangle(convert, cvPoint(r.x(), r.y()),
                            cvPoint(r.x() + r.width(), r.y() + r.height()),
                            opencv_core.CvScalar.BLUE, 5, CV_AA, 0);
                }

                // Show the annotated frame and append it to the output video
                canvasFrame.showImage(toIplImage.convert(convert));
                recorder.record(toIplImage.convert(convert));
            }

            recorder.stop();
            grabber.stop();
            canvasFrame.dispose();
        } catch (FrameGrabber.Exception ex) {
            Logger.getLogger(HelloWC.class.getName()).log(Level.SEVERE, null, ex);
        } catch (FrameRecorder.Exception ex) {
            Logger.getLogger(HelloWC.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
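One obvious improvement is to move away from the legacy C-style API (IplImage, cvHaarDetectObjects) to the newer C++ wrappers that the same Bytedeco presets expose. The sketch below factors the per-frame detection into a small helper class built on CascadeClassifier and Mat. The class name FaceDetector and the method detectAndMark are just names I made up for this sketch, and while CascadeClassifier, RectVector and the drawing calls are what I'd expect from JavaCV 1.2 / OpenCV 3.1, I haven't tested this version, so treat it as a starting point rather than a drop-in replacement.

import org.bytedeco.javacpp.opencv_core.Mat;
import org.bytedeco.javacpp.opencv_core.Point;
import org.bytedeco.javacpp.opencv_core.Rect;
import org.bytedeco.javacpp.opencv_core.RectVector;
import org.bytedeco.javacpp.opencv_core.Scalar;
import org.bytedeco.javacpp.opencv_objdetect.CascadeClassifier;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;

import static org.bytedeco.javacpp.opencv_imgproc.*;

public class FaceDetector {

    private final CascadeClassifier cascade;
    private final OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();

    public FaceDetector(String cascadeFile) {
        // Load the Haar cascade once, up front, instead of once per frame
        cascade = new CascadeClassifier(cascadeFile);
    }

    // Detects faces in the given frame and returns the frame with rectangles drawn on it
    public Frame detectAndMark(Frame frame) {
        Mat image = converter.convert(frame);

        // Detection runs on a grayscale copy of the frame
        Mat gray = new Mat();
        cvtColor(image, gray, CV_BGR2GRAY);

        RectVector faces = new RectVector();
        cascade.detectMultiScale(gray, faces);

        // Draw a blue (BGR) box around each detection, same as the IplImage version
        for (long i = 0; i < faces.size(); i++) {
            Rect r = faces.get(i);
            rectangle(image, new Point(r.x(), r.y()),
                    new Point(r.x() + r.width(), r.y() + r.height()),
                    new Scalar(255, 0, 0, 0), 5, CV_AA, 0);
        }
        return converter.convert(image);
    }
}

In the main loop you would construct one FaceDetector with the cascade path and call detectAndMark(grabbedImage) before showing and recording the frame, which keeps the grabbing/recording code and the detection code nicely separated.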
