Graduate

Update OpenCV.js face detection
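The change bundles the stock OpenCV.js `Utils` helper (utils.js) and reworks the HTML template so that webcam frames from `#videoInput` are run through a Haar cascade (`haarcascade_frontalface_default.xml`) and the detected faces are drawn onto `#canvasOutput`.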

function Utils(errorOutputId) { // eslint-disable-line no-unused-vars
    let self = this;
    this.errorOutput = document.getElementById(errorOutputId);

    const OPENCV_URL = 'opencv.js';
    this.loadOpenCv = function(onloadCallback) {
        let script = document.createElement('script');
        script.setAttribute('async', '');
        script.setAttribute('type', 'text/javascript');
        script.addEventListener('load', () => {
            if (cv.getBuildInformation) {
                console.log(cv.getBuildInformation());
                onloadCallback();
            } else {
                // WASM build: wait for the runtime to finish initializing.
                cv['onRuntimeInitialized'] = () => {
                    console.log(cv.getBuildInformation());
                    onloadCallback();
                };
            }
        });
        script.addEventListener('error', () => {
            self.printError('Failed to load ' + OPENCV_URL);
        });
        script.src = OPENCV_URL;
        let node = document.getElementsByTagName('script')[0];
        node.parentNode.insertBefore(script, node);
    };

    this.createFileFromUrl = function(path, url, callback) {
        let request = new XMLHttpRequest();
        request.open('GET', url, true);
        request.responseType = 'arraybuffer';
        request.onload = function() {
            if (request.readyState === 4) {
                if (request.status === 200) {
                    // Write the downloaded bytes into Emscripten's virtual filesystem.
                    let data = new Uint8Array(request.response);
                    cv.FS_createDataFile('/', path, data, true, false, false);
                    callback();
                } else {
                    self.printError('Failed to load ' + url + ' status: ' + request.status);
                }
            }
        };
        request.send();
    };

    this.loadImageToCanvas = function(url, canvasId) {
        let canvas = document.getElementById(canvasId);
        let ctx = canvas.getContext('2d');
        let img = new Image();
        img.crossOrigin = 'anonymous';
        img.onload = function() {
            canvas.width = img.width;
            canvas.height = img.height;
            ctx.drawImage(img, 0, 0, img.width, img.height);
        };
        img.src = url;
    };

    this.executeCode = function(textAreaId) {
        try {
            this.clearError();
            let code = document.getElementById(textAreaId).value;
            eval(code);
        } catch (err) {
            this.printError(err);
        }
    };

    this.clearError = function() {
        this.errorOutput.innerHTML = '';
    };

    this.printError = function(err) {
        if (typeof err === 'undefined') {
            err = '';
        } else if (typeof err === 'number') {
            if (!isNaN(err)) {
                if (typeof cv !== 'undefined') {
                    err = 'Exception: ' + cv.exceptionFromPtr(err).msg;
                }
            }
        } else if (typeof err === 'string') {
            let ptr = Number(err.split(' ')[0]);
            if (!isNaN(ptr)) {
                if (typeof cv !== 'undefined') {
                    err = 'Exception: ' + cv.exceptionFromPtr(ptr).msg;
                }
            }
        } else if (err instanceof Error) {
            err = err.stack.replace(/\n/g, '<br>');
        }
        this.errorOutput.innerHTML = err;
    };

    this.loadCode = function(scriptId, textAreaId) {
        let scriptNode = document.getElementById(scriptId);
        let textArea = document.getElementById(textAreaId);
        if (scriptNode.type !== 'text/code-snippet') {
            throw Error('Unknown code snippet type');
        }
        textArea.value = scriptNode.text.replace(/^\n/, '');
    };

    this.addFileInputHandler = function(fileInputId, canvasId) {
        let inputElement = document.getElementById(fileInputId);
        inputElement.addEventListener('change', (e) => {
            let files = e.target.files;
            if (files.length > 0) {
                let imgUrl = URL.createObjectURL(files[0]);
                self.loadImageToCanvas(imgUrl, canvasId);
            }
        }, false);
    };

    function onVideoCanPlay() {
        if (self.onCameraStartedCallback) {
            self.onCameraStartedCallback(self.stream, self.video);
        }
    }

    this.startCamera = function(resolution, callback, videoId) {
        const constraints = {
            'qvga': {width: {exact: 320}, height: {exact: 240}},
            'vga': {width: {exact: 640}, height: {exact: 480}}
        };
        let video = document.getElementById(videoId);
        if (!video) {
            video = document.createElement('video');
        }

        let videoConstraint = constraints[resolution];
        if (!videoConstraint) {
            videoConstraint = true;
        }

        navigator.mediaDevices.getUserMedia({video: videoConstraint, audio: false})
            .then(function(stream) {
                video.srcObject = stream;
                video.play();
                self.video = video;
                self.stream = stream;
                self.onCameraStartedCallback = callback;
                video.addEventListener('canplay', onVideoCanPlay, false);
            })
            .catch(function(err) {
                self.printError('Camera Error: ' + err.name + ' ' + err.message);
            });
    };

    this.stopCamera = function() {
        if (this.video) {
            this.video.pause();
            this.video.srcObject = null;
            this.video.removeEventListener('canplay', onVideoCanPlay);
        }
        if (this.stream) {
            this.stream.getVideoTracks()[0].stop();
        }
    };
}
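For context, a minimal sketch of how this helper is normally wired together on a page; the `errorMessage` and `videoInput` ids and the cascade filename are illustrative assumptions, not part of the helper itself:

// Hypothetical usage of Utils; ids and paths below are placeholders.
let utils = new Utils('errorMessage');            // element that receives error text
utils.loadOpenCv(() => {                          // fires once the WASM runtime is ready
    let cascadeFile = 'haarcascade_frontalface_default.xml';
    // Download the cascade XML and write it into Emscripten's virtual filesystem.
    utils.createFileFromUrl(cascadeFile, cascadeFile, () => {
        let classifier = new cv.CascadeClassifier();
        classifier.load(cascadeFile);             // the file now exists, so load() succeeds
        utils.startCamera('qvga', (stream, video) => {
            // camera is running; start grabbing frames from `video` here
        }, 'videoInput');
    });
});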
@@ -8,31 +8,29 @@
#container {
margin: 0px auto;
width: 640px;
height: 480px;
height: 960px;
border: 10px #333 solid;
}
#videoElement {
#videoInput {
width: 640px;
height: 480px;
background-color: #666;
}
#canvasOutput {
width: 640px;
height: 480px;
background-color: #666;
}
</style>
</head>
<body>
<div id="container">
<video autoplay="true" id="videoElement"> <!-- style="visibility: hidden"-->
</video>
<canvas id='canvasOutput' width='640' height='480'>
</canvas>
</div>
<script type='text/javascript' src="{{url_for('static', filename='js/opencv.js')}}"></script>
<script type='text/javascript' src="{{url_for('static', filename='js/utils.js')}}"></script>
<script type='text/javascript'>
var video = document.querySelector("#videoElement");
function main()
{
let video = document.getElementById("videoInput");
let canvasOutput = document.getElementById('canvasOutput');
let canvasContext = canvasOutput.getContext('2d');
if (navigator.mediaDevices.getUserMedia){
navigator.mediaDevices.getUserMedia({ video: true })
.then(function (stream) {
@@ -41,24 +39,22 @@ if (navigator.mediaDevices.getUserMedia){
console.log("Something went wrong!");
});
}
</script>
<script>
cv['onRuntimeInitialized']=()=>{
let video = document.getElementById('videoElement');
let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let gray = new cv.Mat();
let cap = new cv.VideoCapture(video);
let faces = new cv.RectVector();
let classifier = new cv.CascadeClassifier();
var streaming = true;
if (typeof streaming === 'undefined')
{
streaming = false;
}
classifier.load("{{url_for('static', filename='js/haarcascade_frontalface_default.xml')}}")
const FPS = 30;
function processVideo() {
let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let gray = new cv.Mat();
let cap = new cv.VideoCapture(video);
let faces = new cv.RectVector();
let classifier = new cv.CascadeClassifier();
var streaming = true;
let utils = new Utils('errorMessage'); //use utils class
let faceCascadeFile = "/static/js/haarcascade_frontalface_default.xml"
utils.createFileFromUrl(faceCascadeFile, faceCascadeFile, () => {
alert(faceCascadeFile);
classifier.load(faceCascadeFile);
});
const FPS = 30;
function processVideo() {
try {
if (!streaming) {
// clean and stop.
@@ -74,8 +70,10 @@ cv['onRuntimeInitialized']=()=>{
cap.read(src);
src.copyTo(dst);
cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
alert('d');
// detect faces.
classifier.detectMultiScale(gray, faces, 1.1, 3, 0);
alert('e');
// draw faces.
for (let i = 0; i < faces.size(); ++i) {
let face = faces.get(i);
@@ -83,6 +81,7 @@ cv['onRuntimeInitialized']=()=>{
let point2 = new cv.Point(face.x + face.width, face.y + face.height);
cv.rectangle(dst, point1, point2, [255, 0, 0, 255]);
}
alert('f');
cv.imshow('canvasOutput', dst);
// schedule the next one.
let delay = 1000/FPS - (Date.now() - begin);
@@ -90,10 +89,19 @@ cv['onRuntimeInitialized']=()=>{
} catch (err) {
console.log(err);
}
}
// schedule the first one.
setTimeout(processVideo, 0);
}
setTimeout(processVideo, 0);
}
</script>
</head>
<body onload="cv['onRuntimeInitialized']=()=>{ main() }">
<div id="container">
<video autoplay="true" id="videoInput" width=640 height=480> <!-- style="visibility: hidden"-->
</video>
<canvas id='canvasOutput' width=640 height=480>
</canvas>
</div>
</body>
</html>
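One caveat with the updated template: `utils.createFileFromUrl` fetches the cascade asynchronously, while `setTimeout(processVideo, 0)` schedules the first frame immediately, so `detectMultiScale` can run before `classifier.load` has happened and the resulting exceptions only surface in the console (and via the temporary `alert()` calls). A hedged alternative, assuming the same variable names as above, is to start the loop from inside the callback:

// Sketch only: defer the first frame until the cascade file is in place.
utils.createFileFromUrl(faceCascadeFile, faceCascadeFile, () => {
    classifier.load(faceCascadeFile);
    setTimeout(processVideo, 0);   // first frame scheduled only after load()
});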