// apiRouter.js
const express = require('express');
const fs = require('fs');
const {
cv,
getDataFilePath,
drawBlueRect,
drawGreenRect
} = require('./utils');
const openCV = require('opencv4nodejs');
const router = express.Router();
//================================================================
// POST /videoResult — receives a base64 data-URL video in req.body[0].preview,
// saves it to ./data/temp.mp4, then extracts every frame to ./data/<n>.jpg.
// Responds immediately with { data: 'myData' }; frame extraction runs async.
router.post('/videoResult', function (req, res) {
  try {
    const preview = req.body[0].preview;
    // Strip the "data:<mime>;base64," prefix, then restore '+' characters
    // that arrived as spaces after URL decoding.
    let str = preview.replace(/^data:(.*?);base64,/, "");
    str = str.replace(/ /g, '+');
    fs.writeFile(`./data/temp.mp4`, str, 'base64', function (err) {
      if (err) {
        // Log instead of throwing: a throw inside this async callback
        // would crash the process, not reach the route's try/catch.
        console.log("err : " + err);
        return;
      }
      console.log("saved");
      const vCap = new openCV.VideoCapture('./data/temp.mp4');
      let cnt = 0;
      // Read frames until the capture is exhausted. BUG FIX: the original
      // never terminated — it reset the capture on an empty frame and
      // re-read forever, and called imwrite without the frame argument.
      while (true) {
        const frame = vCap.read();
        if (frame.empty) break;
        cv.imwrite('./data/' + cnt + '.jpg', frame);
        cnt++;
      }
    });
  } catch (err) {
    console.log("err : " + err);
  }
  // NOTE(review): the response is sent before extraction completes
  // (fire-and-forget), matching the original behavior.
  return res.json({ data: 'myData' });
});
//================================================================
// router.post('/faceRecognition', function (req, res) {
// try {
// let preview = req.body[0].preview;
// str = preview.replace(/^data:(.*?);base64,/, "");
// str = str.replace(/ /g, '+');
// // Save as a temporary file
// fs.writeFile(`./data/temp.jpg`, str, 'base64', function (err) {
// if (err) throw err;
// console.log("saved");
// detectFaceAndEyes('./data/temp.jpg');
// });
// } catch (err) {
// console.log('err: ' + err);
// }
// return res.json({ data: 'myData' });
// });
//================================================================
/**
 * Encode a UTF-8 string as Base64.
 * @param {string} plaintext - Text to encode.
 * @returns {string} Base64 representation of the input.
 */
function base64encode(plaintext) {
  const utf8Bytes = Buffer.from(plaintext, "utf8");
  return utf8Bytes.toString('base64');
}
/**
 * Decode a Base64 string back to UTF-8 text.
 * @param {string} base64text - Base64-encoded input.
 * @returns {string} Decoded UTF-8 string.
 */
function base64decode(base64text) {
  // FIX: removed a leftover debug console.log of the input length.
  return Buffer.from(base64text, 'base64').toString('utf8');
}
// function detectFaceAndEyes(filePath) {
// const image = cv.imread(filePath);
// const faceClassifier = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_DEFAULT);
// const eyeClassifier = new cv.CascadeClassifier(cv.HAAR_EYE);
// // detect faces
// const faceResult = faceClassifier.detectMultiScale(image.bgrToGray());
// if (!faceResult.objects.length) {
// throw new Error('No faces detected!');
// }
// const sortByNumDetections = result => result.numDetections
// .map((num, idx) => ({ num, idx }))
// .sort(((n0, n1) => n1.num - n0.num))
// .map(({ idx }) => idx);
// // get best result
// const faceRect = faceResult.objects[sortByNumDetections(faceResult)[0]];
// console.log('faceRects:', faceResult.objects);
// console.log('confidences:', faceResult.numDetections);
// // detect eyes
// const faceRegion = image.getRegion(faceRect);
// const eyeResult = eyeClassifier.detectMultiScale(faceRegion);
// console.log('eyeRects:', eyeResult.objects);
// console.log('confidences:', eyeResult.numDetections);
// // get best result
// const eyeRects = sortByNumDetections(eyeResult)
// .slice(0, 2)
// .map(idx => eyeResult.objects[idx]);
// // draw face detection
// drawBlueRect(image, faceRect);
// // draw eyes detection in face region
// eyeRects.forEach(eyeRect => drawGreenRect(faceRegion, eyeRect));
// cv.imwrite(`./data/temp2.jpg`, image);
// }
module.exports = router;