Showing 5 changed files with 83 additions and 192 deletions
1 | const express = require('express'); | 1 | const express = require('express'); |
2 | const fs = require('fs'); | 2 | const fs = require('fs'); |
3 | -const { | 3 | +const ffmpeg = require('ffmpeg'); |
4 | - cv, | 4 | +const request = require('request') |
5 | - getDataFilePath, | ||
6 | - drawBlueRect, | ||
7 | - drawGreenRect | ||
8 | -} = require('./utils'); | ||
9 | - | ||
10 | -const openCV = require('opencv4nodejs'); | ||
11 | 5 | ||
12 | const router = express.Router(); | 6 | const router = express.Router(); |
13 | - | 7 | +const path = require('path'); |
14 | //================================================================ | 8 | //================================================================ |
15 | 9 | ||
16 | router.post('/videoResult', function (req, res) { | 10 | router.post('/videoResult', function (req, res) { |
17 | 11 | ||
18 | try { | 12 | try { |
19 | - let preview = req.body[0].preview; | 13 | + let preview = req.body[0].preview; |
20 | - | 14 | + |
21 | - str = preview.replace(/^data:(.*?);base64,/, ""); | 15 | + str = preview.replace(/^data:(.*?);base64,/, ""); |
22 | - str = str.replace(/ /g, '+'); | 16 | + str = str.replace(/ /g, '+'); |
23 | - | 17 | + |
18 | + | ||
19 | + fs.writeFileSync(`./data/temp.mp4`, str, 'base64', function (err) { | ||
20 | + if (err) throw err; | ||
21 | + console.log("video saved"); | ||
22 | + return; | ||
23 | + }) | ||
24 | + | ||
25 | + let process = new ffmpeg(`./data/temp.mp4`); | ||
26 | + | ||
27 | + | ||
28 | + detectedImgFile = "test.jpg"; // change to null later | ||
29 | + process.then(function (video) { | ||
30 | + video.fnExtractFrameToJPG(__dirname + "/data", | ||
31 | + { | ||
32 | + every_n_seconds: 1, | ||
33 | + file_name: 'frame_%s' | ||
34 | + }, function (error, files) { | ||
35 | + if (!error) | ||
36 | + console.log('###1 Frames =>' + files); | ||
37 | + console.log("###2 갯수 => " + files.length) | ||
38 | + | ||
39 | + | ||
40 | + console.log("###3 첫번째 파일 => " + files[0]); // 마지막 파일은 영상임 | ||
41 | + let base64str = base64_encode(files[0]); | ||
42 | + console.log("###4 base64str => " + base64str); | ||
43 | + console.log("##### for") | ||
44 | + for(var i=0;i<files.length-1;i++){ | ||
45 | + request.post({ | ||
46 | + url: 'http://101.101.210.73/process', | ||
47 | + form: { | ||
48 | + 'data': base64_encode(files[0]) | ||
49 | + }, | ||
50 | + json: true | ||
51 | + }, (err, response, body) => { | ||
52 | + console.log(body) | ||
53 | + }) | ||
54 | + } | ||
55 | + /** | ||
56 | + * TODO | ||
57 | + * Loop over the frames, call the Python API for each one, and | ||
58 | + * if any response comes back true, return the detected image file path to the front end | ||
59 | + */ | ||
60 | + detectedImgFile=null; | ||
61 | + } | ||
62 | + ) | ||
63 | + }) | ||
24 | 64 | ||
25 | - fs.writeFile(`./data/temp.mp4`, str, 'base64', function (err) { | ||
26 | - if (err) throw err; | ||
27 | - console.log("saved"); | ||
28 | - const vCap = new openCV.VideoCapture('./data/temp.mp4') | ||
29 | - const delay = 1000; | ||
30 | - let done = false; | ||
31 | - let cnt = 0; | ||
32 | - while (!done) { | ||
33 | - let frame = vCap.read(); | ||
34 | - cv.imwrite('./data/' + cnt + '.jpg'); | ||
35 | - cnt++; | ||
36 | - if (frame.empty) { | ||
37 | - vCap.reset(); | ||
38 | - frame = vCap.read(); | ||
39 | - } | ||
40 | - } | ||
41 | - }); | ||
42 | } catch (err) { | 65 | } catch (err) { |
43 | - console.log("err : " + err); | 66 | + console.error(err); |
44 | } | 67 | } |
45 | 68 | ||
46 | - return res.json({ data: 'myData' }); | 69 | + return res.json({ data: detectedImgFile }); |
47 | }); | 70 | }); |
48 | 71 | ||
49 | -//================================================================ | 72 | +function base64_encode(file) { |
50 | - | 73 | + // read binary data |
51 | -// router.post('/faceRecognition', function (req, res) { | 74 | + var bitmap = fs.readFileSync(file); |
52 | - | 75 | + // convert binary data to base64 encoded string |
53 | -// try { | 76 | + return new Buffer(bitmap).toString('base64'); |
54 | -// let preview = req.body[0].preview; | ||
55 | - | ||
56 | -// str = preview.replace(/^data:(.*?);base64,/, ""); | ||
57 | -// str = str.replace(/ /g, '+'); | ||
58 | - | ||
59 | -// // 임시파일 저장 | ||
60 | -// fs.writeFile(`./data/temp.jpg`, str, 'base64', function (err) { | ||
61 | -// if (err) throw err; | ||
62 | -// console.log("saved"); | ||
63 | -// detectFaceAndEyes('./data/temp.jpg'); | ||
64 | -// }); | ||
65 | - | ||
66 | - | ||
67 | - | ||
68 | -// } catch (err) { | ||
69 | -// console.log('err: ' + err); | ||
70 | -// } | ||
71 | - | ||
72 | -// return res.json({ data: 'myData' }); | ||
73 | -// }); | ||
74 | - | ||
75 | -//================================================================ | ||
76 | - | ||
77 | -function base64encode(plaintext) { | ||
78 | - return Buffer.from(plaintext, "utf8").toString('base64'); | ||
79 | -} | ||
80 | - | ||
81 | -function base64decode(base64text) { | ||
82 | - console.log(base64text.length); | ||
83 | - return Buffer.from(base64text, 'base64').toString('utf8'); | ||
84 | } | 77 | } |
85 | 78 | ||
86 | -// function detectFaceAndEyes(filePath) { | ||
87 | -// const image = cv.imread(filePath); | ||
88 | -// const faceClassifier = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_DEFAULT); | ||
89 | -// const eyeClassifier = new cv.CascadeClassifier(cv.HAAR_EYE); | ||
90 | - | ||
91 | -// // detect faces | ||
92 | -// const faceResult = faceClassifier.detectMultiScale(image.bgrToGray()); | ||
93 | - | ||
94 | -// if (!faceResult.objects.length) { | ||
95 | -// throw new Error('No faces detected!'); | ||
96 | -// } | ||
97 | - | ||
98 | -// const sortByNumDetections = result => result.numDetections | ||
99 | -// .map((num, idx) => ({ num, idx })) | ||
100 | -// .sort(((n0, n1) => n1.num - n0.num)) | ||
101 | -// .map(({ idx }) => idx); | ||
102 | - | ||
103 | -// // get best result | ||
104 | -// const faceRect = faceResult.objects[sortByNumDetections(faceResult)[0]]; | ||
105 | -// console.log('faceRects:', faceResult.objects); | ||
106 | -// console.log('confidences:', faceResult.numDetections); | ||
107 | - | ||
108 | -// // detect eyes | ||
109 | -// const faceRegion = image.getRegion(faceRect); | ||
110 | -// const eyeResult = eyeClassifier.detectMultiScale(faceRegion); | ||
111 | -// console.log('eyeRects:', eyeResult.objects); | ||
112 | -// console.log('confidences:', eyeResult.numDetections); | ||
113 | - | ||
114 | -// // get best result | ||
115 | -// const eyeRects = sortByNumDetections(eyeResult) | ||
116 | -// .slice(0, 2) | ||
117 | -// .map(idx => eyeResult.objects[idx]); | ||
118 | - | ||
119 | - | ||
120 | -// // draw face detection | ||
121 | -// drawBlueRect(image, faceRect); | ||
122 | - | ||
123 | -// // draw eyes detection in face region | ||
124 | -// eyeRects.forEach(eyeRect => drawGreenRect(faceRegion, eyeRect)); | ||
125 | - | ||
126 | -// cv.imwrite(`./data/temp2.jpg`, image); | ||
127 | -// } | ||
128 | - | ||
129 | module.exports = router; | 79 | module.exports = router; |
... | \ No newline at end of file | ... | \ No newline at end of file |
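For reference, below is a minimal sketch of the new /videoResult flow gathered into one file, written so that the response is only sent after frame extraction and the per-frame detection requests have finished (in the hunk above, `res.json({ data: detectedImgFile })` runs before the ffmpeg promise resolves, and the callback passed to `fs.writeFileSync` is never invoked). The `ffmpeg` and `request` calls mirror the ones used in the diff; `DETECTION_URL`, `detectFrame`, and the `body.detected` response field are illustrative assumptions, not part of the commit.

```js
const express = require('express');
const fs = require('fs');
const path = require('path');
const ffmpeg = require('ffmpeg');
const request = require('request');

const router = express.Router();

// Detection service used by the commit; the response shape is an assumption.
const DETECTION_URL = 'http://101.101.210.73/process';

// Read a file and return its contents as a base64 string.
function base64Encode(file) {
  return fs.readFileSync(file).toString('base64');
}

// POST a single frame to the detection service and resolve with the parsed body.
function detectFrame(frameFile) {
  return new Promise((resolve, reject) => {
    request.post(
      { url: DETECTION_URL, form: { data: base64Encode(frameFile) }, json: true },
      (err, response, body) => (err ? reject(err) : resolve(body))
    );
  });
}

router.post('/videoResult', async (req, res) => {
  try {
    // Strip the data-URL prefix and restore '+' characters mangled in transit.
    const preview = req.body[0].preview;
    const base64Video = preview.replace(/^data:(.*?);base64,/, '').replace(/ /g, '+');

    // Persist the uploaded video so ffmpeg can read it from disk.
    const videoPath = path.join(__dirname, 'data', 'temp.mp4');
    fs.writeFileSync(videoPath, base64Video, 'base64');

    // Extract one frame per second, as in the route above.
    const video = await new ffmpeg(videoPath);
    const files = await new Promise((resolve, reject) => {
      video.fnExtractFrameToJPG(
        path.join(__dirname, 'data'),
        { every_n_seconds: 1, file_name: 'frame_%s' },
        (error, extracted) => (error ? reject(error) : resolve(extracted))
      );
    });

    // Per the comment in the diff, the last entry is the source video, not a frame.
    const frames = files.slice(0, -1);

    // Ask the detection service about each frame; stop at the first positive hit.
    let detectedImgFile = null;
    for (const frame of frames) {
      const body = await detectFrame(frame);
      if (body && body.detected) { // assumed field name in the service response
        detectedImgFile = frame;
        break;
      }
    }

    return res.json({ data: detectedImgFile });
  } catch (err) {
    console.error(err);
    return res.status(500).json({ error: 'video processing failed' });
  }
});

module.exports = router;
```

Firing the detection requests sequentially keeps the sketch simple; if the detection service tolerates concurrent calls, mapping the frames through `Promise.all` would cut the total round-trip time.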
Back-end/package.json (new file, 0 → 100644)
1 | +{ | ||
2 | + "name": "back-end", | ||
3 | + "version": "1.0.0", | ||
4 | + "description": "", | ||
5 | + "main": "apiRouter.js", | ||
6 | + "scripts": { | ||
7 | + "test": "echo \"Error: no test specified\" && exit 1", | ||
8 | + "start": "node server.js" | ||
9 | + }, | ||
10 | + "author": "", | ||
11 | + "license": "ISC", | ||
12 | + "dependencies": { | ||
13 | + "body-parser": "^1.19.0", | ||
14 | + "cors": "^2.8.5", | ||
15 | + "express": "^4.17.1", | ||
16 | + "ffmpeg": "0.0.4", | ||
17 | + "opencv4nodejs": "^5.6.0", | ||
18 | + "path": "^0.12.7", | ||
19 | + "request": "^2.88.2" | ||
20 | + } | ||
21 | +} |
... | @@ -12,5 +12,5 @@ app.use(bodyParser.urlencoded({limit: '100mb', extended: true})); | ... | @@ -12,5 +12,5 @@ app.use(bodyParser.urlencoded({limit: '100mb', extended: true})); |
12 | app.use(bodyParser()); | 12 | app.use(bodyParser()); |
13 | app.use('/api', api); | 13 | app.use('/api', api); |
14 | 14 | ||
15 | -const port = 3002; | 15 | +const port = 3003; |
16 | app.listen(port, () => console.log(`Node server started: ${port}`)); | 16 | app.listen(port, () => console.log(`Node server started: ${port}`)); |
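Only the tail of the server entry point appears in the hunk above. The following is a hedged sketch of how the full file presumably fits together, based on the dependencies declared in Back-end/package.json (express, cors, body-parser) and the lines shown here; the require path for the router and the cors setup are assumptions.

```js
const express = require('express');
const cors = require('cors');
const bodyParser = require('body-parser');

const api = require('./apiRouter'); // router from the first diff (assumed relative path)

const app = express();

app.use(cors());
// Large body limit so base64-encoded video previews fit in a single request.
app.use(bodyParser.json({ limit: '100mb' }));
app.use(bodyParser.urlencoded({ limit: '100mb', extended: true }));
app.use('/api', api);

const port = 3003;
app.listen(port, () => console.log(`Node server started: ${port}`));
```

Here `bodyParser.json(...)` stands in for the bare `bodyParser()` call shown in the hunk, which recent body-parser versions deprecate in favor of the individual json/urlencoded middlewares.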
Back-end/utils.js (deleted, 100644 → 0)
1 | -const path = require('path'); | ||
2 | -const cv = require('opencv4nodejs'); | ||
3 | - | ||
4 | -exports.cv = cv; | ||
5 | - | ||
6 | -const dataPath = path.resolve(__dirname, './data'); | ||
7 | -exports.dataPath = dataPath; | ||
8 | -exports.getDataFilePath = fileName => { | ||
9 | - let targetPath = path.resolve(dataPath, fileName); | ||
10 | - return targetPath; | ||
11 | -} | ||
12 | - | ||
13 | -const grabFrames = (videoFile, delay, onFrame) => { | ||
14 | - const cap = new cv.VideoCapture(videoFile); | ||
15 | - let done = false; | ||
16 | - const intvl = setInterval(() => { | ||
17 | - let frame = cap.read(); | ||
18 | - // loop back to start on end of stream reached | ||
19 | - if (frame.empty) { | ||
20 | - cap.reset(); | ||
21 | - frame = cap.read(); | ||
22 | - } | ||
23 | - onFrame(frame); | ||
24 | - | ||
25 | - const key = cv.waitKey(delay); | ||
26 | - done = key !== -1 && key !== 255; | ||
27 | - if (done) { | ||
28 | - clearInterval(intvl); | ||
29 | - console.log('Key pressed, exiting.'); | ||
30 | - } | ||
31 | - }, 0); | ||
32 | -}; | ||
33 | -exports.grabFrames = grabFrames; | ||
34 | - | ||
35 | -exports.runVideoDetection = (src, detect) => { | ||
36 | - grabFrames(src, 1, frame => { | ||
37 | - detect(frame); | ||
38 | - }); | ||
39 | -}; | ||
40 | - | ||
41 | -exports.drawRectAroundBlobs = (binaryImg, dstImg, minPxSize, fixedRectWidth) => { | ||
42 | - const { | ||
43 | - centroids, | ||
44 | - stats | ||
45 | - } = binaryImg.connectedComponentsWithStats(); | ||
46 | - | ||
47 | - // pretend label 0 is background | ||
48 | - for (let label = 1; label < centroids.rows; label += 1) { | ||
49 | - const [x1, y1] = [stats.at(label, cv.CC_STAT_LEFT), stats.at(label, cv.CC_STAT_TOP)]; | ||
50 | - const [x2, y2] = [ | ||
51 | - x1 + (fixedRectWidth || stats.at(label, cv.CC_STAT_WIDTH)), | ||
52 | - y1 + (fixedRectWidth || stats.at(label, cv.CC_STAT_HEIGHT)) | ||
53 | - ]; | ||
54 | - const size = stats.at(label, cv.CC_STAT_AREA); | ||
55 | - const blue = new cv.Vec(255, 0, 0); | ||
56 | - if (minPxSize < size) { | ||
57 | - dstImg.drawRectangle( | ||
58 | - new cv.Point(x1, y1), | ||
59 | - new cv.Point(x2, y2), | ||
60 | - { color: blue, thickness: 2 } | ||
61 | - ); | ||
62 | - } | ||
63 | - } | ||
64 | -}; | ||
65 | - | ||
66 | -const drawRect = (image, rect, color, opts = { thickness: 2 }) => | ||
67 | - image.drawRectangle( | ||
68 | - rect, | ||
69 | - color, | ||
70 | - opts.thickness, | ||
71 | - cv.LINE_8 | ||
72 | - ); | ||
73 | - | ||
74 | -exports.drawRect = drawRect; | ||
75 | -exports.drawBlueRect = (image, rect, opts = { thickness: 2 }) => | ||
76 | - drawRect(image, rect, new cv.Vec(255, 0, 0), opts); | ||
77 | -exports.drawGreenRect = (image, rect, opts = { thickness: 2 }) => | ||
78 | - drawRect(image, rect, new cv.Vec(0, 255, 0), opts); | ||
79 | -exports.drawRedRect = (image, rect, opts = { thickness: 2 }) => | ||
80 | - drawRect(image, rect, new cv.Vec(0, 0, 255), opts); | ||
... | \ No newline at end of file | ... | \ No newline at end of file |