// apiRouter.js
const express = require('express');
const fs = require('fs');
const {
    cv,
    getDataFilePath,
    drawBlueRect,
    drawGreenRect
} = require('./utils');

const openCV = require('opencv4nodejs');

const router = express.Router();

//================================================================

router.post('/videoResult', function (req, res) {
    // Decodes a base64 data-URL video from the request body, writes it to a
    // temporary mp4 file, then extracts every frame to ./data/<n>.jpg.
    // Responds immediately; frame extraction happens asynchronously.
    try {
        const preview = req.body[0].preview;

        // Strip the data-URL prefix and restore '+' characters that were
        // turned into spaces during transport.
        const base64Data = preview
            .replace(/^data:(.*?);base64,/, "")
            .replace(/ /g, '+');

        fs.writeFile(`./data/temp.mp4`, base64Data, 'base64', function (err) {
            if (err) {
                // BUG FIX: `throw err` inside this async callback would escape
                // the outer try/catch and crash the process; log instead.
                console.log("err : " + err);
                return;
            }
            console.log("saved");
            const vCap = new openCV.VideoCapture('./data/temp.mp4');
            let cnt = 0;
            // BUG FIX: the original loop never terminated (`done` was never set
            // and vCap.reset() restarted the video on an empty frame). Read
            // until the capture yields an empty frame, then stop.
            for (let frame = vCap.read(); !frame.empty; frame = vCap.read()) {
                // BUG FIX: imwrite was called without the frame argument,
                // so no image data was ever written.
                cv.imwrite('./data/' + cnt + '.jpg', frame);
                cnt++;
            }
            vCap.release(); // free the underlying capture handle
        });
    } catch (err) {
        console.log("err : " + err);
    }

    return res.json({ data: 'myData' });
});

//================================================================

// router.post('/faceRecognition', function (req, res) {

//     try {
//         let preview = req.body[0].preview;

//         str = preview.replace(/^data:(.*?);base64,/, "");
//         str = str.replace(/ /g, '+');

//         // save to a temporary file
//         fs.writeFile(`./data/temp.jpg`, str, 'base64', function (err) {
//             if (err) throw err;
//             console.log("saved");
//             detectFaceAndEyes('./data/temp.jpg');
//         });



//     } catch (err) {
//         console.log('err: ' + err);
//     }

//     return res.json({ data: 'myData' });
// });

//================================================================

/**
 * Encode a UTF-8 string as base64.
 * @param {string} plaintext - text to encode
 * @returns {string} base64 representation of the input
 */
function base64encode(plaintext) {
    const utf8Buffer = Buffer.from(plaintext, "utf8");
    return utf8Buffer.toString('base64');
}

/**
 * Decode a base64 string into UTF-8 text.
 * @param {string} base64text - base64-encoded input
 * @returns {string} decoded UTF-8 string
 */
function base64decode(base64text) {
    // FIX: removed leftover debug logging of the input length
    // (console.log(base64text.length)) — not part of the function's contract.
    return Buffer.from(base64text, 'base64').toString('utf8');
}

// function detectFaceAndEyes(filePath) {
//     const image = cv.imread(filePath);
//     const faceClassifier = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_DEFAULT);
//     const eyeClassifier = new cv.CascadeClassifier(cv.HAAR_EYE);

//     // detect faces
//     const faceResult = faceClassifier.detectMultiScale(image.bgrToGray());

//     if (!faceResult.objects.length) {
//         throw new Error('No faces detected!');
//     }

//     const sortByNumDetections = result => result.numDetections
//         .map((num, idx) => ({ num, idx }))
//         .sort(((n0, n1) => n1.num - n0.num))
//         .map(({ idx }) => idx);

//     // get best result
//     const faceRect = faceResult.objects[sortByNumDetections(faceResult)[0]];
//     console.log('faceRects:', faceResult.objects);
//     console.log('confidences:', faceResult.numDetections);

//     // detect eyes
//     const faceRegion = image.getRegion(faceRect);
//     const eyeResult = eyeClassifier.detectMultiScale(faceRegion);
//     console.log('eyeRects:', eyeResult.objects);
//     console.log('confidences:', eyeResult.numDetections);

//     // get best result
//     const eyeRects = sortByNumDetections(eyeResult)
//         .slice(0, 2)
//         .map(idx => eyeResult.objects[idx]);


//     // draw face detection
//     drawBlueRect(image, faceRect);

//     // draw eyes detection in face region
//     eyeRects.forEach(eyeRect => drawGreenRect(faceRegion, eyeRect));

//     cv.imwrite(`./data/temp2.jpg`, image);
// }

module.exports = router;