Authored by Graduate on 2020-11-28 16:09:31 +0900
Commit e6c4243c5fb188fe1c11e2517a039def8d6e993b (1 parent: 1a2c24b6)

Change register page
Showing 2 changed files with 146 additions and 81 deletions
Changed files: flask/templates/index.html, flask/templates/register.html

flask/templates/index.html
@@ -58,16 +58,16 @@ function load_cascade()
function main() {
    let video = document.getElementById("videoInput");
    let canvasOutput = document.getElementById("canvasOutput");
    let canvasContext = canvasOutput.getContext('2d');
    let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
    let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
    // let gray = new cv.Mat();
    let cap = new cv.VideoCapture(video);
    let faces = new cv.RectVector();
    let classifier = new cv.CascadeClassifier();
    class Tracker{
        constructor(){
            this.arr = new Array();
        }
@@ -92,14 +92,14 @@ class Tracker{
            this.arr.push(ent)
            return true;
        }
    };
    var tracker = new Tracker();
    var streaming = true;
    classifier.load('haarcascade_frontalface_default.xml');
    const FPS = 30;
    function processVideo() {
        try {
            if (!streaming) {
                // clean and stop.
@@ -132,10 +132,9 @@ function processVideo() {
            let tempCanvas = document.createElement("canvas");
            cv.imshow(tempCanvas, cropped);
            console.log('b64encode');
-           if (tracker.register(face.x, face.y, face.width, face.height, Date.now())){
+           if (tracker.register(face.x, face.y, face.width, face.height)){
                let b64encoded = tempCanvas.toDataURL("image/jpeg", 1.0);
-               b64encoded = b64encoded.replace('data:image/jpeg;base64,', '')
-               console.log('ajax post');
+               b64encoded = b64encoded.replace('data:image/jpeg;base64,', '');
                $.ajax({
                    type: "POST",
                    url: "/verify",
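For context, the hunk above is the client side of the verification round trip: the cropped face is drawn into an off-screen canvas, encoded as a base64 JPEG, stripped of its data-URL prefix, and POSTed to /verify with jQuery. A minimal self-contained sketch of that flow follows; the payload field name and the response handling are assumptions, since the diff is cut off inside the $.ajax options.

    // Hypothetical sketch of the crop -> base64 -> POST flow shown above.
    // The "image" field name and the success handler are assumptions; the diff
    // truncates before the actual $.ajax options.
    function postFaceCrop(tempCanvas) {
        let b64encoded = tempCanvas.toDataURL("image/jpeg", 1.0)
                                   .replace('data:image/jpeg;base64,', '');
        $.ajax({
            type: "POST",
            url: "/verify",
            data: { image: b64encoded },   // assumed field name
            success: function (res) {      // assumed response handling
                console.log('verify response:', res);
            }
        });
    }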
@@ -172,15 +171,15 @@ function processVideo() {
        } catch (err) {
            console.log(err);
        }
    }
    setTimeout(processVideo, 0);
}
</script>
</head>
<body onload="cv['onRuntimeInitialized']=()=>{ init(); };">
    <div id="container">
        <video autoplay="true" id="videoInput" style="display: none; object-fit: cover;"></video>
        <canvas id="canvasOutput"></canvas>
    </div>
</body>
</html>
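The body onload attribute above defers all OpenCV work until the WebAssembly runtime is ready. A standalone equivalent of that attribute, written as plain script, is sketched below; it assumes opencv.js is included earlier in the page, which is not shown in this hunk.

    // Sketch of the cv['onRuntimeInitialized'] hook used in the onload attribute above.
    // cv.Mat, cv.CascadeClassifier, etc. are only safe to construct after this fires.
    window.addEventListener('load', () => {
        cv['onRuntimeInitialized'] = () => {
            init();   // start grabbing the camera and loading the cascade
        };
    });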
flask/templates/register.html
@@ -11,8 +11,32 @@
<script type='text/javascript' src="{{url_for('static', filename='js/utils.js')}}"></script>
<script type='text/javascript' src="https://code.jquery.com/jquery-1.12.4.min.js"></script>
<script type='text/javascript'>
var tempImage = new Image();
var tempCanvas = document.createElement("canvas");
var b64encoded = '';
var streaming = true;
function init() {
    let video = document.getElementById('videoInput');
    let container = document.getElementById('container');
    let canvasOutput = document.getElementById("canvasOutput");
    if (navigator.mediaDevices.getUserMedia){
        navigator.mediaDevices.getUserMedia({ video: true })
        .then(function(stream) {
            video.srcObject = stream;
            video.addEventListener('canplay', () => {
                video.width = video.videoWidth;
                video.height = video.videoHeight;
                container.style.width = video.videoWidth + 'px';
                container.style.height = video.videoHeight + 'px';
                canvasOutput.width = video.videoWidth;
                canvasOutput.height = video.videoHeight;
                load_cascade();
            });
        }).catch(function(err0r) {
            console.log("Something went wrong!");
            streaming = false;
        });
    }
}
function load_cascade() {
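The hunk below completes load_cascade(). As background, an OpenCV.js CascadeClassifier can only load() files that already exist in its Emscripten virtual filesystem, which is why the cascade XML is first fetched and written there through the Utils helper from static/js/utils.js. A minimal sketch of that pattern is shown here; the faceCascadeFile name is an assumption, since its definition sits above this hunk.

    // Sketch: fetch the cascade XML over HTTP and register it in the OpenCV.js
    // virtual filesystem so classifier.load() can resolve it by name.
    let faceCascadeFile = 'haarcascade_frontalface_default.xml';   // assumed name
    let utils = new Utils('errorMessage');
    utils.createFileFromUrl(faceCascadeFile, 'static/js/haarcascade_frontalface_default.xml', () => {
        let classifier = new cv.CascadeClassifier();
        classifier.load(faceCascadeFile);   // resolves inside the virtual FS
    });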
@@ -20,69 +44,121 @@ function load_cascade()
    let faceCascadeURL = 'static/js/haarcascade_frontalface_default.xml'
    let utils = new Utils('errorMessage');
    utils.createFileFromUrl(faceCascadeFile, faceCascadeURL, () => {
        activate();
        main();
    });
}
function activate() {
    let fileloader = document.getElementById("fileloader");
    fileloader.disabled = false;
}
-function detect_face()
+function main()
{
-   let canvas = document.createElement('canvas');
-   canvas.width = tempImage.width;
-   canvas.height = tempImage.height;
-   let ctx = canvas.getContext('2d')
-   ctx.drawImage(tempImage, 0, 0);
-   let src = cv.imread(canvas);
-   let dst = new cv.Mat(src.cols, src.rows, cv.CV_8UC4);
-   // let gray = new cv.Mat();
+   let video = document.getElementById("videoInput");
+   let canvasOutput = document.getElementById("canvasOutput");
+   let canvasContext = canvasOutput.getContext('2d');
+   let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
+   let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
+   let cap = new cv.VideoCapture(video);
+   let faces = new cv.RectVector();
+   let classifier = new cv.CascadeClassifier();
    class Tracker {
        constructor(){
            this.arr = new Array();
        }
        register = function(x, y, width, height) {
            var x_center = (x + width) / 2;
            var y_center = (y + height) / 2;
            var now = Date.now()
            this.arr = this.arr.filter(ent => now - ent.time < 300);
            for (const prop in this.arr){
                var prop_x_center = (this.arr[prop].x + this.arr[prop].width) / 2;
                var prop_y_center = (this.arr[prop].y + this.arr[prop].height) / 2;
                if (Math.abs(x_center - prop_x_center) < 10 && Math.abs(y_center - prop_y_center) < 10){
                    this.arr[prop].x = x;
                    this.arr[prop].y = y;
                    this.arr[prop].width = width;
                    this.arr[prop].height = height;
                    this.arr[prop].time = now;
                    return this.arr[prop].init_time;
                }
            }
            var ent = { x: x, y: y, width: width, height: height, time: now, init_time: now }
            this.arr.push(ent)
            return now;
        }
    };
    var tracker = new Tracker();
    classifier.load('haarcascade_frontalface_default.xml');
    const FPS = 30;
    function processVideo() {
        try {
            if (!streaming) {
                // clean and stop.
                src.delete();
                dst.delete();
                gray.delete();
                faces.delete();
                classifier.delete();
                return;
            }
            let begin = Date.now();
            // start processing.
            cap.read(src);
            cv.flip(src, src, 1);
            src.copyTo(dst);
            // cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
-           let msize = new cv.Size(tempImage.width / 4, tempImage.height / 4);
            // detect faces.
+           let msize = new cv.Size(video.width / 4, video.height / 4);
            classifier.detectMultiScale(dst, faces, 1.1, 3, 0, msize);
            if (faces.size() == 0) {
                alert('얼굴이 인식되지 않았습니다. 얼굴 이미지가 작지 않은지 확인해주세요.');  // "No face was detected. Please check that the face image is not too small."
            } else if (faces.size() > 1) {
                alert('하나의 얼굴만 등록해주세요.')  // "Please register only one face."
            }
            // draw faces.
            console.log('draw faces');
            for (let i = 0; i < faces.size(); ++i) {
                let face = faces.get(i);
                let point1 = new cv.Point(face.x, face.y);
                let point2 = new cv.Point(face.x + face.width, face.y + face.height);
                cv.rectangle(dst, point1, point2, [255, 0, 0, 255], 8);
                let cropped = new cv.Mat();
                let rect = new cv.Rect(Math.max(face.x - 22, 0), Math.max(face.y - 44, 0),
                                       Math.min(face.width + 44, src.cols), Math.min(face.height + 66, src.rows));
-               let cropped = src.roi(rect);
+               cropped = src.roi(rect);
                let tempCanvas = document.createElement("canvas");
                cv.imshow(tempCanvas, cropped);
                if (Date.now() - tracker.register(face.x, face.y, face.width, face.height) > 1000){
                    // If the face has been recognised for one second, stop capturing and enable the register button.
                    b64encoded = tempCanvas.toDataURL("image/jpeg", 1.0);
                    toggle_streaming();
                    activate_sender();
                }
                if (faces.size() == 1) {
                    let sender = document.getElementById("sender");
                    sender.disabled = false;
                }
                if (dst.cols > $(window).width() || dst.rows > $(window).height()) {
                    let ratio = Math.min($(window).width() / parseFloat(dst.cols), $(window).height() / parseFloat(dst.rows));
                    let dsize = new cv.Size(dst.cols * ratio, dst.rows * ratio);
                    cv.resize(dst, dst, dsize, 0, 0, cv.INTER_AREA);
                    // to do resize preview
            cv.imshow('canvasOutput', dst);
            // schedule the next one.
            let delay = 1000 / FPS - (Date.now() - begin);
            setTimeout(processVideo, delay);
        } catch (err) {
            console.log(err);
        }
        let preview = document.getElementById('preview');
        cv.imshow(preview, dst);
        setTimeout(processVideo, 0);
    }
    function activate_sender() {
        let sender = document.getElementById("sender");
        fileloader.disabled = false;
    }
    function toggle_streaming() {
        streamButton = document.getElementById("streamButton");
        streaming = !streaming;
        if (streaming) streamButton.value = "촬영중지";  // "stop capturing"
        else streamButton.value = "촬영시작";  // "start capturing"
        main();
    }
function submit() {
    let b64encoded = tempCanvas.toDataURL('image/jpeg', 1.0);
    let student_id = document.getElementById('student_id').value;
    let student_name = document.getElementById('student_name').value;
    b64encoded = b64encoded.replace('data:image/jpeg;base64,', '')
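The Tracker added above is a small position-based debounce: register() drops entries not updated in the last 300 ms, and when a detection has (x + width) / 2 and (y + height) / 2 values within 10 px of a stored entry it refreshes that entry and returns its init_time; otherwise it inserts a new entry and returns now. processVideo() only captures once Date.now() minus that return value exceeds 1000 ms. A standalone usage sketch follows; the coordinates, the 30 FPS interval, and having the Tracker class in scope outside main() are assumptions.

    // Standalone sketch (not part of the diff): feeding the Tracker a detection at
    // roughly the same position every frame keeps one entry alive, so after about a
    // second register() keeps returning the original init_time and the capture fires.
    const demoTracker = new Tracker();          // assumes the Tracker class above is in scope
    const timer = setInterval(() => {
        const initTime = demoTracker.register(100, 80, 60, 60);
        if (Date.now() - initTime > 1000) {
            console.log('same face tracked for over 1 s, would capture and stop here');
            clearInterval(timer);
        }
    }, 1000 / 30);                              // roughly the FPS constant used above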
@@ -102,20 +178,9 @@ function submit()
    })
}
-var loadFile = function(event) {
-   var reader = new FileReader();
-   reader.readAsDataURL(event.target.files[0]);
-   reader.onload = function() {
-       tempImage.src = reader.result;
-       tempImage.onload = function() {
-           detect_face();
-       }
-   };
-};
</script>
</head>
-<body onload="cv['onRuntimeInitialized']=()=>{ load_cascade();};" class="w3-light-grey">
+<body onload="cv['onRuntimeInitialized']=()=>{ init();};" class="w3-light-grey">
<!-- w3-content defines a container for fixed size centered content,
and is wrapped around the whole page content, except for the footer in this example -->
<div class="w3-content" style="max-width:1400px">
@@ -126,14 +191,15 @@ and is wrapped around the whole page content, except for the footer in this exam
</header>
<div class="w3-row" , style='text-align:center'>
-   <h2><b>얼굴 파일을 등록해주세요 (jpeg only)</b></h2>
+   <h2><b>얼굴을 등록해주세요 (jpeg only)</b></h2>
    <div id="container">
        <video autoplay="true" id="videoInput" style="display: none; object-fit: cover;"></video>
        <canvas id="canvasOutput"></canvas>
    </div>
    <div>
        학번: <input type="text" id="student_id"><br>
        이름: <input type="text" id="student_name"><br><br>
        <input type="file" id="fileloader" name="file" onchange="loadFile(event)" autocomplete="off" accept="image/jpeg" required disabled>
        <div>
            <canvas id="preview"></canvas>
        </div>
        <input id="streamButton" type="button" onclick="toggle_streaming()" value="활영중지" disabled>
        <input id="sender" type="button" onclick="submit()" value="등록" disabled>
    </div>
</div>

(Korean strings above: "얼굴 파일을 등록해주세요" = "please register a face file"; "얼굴을 등록해주세요" = "please register your face"; 학번 = student ID; 이름 = name; "활영중지" = "stop capturing", a typo of "촬영중지"; "등록" = "register".)