Hyunjun

add simple_convnet_rasberryPi code

1. Project plan uploaded
[강현준, 권은진, 김효은, 하미르] An automated checkout system applying image-processing deep learning on a Raspberry Pi
2. Interim report (paper) uploaded
[강현준, 권은진, 김효은, 하미르] An automated checkout system applying image-recognition deep learning (interim report/paper)
4. params.pkl -> params.csv
Converting the program from Python to C++ is needed to guarantee speed.
The Python code saves its weights in a pickle file, but reading a pickle file from C++ is difficult.
There is a library called pickletools, but it is error-prone and, because it supports several languages (Java, Python, C++), it is heavyweight, so it is not suitable for a conversion whose goal is speed.
5. params.pkl -> params.txt
Reading the weights from a csv file at input time is slow.
Since this also defeats the purpose of the speed-oriented conversion, params.pkl is converted to a txt file rather than a csv file; a sketch of such an export is shown below.
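A minimal sketch of what this export can look like (illustrative only, not the project's actual script; it assumes params.pkl holds a dict of NumPy arrays keyed "W1".."W7", and the params.txt layout of key, quoted shape tuple, then flattened values is inferred from load_trained() in SimpleConvNet.hpp):

# export_params.py: sketch of exporting params.pkl to params.txt
# Assumption: params.pkl is a dict of NumPy arrays keyed "W1".."W7".
import pickle
import numpy as np

with open('params.pkl', 'rb') as f:
    params = pickle.load(f)

with open('params.txt', 'w') as out:
    for i in range(1, 8):
        key = 'W{}'.format(i)
        w = np.asarray(params[key])
        # one header line per weight, e.g. W1 "(16, 3, 3, 3)"
        out.write('{} "{}"\n'.format(key, w.shape))
        # flattened weight values, one per line
        np.savetxt(out, w.reshape(-1), fmt='%.8f')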
6. Added a python test script
Added a python test script (test.py) that isolates only the testing step, plus the simple_convnet contents.
7. Added a python test folder
In the python test folder, the train portion, which is not needed for testing, was removed.
8. Added make_img.py
Loads an image, resizes it to (32, 32, 3), and saves it to input.txt (flattened in channel-first order, matching the read loop in main.cpp).
9. Added the simple_convnet_cpp code
1) Layers.hpp: implements each layer (Convolution, ReLU, Normalization, Pooling, DW_Conv, etc.)
2) SimpleConvNet.hpp: implements the deep learning model
3) input.txt: the image produced by make_img.py, stored as a txt file at size (32, 32, 3)
10. Added the google_image_crwaling code
Added the google_image_crwaling code to build the dataset we need.
11. Added the chrome_crwaling extension
The code above can only download 20 images per run.
Building a dataset requires a far larger amount of data.
Implemented a crawler for Google image search results using a Google Chrome extension.
12. Added the simple_convnet_rasberryPi code
Added the code that runs on the Raspberry Pi.
In make_img.py, when the camera attached to the Raspberry Pi takes a picture, the photo is resized to 32*32*3 and converted into an input.txt file.
main.cpp runs the convnet code.
The predict.sh shell script runs the whole pipeline.
1. g++ -std=c++11 main.cpp -> a.out
2. ./predict.sh
* Additionally: chmod +x ./predict.sh (make the script executable)
#ifndef LAYERS_
#define LAYERS_
#include<vector>
#include<cmath>
// 3D Matrix
struct Mat {
int dim, row, col;
std::vector< double > mat;
Mat(int dim, int row, int col, std::vector< double > v) : dim(dim), row(row), col(col), mat(v) {};
};
// Mat flattens the 3-D tensor into one 1-D vector plus dimension fields;
// element (d, r, c) lives at mat[d*row*col + r*col + c]
class Layer {
public:
virtual std::vector<Mat> forward(std::vector<Mat> &x) { return std::vector<Mat>(); }
};
// Convolution layer with zero padding
class Convolution : public Layer {
private:
std::vector<Mat> W;
int stride, pad;
public:
Convolution() {};
~Convolution() {};
Convolution(std::vector<Mat> W, int stride=1, int pad=0) : W(W), stride(stride), pad(pad) {};
// Each image x conv with each w
virtual std::vector<Mat> forward(std::vector<Mat> &x) {
std::vector< Mat > out;
int n, nw;
for (n = 0; n < x.size(); n++) {
std::vector<double>rev;
for (nw = 0; nw < W.size(); nw++) {
auto e = Convolution_(x[n], W[nw]);
rev.insert(rev.end(), e.begin(), e.end());
}
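// standard conv output size: out = (in + 2*pad - filter) / stride + 1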
int out_r = (x[n].row + 2 * pad - W[0].row) / stride + 1;
int out_c = (x[n].col + 2 * pad - W[0].col) / stride + 1;
out.push_back(Mat(nw, out_r, out_c, rev));
}
return out;
}
// Convolution x and W (both are 3-D Mat)
std::vector<double> Convolution_(const Mat& x,const Mat& w) {
std::vector<double> ret;
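// NOTE: the loops below advance one step at a time, so stride is effectively
// fixed at 1 (SimpleConvNet always constructs Convolution with stride=1).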
for (int d = 0; d < x.dim - w.dim + 1; d++) {
for (int r = -pad; r < x.row - w.row + 1 + pad; r++) {
for (int c = -pad; c < x.col - w.col +1 +pad; c++) {
ret.push_back(Convolution_(x, w, d, r, c));
}
}
}
return ret;
}
double Convolution_(const Mat& x, const Mat& w, int d, int r,int c) {
double ret = 0, xx=0;
int ds = w.col * w.row, rs = w.col;
int dxs = x.col * x.row, rxs = x.col;
for (int dd = 0; dd < w.dim; dd++) {
for (int rr = 0; rr < w.row; rr++) {
for (int cc = 0; cc < w.col; cc++) {
if ((pad > 0) && (r + rr < 0 || c + cc < 0 || r + rr >= x.row || c + cc >= x.col))
xx = 0;
else
xx = x.mat[(d + dd)*(dxs)+(r + rr)*rxs + (c + cc)];
ret += xx * w.mat[dd*(ds)+rr*(rs)+cc];
}
}
}
return ret;
}
};
// Depthwise Conv
class DW_Convolution : public Layer {
private:
std::vector<Mat> W;
int stride, pad;
public:
DW_Convolution() {};
~DW_Convolution() {};
DW_Convolution(std::vector<Mat> W, int stride=1, int pad=0) : W(W), stride(stride), pad(pad) {};
virtual std::vector<Mat> forward(std::vector<Mat> &x) {
std::vector<Mat> out;
int n, d;
for (n = 0; n < x.size(); n++) {
// Each dimension Conv with each filter
std::vector<double> rev;
for (d = 0; d < x[n].dim; d++) {
std::vector<double> e = Convolution_(x[n], W[d], d);
rev.insert(rev.end(), e.begin(), e.end());
}
int out_r = (x[n].row + 2 * pad - W[0].row) / stride + 1;
int out_c = (x[n].col + 2 * pad - W[0].col) / stride + 1;
out.push_back(Mat(d, out_r, out_c, rev));
}
return out;
}
std::vector<double> Convolution_(const Mat& x, const Mat& w, int d) {
std::vector<double> out;
int dd = d * x.col * x.row;
for (int r = -pad; r < x.row - w.row + 1 + pad; r++) { // stride assumed to be 1
for (int c = -pad; c < x.col - w.col + 1 + pad; c++) {
out.push_back(Convolution_(x, w, dd, r, c));
}
}
return out;
}
double Convolution_(const Mat& x, const Mat& w, int dd, int r, int c) {
double ret = 0, xx=0;
for (int rr = 0; rr < w.row; rr++) {
for (int cc = 0; cc < w.col; cc++) {
if ((pad > 0) && (r + rr < 0 || c + cc < 0 || r + rr >= x.row || c + cc >= x.col))
xx = 0;
else
xx = x.mat[dd + (r + rr)*x.col + (c+cc)];
ret += xx * w.mat[rr*w.col + cc];
}
}
return ret;
}
};
// Normalize the values at the same (row, col) positions across the n images
class LightNormalization : public Layer{
public:
virtual std::vector<Mat> forward(std::vector<Mat>& x) {
int dim = x[0].dim, row = x[0].row, col = x[0].col, nx = x.size();
int ds = row*col;
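// per-channel standardization over the whole batch: x = (x - mu) / sqrt(var + eps)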
for (int d = 0; d < dim; d++) {
double mu = 0, var=0, std, tmp; // mu : mean of x img each dim
for (int r = 0; r < row; r++)
for (int c = 0; c < col; c++)
for (int n = 0; n < nx; n++)
mu += x[n].mat[d*ds + r*col + c];
mu = mu / (double)(row*col*nx);
for (int r = 0; r < row; r++)
for (int c = 0; c < col; c++)
for (int n = 0; n < nx; n++) {
tmp = x[n].mat[d*ds + r*col + c] - mu;
var += (tmp*tmp);
}
var = var / (double)(row*col*nx);
std = sqrt(var+10e-7);
for (int r = 0; r < row; r++)
for (int c = 0; c < col; c++)
for (int n = 0; n < nx; n++)
x[n].mat[d*ds + r*col + c] = (x[n].mat[d*ds + r*col + c] - mu) / std;
}
return x;
}
};
class Relu : public Layer {
public:
virtual std::vector<Mat> forward(std::vector<Mat> &x) {
int nx = x.size(), nm = x[0].dim * x[0].row * x[0].col;
for (int n = 0; n < nx; n++)
for (int i = 0; i < nm; i++)
if (x[n].mat[i] < 0)
x[n].mat[i] = 0;
return x;
}
};
class Pooling : public Layer{
private:
int pool_h, pool_w, stride, pad;
public:
Pooling() { pad = 0; };
~Pooling() {};
Pooling(int pool_h, int pool_w, int stride=1, int pad=0) :pool_h(pool_h), pool_w(pool_w), stride(stride), pad(pad) {};
virtual std::vector<Mat> forward(std::vector<Mat>& x) {
std::vector<Mat> out;
int n, d, nx = x.size();
for (n = 0; n < nx; n++) {
std::vector<double> rev;
for (d = 0; d < x[n].dim; d++) {
std::vector<double> e = MaxPooling_(x[n], d);
rev.insert(rev.end(), e.begin(), e.end());
}
int out_h = (x[n].row + 2 * pad - pool_h) / stride + 1;
int out_w = (x[n].col + 2 * pad - pool_w) / stride + 1;
out.push_back(Mat(d, out_h, out_w, rev));
}
return out;
}
// Pooling each image
std::vector<double> MaxPooling_(Mat& x, int d) {
std::vector<double> out;
int row = x.row, col = x.col;
int dd = d * col * row;
for (int r = -pad; r < row - pool_h + 1 + pad; r+=stride) {
for (int c = -pad; c < col - pool_w + 1 + pad; c+=stride) {
out.push_back(MaxPooling_(x, dd, r, c));
}
}
return out;
}
// Pooling pool_w * pool_h
double MaxPooling_(Mat& x, int dd, int r, int c) {
double ret = 0, xx = 0;
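// starting the max at 0 is safe here: Pooling always follows Relu in this
// network, so inputs are non-negative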
for (int rr = 0; rr < pool_h; rr++) {
for (int cc = 0; cc < pool_w; cc++) {
if ((pad > 0) && (r + rr < 0 || c + cc < 0 || r + rr >= x.row || c + cc >= x.col))
xx = 0;
else
xx = x.mat[dd + (r + rr)*x.col + (c + cc)];
if(ret < xx)
ret = xx;
}
}
return ret;
}
};
class Affine : public Layer{
private:
std::vector<Mat> W;
public:
Affine() {}
~Affine() {}
Affine(std::vector<Mat>& W) : W(W){}
virtual std::vector<Mat> forward(std::vector<Mat>& x) {
std::vector<Mat> out;
int nx = x.size();
for (int n = 0; n < nx; n++) {
Mat e = Dot_(x[n]);
out.push_back(e);
}
return out;
}
Mat Dot_(const Mat& x) {
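// fully connected: x is treated as a flat vector of length W.size();
// out[c] = sum over n of W[n].mat[c] * x.mat[n]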
int col = W[0].col, nw = W.size();
std::vector<double> ret(col);
for (int c = 0; c < col; c++) {
for (int n = 0; n < nw; n++) {
ret[c] += W[n].mat[c] * x.mat[n];
}
}
return Mat(col, 1, 1, ret);
}
};
#endif
#ifndef SIMPLECONV_
#define SIMPLECONV_
#include"Layers.hpp"
#include<iostream>
#include<cstdio>
#include<string.h>
#include<stdlib.h>
struct input_dim {
int d1, d2, d3;
input_dim(int d1, int d2, int d3) :d1(d1), d2(d2), d3(d3) {};
};
struct conv_param {
int fn1, fn2, fn3;
int filtersize, pad, stride;
conv_param(int ftnum1, int ftnum2, int ftnum3, int ftsize, int pad, int stride) :fn1(ftnum1),
fn2(ftnum2), fn3(ftnum3), filtersize(ftsize), pad(pad), stride(stride) {};
};
class SimpleConvNet {
private:
std::vector< Layer* > layers;
std::vector<Mat> W[7]; // weights
std::vector<int> shape[7]; // shape of each weights
public:
SimpleConvNet() {}
~SimpleConvNet() {}
SimpleConvNet(input_dim id, conv_param cp, int hidden_size=512, int output_size=10, bool pretrained=true) {
if (pretrained)
load_trained("params.txt");
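// Architecture: Conv-Norm-ReLU-Pool, then two blocks of
// Conv-Norm-ReLU + DWConv-Norm-ReLU-Pool, then Affine-Norm-ReLU-Affine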
layers.push_back(new Convolution(W[0], 1, 1));
layers.push_back(new LightNormalization());
layers.push_back(new Relu());
layers.push_back(new Pooling(2, 2, 2));
layers.push_back(new Convolution(W[1], 1, 0));
layers.push_back(new LightNormalization());
layers.push_back(new Relu());
layers.push_back(new DW_Convolution(W[2], 1, 1));
layers.push_back(new LightNormalization());
layers.push_back(new Relu());
layers.push_back(new Pooling(2, 2, 2));
layers.push_back(new Convolution(W[3], 1, 0));
layers.push_back(new LightNormalization());
layers.push_back(new Relu());
layers.push_back(new DW_Convolution(W[4], 1, 1));
layers.push_back(new LightNormalization());
layers.push_back(new Relu());
layers.push_back(new Pooling(2, 2, 2));
layers.push_back(new Affine(W[5]));
layers.push_back(new LightNormalization());
layers.push_back(new Relu());
layers.push_back(new Affine(W[6]));
}
std::vector< Mat > predict(std::vector<Mat>& x) {
for (int i = 0; i < layers.size(); i++) {
x = layers[i]->forward(x);
}
return x;
}
double accuracy(std::vector< std::vector< unsigned char > > x, std::vector< int > ans, int batch_size=100) {
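// stub kept from the Python version; always reports 1.0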
return 1.0;
}
std::vector<int> argmax(std::vector< Mat >& x) {
std::vector<int> pred;
for (int n = 0; n < x.size(); n++) {
int pid = 0;
double pval = -1e9;
for (int i = 0; i < x[n].mat.size(); i++) {
if (pval < x[n].mat[i]) {
pval = x[n].mat[i];
pid = i;
}
}
pred.push_back(pid);
}
return pred;
}
void load_trained(const char* filename="params.txt") {
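// Expected params.txt layout per weight: a key token (e.g. W1), its shape
// tuple in double quotes (two or four numbers), then the flattened values.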
FILE *f = fopen(filename, "r");
if (f == NULL) {
printf("File not found\n");
exit(1);
}
char line[10] = { 0 };
int keynum;
while (fscanf(f, "%s", line)==1) {
char s[4][10] = { 0 };
keynum = line[1] - '0' - 1;
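// e.g. token W3 -> keynum 2, the index into W[] and shape[]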
// get shape
fscanf(f, "%s", s[0]);
fscanf(f, "%s", s[1]);
if (s[1][strlen(s[1]) - 1] != '\"') {
fscanf(f, "%s", s[2]);
fscanf(f, "%s", s[3]);
}
// nw = number of weights : shape[0]
// size = input size of W[key]
int size = 1, nw=0;
for (int i = 0; i < 4; i++) {
int val = 0;
for (int j = 0; j < strlen(s[i]); j++) {
if ('0' <= s[i][j] && s[i][j] <= '9') {
val = 10 * val + (s[i][j] - '0');
}
}
if (val) {
shape[keynum].push_back(val);
size *= val;
if (nw == 0)
nw = val;
}
}
// Read data of W[key]
int fsize = size / nw;
double *mm = new double[fsize];
for (int i = 0; i < size; i++) {
fscanf(f, "%lf", &mm[i%fsize]);
if (i%fsize == fsize - 1) {
if(shape[keynum].size() == 2)
W[keynum].push_back(Mat(1, 1, shape[keynum][1], std::vector<double>(mm, mm + fsize)));
else if(shape[keynum].size() == 4)
W[keynum].push_back(Mat(shape[keynum][1], shape[keynum][2],
shape[keynum][3], std::vector<double>(mm, mm + fsize)));
}
}
}
printf("Trained weights loading done\n");
}
};
#endif
(input.txt sample data: 3 x 32 x 32 = 3072 pixel values, one per line in channel-major order; full dump omitted)
#include"Layers.hpp"
#include"SimpleConvNet.hpp"
using namespace std;
int main() {
input_dim id = { 3, 32, 32 };
conv_param cp = { 32,32,64, 3,1,1 };
SimpleConvNet SCN(id, cp);
freopen("input.txt", "r", stdin);
vector<Mat> X;
int nx = 1, dim = 3, row = 32, col = 32;
double tmp;
for (int i = 0; i < nx; i++) {
vector<double> rev;
for (int d = 0; d < dim; d++) {
for (int r = 0; r < row; r++) {
for (int c = 0; c < col; c++) {
scanf("%lf", &tmp);
rev.push_back(tmp);
}
}
}
X.push_back(Mat(dim, row, col, rev));
}
freopen("pred.txt", "r", stdin);
nx = 2, dim = 3, row = 32, col = 32;
for (int i = 0; i < nx; i++) {
vector<double> rev;
for (int d = 0; d < dim; d++) {
for (int r = 0; r < row; r++) {
for (int c = 0; c < col; c++) {
scanf("%lf", &tmp);
rev.push_back(tmp);
}
}
}
X.push_back(Mat(dim, row, col, rev));
}
auto x = SCN.predict(X);
auto pred = SCN.argmax(x);
printf("predict : %d\n", pred[0]);
return 0;
}
from numpy import array, savetxt
from PIL import Image
import sys
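# note: predict.sh passes an argument ($1), but this script does not use it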
import picamera
camera = picamera.PiCamera()
filename = 'image.jpg'
camera.capture(filename)
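# resize to 32x32, reorder to channel-first (3, 32, 32), and flatten to 3072 values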
img = array(Image.open(filename).resize((32,32), Image.ANTIALIAS)).transpose(2,0,1).flatten()
savetxt('input.txt', img, fmt='%d', delimiter=' ')
#!/bin/bash
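# capture and preprocess an image, then run the compiled convnet on it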
python make_img.py $1
./a.out