Hyunjun

add simple_convnet_rasberryPi code

1. Project plan uploaded

[강현준, 권은진, 김효은, 하미르] Automated checkout system applying image-processing deep learning on a Raspberry Pi


2. Interim report (paper) uploaded

[강현준, 권은진, 김효은, 하미르] Automated checkout system applying image-recognition deep learning_interim report (paper)


4. params.pkl -> params.csv

The program has to be converted from Python to C++ to guarantee its speed.
The Python code stores the trained weights in a pickle file, but reading a pickle file from C++ is difficult.
There is a pickletools library, but it is error-prone and heavy because it supports several languages (Java, Python, C++), so it is not a good fit for a conversion whose whole point is speed.

5. params.pkl -> params.txt

Reading the weights back in from a CSV file is slow at input time.
Since that again defeats the purpose of the conversion (speed), params.pkl is converted to a txt file instead of a csv file.

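The conversion itself is a small script. A minimal sketch is shown below (it is not part of this commit, and it assumes params.pkl holds a dict of NumPy arrays keyed 'W1'..'W7', which is an assumption about the training code); the key-plus-quoted-shape header it writes mirrors what load_trained() in SimpleConvNet.hpp tokenizes:

    # convert_params.py - sketch only; assumes params.pkl is a dict of NumPy arrays keyed 'W1'..'W7'
    import pickle
    import numpy as np

    with open('params.pkl', 'rb') as f:
        params = pickle.load(f)

    with open('params.txt', 'w') as out:
        for key in sorted(k for k in params if k.startswith('W')):
            w = np.asarray(params[key], dtype=np.float64)
            # header such as:  W1 "(32, 3, 3, 3)"  (the form load_trained() parses)
            out.write('%s "%s"\n' % (key, str(tuple(w.shape))))
            out.write(' '.join('%.10g' % v for v in w.ravel()) + '\n')
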
6. Added Python test code

Added a Python test script (test.py) that isolates just the testing path, plus the simple_convnet contents.

7. Added python test folder

In the python test folder, the training code that is not needed for testing has been removed.

8. Added make_img.py

Loads an image, resizes it to (32,32,3), and saves it to input.txt.

9. Added simple_convnet_cpp code

1) Layers.hpp : implements each layer (Convolution, ReLU, Normalization, Pooling, DW_Conv, ...)
2) SimpleConvNet.hpp : implements the deep-learning model
3) input.txt : the image produced by make_img.py, resized to (32,32,3) and stored as a txt file

10. Added google_image_crwaling code

Added the google_image_crwaling code used to build the dataset we need.

11. Added chrome_crwaling extension

The code above can download only 20 images per run,
and building a dataset requires a much larger amount of data,
so a crawler for Google Image search results was implemented using a Google Chrome extension.

12. Added simple_convnet_rasberryPi code

Added the code that runs on the Raspberry Pi.
make_img.py : when the camera attached to the Raspberry Pi takes a picture, the picture is resized to 32*32*3 and written out as input.txt.
main.cpp runs the convnet.
predict.sh is the shell script that runs the whole pipeline.
1 +1. g++ -std=c++11 main.cpp   (compiles to ./a.out)
2 +
3 +2. ./predict.sh
4 +   * Note: if predict.sh is not executable yet, run chmod +x ./predict.sh first
1 +#ifndef LAYERS_
2 +#define LAYERS_
3 +#include<vector>
4 +#include<cmath>
5 +// 3D Matrix
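+// (elements are stored flat in row-major order: element (d, r, c) sits at mat[d*row*col + r*col + c])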
6 +struct Mat {
7 + int dim, row, col;
8 + std::vector< double > mat;
9 + Mat(int dim, int row, int col, std::vector< double > v) : dim(dim), row(row), col(col), mat(v) {};
10 +};
11 +
12 +// Mat stores its data as a single flat vector plus dim/row/col size fields
13 +class Layer {
14 +public:
15 + virtual std::vector<Mat> forward(std::vector<Mat> &x) { return std::vector<Mat>(); }
16 +};
17 +
18 +// padding
19 +class Convolution : public Layer {
20 +private:
21 + std::vector<Mat> W;
22 + int stride, pad;
23 +public:
24 + Convolution() {};
25 + ~Convolution() {};
26 + Convolution(std::vector<Mat> W, int stride=1, int pad=0) : W(W), stride(stride), pad(pad) {};
27 +
28 + // Each image x conv with each w
29 + virtual std::vector<Mat> forward(std::vector<Mat> &x) {
30 + std::vector< Mat > out;
31 + int n, nw;
32 + for (n = 0; n < x.size(); n++) {
33 + std::vector<double>rev;
34 + for (nw = 0; nw < W.size(); nw++) {
35 + auto e = Convolution_(x[n], W[nw]);
36 + rev.insert(rev.end(), e.begin(), e.end());
37 + }
38 + int out_r = (x[n].row + 2 * pad - W[0].row) / stride + 1;
39 + int out_c = (x[n].col + 2 * pad - W[0].col) / stride + 1;
40 + out.push_back(Mat(nw, out_r, out_c, rev));
41 + }
42 + return out;
43 + }
44 +
45 + // Convolution x and W (both are 3-D Mat)
46 + std::vector<double> Convolution_(const Mat& x,const Mat& w) {
47 + std::vector<double> ret;
48 + int ndim = x.dim - w.dim + 1;
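+        // the loops below always advance by 1, so only stride == 1 is effectively supported (which is how SimpleConvNet uses it)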
49 + for (int d = 0; d < x.dim - w.dim + 1; d++) {
50 + for (int r = -pad; r < x.row - w.row + 1 + pad; r++) {
51 + for (int c = -pad; c < x.col - w.col +1 +pad; c++) {
52 + ret.push_back(Convolution_(x, w, d, r, c));
53 + }
54 + }
55 + }
56 + return ret;
57 + }
58 +
59 + double Convolution_(const Mat& x, const Mat& w, int d, int r,int c) {
60 + double ret = 0, xx=0;
61 + int ds = w.col * w.row, rs = w.col;
62 + int dxs = x.col * x.row, rxs = x.col;
63 + for (int dd = 0; dd < w.dim; dd++) {
64 + for (int rr = 0; rr < w.row; rr++) {
65 + for (int cc = 0; cc < w.col; cc++) {
66 + if ((pad > 0) && (r + rr < 0 || c + cc < 0 || r + rr >= x.row || c + cc >= x.col))
67 + xx = 0;
68 + else
69 + xx = x.mat[(d + dd)*(dxs)+(r + rr)*rxs + (c + cc)];
70 + ret += xx * w.mat[dd*(ds)+rr*(rs)+cc];
71 + }
72 + }
73 + }
74 + return ret;
75 + }
76 +};
77 +
78 +// Depthwise Conv
79 +class DW_Convolution : public Layer {
80 +private:
81 + std::vector<Mat> W;
82 + int stride, pad;
83 +public:
84 + DW_Convolution() {};
85 + ~DW_Convolution() {};
86 + DW_Convolution(std::vector<Mat> W, int stride=1, int pad=0) : W(W), stride(stride), pad(pad) {};
87 +
88 + virtual std::vector<Mat> forward(std::vector<Mat> &x) {
89 + std::vector<Mat> out;
90 + int n, d;
91 + for (n = 0; n < x.size(); n++) {
92 + // Each dimension Conv with each filter
93 + std::vector<double> rev;
94 + for (d = 0; d < x[n].dim; d++) {
95 + std::vector<double> e = Convolution_(x[n], W[d], d);
96 + rev.insert(rev.end(), e.begin(), e.end());
97 + }
98 + int out_r = (x[n].row + 2 * pad - W[0].row) / stride + 1;
99 + int out_c = (x[n].col + 2 * pad - W[0].col) / stride + 1;
100 + out.push_back(Mat(d, out_r, out_c, rev));
101 + }
102 + return out;
103 + }
104 +
105 + std::vector<double> Convolution_(const Mat& x, const Mat& w, int d) {
106 + std::vector<double> out;
107 + int dd = d * x.col * x.row;
108 +        for (int r = -pad; r < x.row - w.row + 1 + pad; r++) { // stride is assumed to be 1 here
109 + for (int c = -pad; c < x.col - w.col + 1 + pad; c++) {
110 + out.push_back(Convolution_(x, w, dd, r, c));
111 + }
112 + }
113 + return out;
114 + }
115 +
116 + double Convolution_(const Mat& x, const Mat& w, int dd, int r, int c) {
117 + double ret = 0, xx=0;
118 + for (int rr = 0; rr < w.row; rr++) {
119 + for (int cc = 0; cc < w.col; cc++) {
120 + if ((pad > 0) && (r + rr < 0 || c + cc < 0 || r + rr >= x.row || c + cc >= x.col))
121 + xx = 0;
122 + else
123 + xx = x.mat[dd + (r + rr)*x.col + (c+cc)];
124 + ret += xx * w.mat[rr*w.col + cc];
125 + }
126 + }
127 + return ret;
128 + }
129 +};
130 +
131 +// Normalize each channel using the mean/std of that channel computed over the rows, cols and all n images
132 +class LightNormalization : public Layer{
133 +public:
134 + virtual std::vector<Mat> forward(std::vector<Mat>& x) {
135 + std::vector<Mat> out;
136 + int dim = x[0].dim, row = x[0].row, col = x[0].col, nx = x.size();
137 + int ds = row*col;
138 + for (int d = 0; d < dim; d++) {
139 + double mu = 0, var=0, std, tmp; // mu : mean of x img each dim
140 + for (int r = 0; r < row; r++)
141 + for (int c = 0; c < col; c++)
142 + for (int n = 0; n < nx; n++)
143 + mu += x[n].mat[d*ds + r*col + c];
144 +
145 + mu = mu / (double)(row*col*nx);
146 +
147 + for (int r = 0; r < row; r++)
148 + for (int c = 0; c < col; c++)
149 + for (int n = 0; n < nx; n++) {
150 + tmp = x[n].mat[d*ds + r*col + c] - mu;
151 + var += (tmp*tmp);
152 + }
153 +
154 + var = var / (double)(row*col*nx);
155 + std = sqrt(var+10e-7);
156 + for (int r = 0; r < row; r++)
157 + for (int c = 0; c < col; c++)
158 + for (int n = 0; n < nx; n++)
159 + x[n].mat[d*ds + r*col + c] = (x[n].mat[d*ds + r*col + c] - mu) / std;
160 + }
161 + return x;
162 + }
163 +};
164 +
165 +class Relu : public Layer {
166 +public:
167 + virtual std::vector<Mat> forward(std::vector<Mat> &x) {
168 + int nx = x.size(), nm = x[0].dim * x[0].row * x[0].col;
169 + for (int n = 0; n < nx; n++)
170 + for (int i = 0; i < nm; i++)
171 + if (x[n].mat[i] < 0)
172 + x[n].mat[i] = 0;
173 + return x;
174 + }
175 +};
176 +
177 +class Pooling : public Layer{
178 +private:
179 + int pool_h, pool_w, stride, pad;
180 +public:
181 + Pooling() { pad = 0; };
182 + ~Pooling() {};
183 + Pooling(int pool_h, int pool_w, int stride=1, int pad=0) :pool_h(pool_h), pool_w(pool_w), stride(stride), pad(pad) {};
184 +
185 + virtual std::vector<Mat> forward(std::vector<Mat>& x) {
186 + std::vector<Mat> out;
187 + int n, d, nx = x.size();
188 + for (n = 0; n < nx; n++) {
189 + std::vector<double> rev;
190 + for (d = 0; d < x[n].dim; d++) {
191 + std::vector<double> e = MaxPooling_(x[n], d);
192 + rev.insert(rev.end(), e.begin(), e.end());
193 + }
194 + int out_h = (x[n].row + 2 * pad - pool_h) / stride + 1;
195 + int out_w = (x[n].col + 2 * pad - pool_w) / stride + 1;
196 + out.push_back(Mat(d, out_h, out_w, rev));
197 + }
198 + return out;
199 + }
200 +
201 + // Pooling each image
202 + std::vector<double> MaxPooling_(Mat& x, int d) {
203 + std::vector<double> out;
204 + int row = x.row, col = x.col;
205 + int dd = d * col * row;
206 + for (int r = -pad; r < row - pool_h + 1 + pad; r+=stride) {
207 + for (int c = -pad; c < col - pool_w + 1 + pad; c+=stride) {
208 + out.push_back(MaxPooling_(x, dd, r, c));
209 + }
210 + }
211 + return out;
212 + }
213 +
214 + // Pooling pool_w * pool_h
215 + double MaxPooling_(Mat& x, int dd, int r, int c) {
216 + double ret = 0, xx = 0;
217 + for (int rr = 0; rr < pool_h; rr++) {
218 + for (int cc = 0; cc < pool_w; cc++) {
219 + if ((pad > 0) && (r + rr < 0 || c + cc < 0 || r + rr >= x.row || c + cc >= x.col))
220 + xx = 0;
221 + else
222 + xx = x.mat[dd + (r + rr)*x.col + (c + cc)];
223 + if(ret < xx)
224 + ret = xx;
225 + }
226 + }
227 + return ret;
228 + }
229 +};
230 +
231 +class Affine : public Layer{
232 +private:
233 + std::vector<Mat> W;
234 +public:
235 + Affine() {}
236 + ~Affine() {}
237 + Affine(std::vector<Mat>& W) : W(W){}
238 +
239 + virtual std::vector<Mat> forward(std::vector<Mat>& x) {
240 + std::vector<Mat> out;
241 + int nx = x.size();
242 + for (int n = 0; n < nx; n++) {
243 + Mat e = Dot_(x[n]);
244 + out.push_back(e);
245 + }
246 + return out;
247 + }
248 +
249 + Mat Dot_(const Mat& x) {
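+        // x.mat is read as a flat feature vector, so the pooled 3-D Mat is consumed here without an explicit flatten step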
250 + int dim = W[0].dim, row = W[0].row, col = W[0].col, nw = W.size();
251 + int size = dim*row*col;
252 + std::vector<double> ret(col);
253 +
254 + for (int c = 0; c < col; c++) {
255 + for (int n = 0; n < nw; n++) {
256 + ret[c] += W[n].mat[c] * x.mat[n];
257 + }
258 +
259 + }
260 + return Mat(col, 1, 1, ret);
261 + }
262 +};
263 +#endif
1 +#ifndef SIMPLECONV_
2 +#define SIMPLECONV_
3 +#include"Layers.hpp"
4 +#include<iostream>
5 +#include<cstdio>
6 +#include<string.h>
7 +#include<stdlib.h>
8 +struct input_dim {
9 + int d1, d2, d3;
10 + input_dim(int d1, int d2, int d3) :d1(d1), d2(d2), d3(d3) {};
11 +};
12 +
13 +struct conv_param {
14 + int fn1, fn2, fn3;
15 + int filtersize, pad, stride;
16 + conv_param(int ftnum1, int ftnum2, int ftnum3, int ftsize, int pad, int stride) :fn1(ftnum1),
17 + fn2(ftnum2), fn3(ftnum3), filtersize(ftsize), pad(pad), stride(stride) {};
18 +};
19 +
20 +class SimpleConvNet {
21 +private:
22 + std::vector< Layer* > layers;
23 +
24 + std::vector<Mat> W[7]; // weights
25 + std::vector<int> shape[7]; // shape of each weights
26 +public:
27 + SimpleConvNet() {}
28 + ~SimpleConvNet() {}
29 + SimpleConvNet(input_dim id, conv_param cp, int hidden_size=512, int output_size=10, bool pretrained=true) {
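+        // note: id, cp, hidden_size and output_size are not referenced below; the conv/affine shapes come from the weights loaded out of params.txt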
30 + if (pretrained)
31 + load_trained("params.txt");
32 +
33 + layers.push_back(new Convolution(W[0], 1, 1));
34 + layers.push_back(new LightNormalization());
35 + layers.push_back(new Relu());
36 + layers.push_back(new Pooling(2, 2, 2));
37 +
38 + layers.push_back(new Convolution(W[1], 1, 0));
39 + layers.push_back(new LightNormalization());
40 + layers.push_back(new Relu());
41 +
42 + layers.push_back(new DW_Convolution(W[2], 1, 1));
43 + layers.push_back(new LightNormalization());
44 + layers.push_back(new Relu());
45 + layers.push_back(new Pooling(2, 2, 2));
46 +
47 + layers.push_back(new Convolution(W[3], 1, 0));
48 + layers.push_back(new LightNormalization());
49 + layers.push_back(new Relu());
50 +
51 + layers.push_back(new DW_Convolution(W[4], 1, 1));
52 + layers.push_back(new LightNormalization());
53 + layers.push_back(new Relu());
54 + layers.push_back(new Pooling(2, 2, 2));
55 +
56 + layers.push_back(new Affine(W[5]));
57 + layers.push_back(new LightNormalization());
58 + layers.push_back(new Relu());
59 +
60 + layers.push_back(new Affine(W[6]));
61 + }
62 +
63 + std::vector< Mat > predict(std::vector<Mat>& x) {
64 + for (int i = 0; i < layers.size(); i++) {
65 + x = layers[i]->forward(x);
66 + }
67 + return x;
68 + }
69 +
70 + double accuracy(std::vector< std::vector< unsigned char > > x, std::vector< int > ans, int batch_size=100) {
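+        // stub: always reports 1.0 (accuracy is not evaluated on the C++ side)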
71 + return 1.0;
72 + }
73 +
74 + std::vector<int> argmax(std::vector< Mat >& x) {
75 + std::vector<int> pred;
76 + for (int n = 0; n < x.size(); n++) {
77 + int pid = 0, pos;
78 + double pval = -1e9;
79 + for (int i = 0; i < x[n].mat.size(); i++) {
80 + if (pval < x[n].mat[i]) {
81 + pval = x[n].mat[i];
82 + pid = i;
83 + }
84 + }
85 + pred.push_back(pid);
86 + }
87 + return pred;
88 + }
89 +
90 + void load_trained(const char* filename="params.txt") {
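+        // params.txt is expected to contain, for each weight: a key token such as "W3", a quoted
+        // shape tuple like "(32, 3, 3, 3)" (2 or 4 whitespace-separated tokens), then the values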
91 + FILE *f = fopen(filename, "r");
92 + if (f == NULL) {
93 + printf("File not found\n");
94 + exit(1);
95 + }
96 + char line[10] = { 0 };
97 + int keynum;
98 + while (fscanf(f, "%s", line)==1) {
99 + char s[4][10] = { 0 };
100 + keynum = line[1] - '0' - 1;
101 +
102 + // get shape
103 + fscanf(f, "%s", s[0]);
104 + fscanf(f, "%s", s[1]);
105 + if (s[1][strlen(s[1]) - 1] != '\"') {
106 + fscanf(f, "%s", s[2]);
107 + fscanf(f, "%s", s[3]);
108 + }
109 +
110 + // nw = number of weights : shape[0]
111 + // size = input size of W[key]
112 + int size = 1, nw=0;
113 + for (int i = 0; i < 4; i++) {
114 + int val = 0;
115 + for (int j = 0; j < strlen(s[i]); j++) {
116 + if ('0' <= s[i][j] && s[i][j] <= '9') {
117 + val = 10 * val + (s[i][j] - '0');
118 + }
119 + }
120 + if (val) {
121 + shape[keynum].push_back(val);
122 + size *= val;
123 + if (nw == 0)
124 + nw = val;
125 + }
126 + }
127 + // Read data of W[key]
128 + int fsize = size / nw;
129 + double *mm = new double[fsize];
130 + for (int i = 0; i < size; i++) {
131 + fscanf(f, "%lf", &mm[i%fsize]);
132 + if (i%fsize == fsize - 1) {
133 + if(shape[keynum].size() == 2)
134 + W[keynum].push_back(Mat(1, 1, shape[keynum][1], std::vector<double>(mm, mm + fsize)));
135 + else if(shape[keynum].size() == 4)
136 + W[keynum].push_back(Mat(shape[keynum][1], shape[keynum][2],
137 + shape[keynum][3], std::vector<double>(mm, mm + fsize)));
138 + }
139 +        }
+        delete[] mm;  // free the per-weight read buffer (it was leaked on every pass through the while loop)
140 +    }
+    fclose(f);
141 +    printf("Trained weights loading done\n");
142 + }
143 +};
144 +#endif
[input.txt : 3,072 integer pixel values (3 channels x 32 rows x 32 cols, one value per line) written by make_img.py; the full dump is omitted here]
1 +#include"Layers.hpp"
2 +#include"SimpleConvNet.hpp"
3 +using namespace std;
4 +int main() {
5 +
6 + input_dim id = { 3, 32, 32 };
7 + conv_param cp = { 32,32,64, 3,1,1 };
8 + SimpleConvNet SCN(id, cp);
9 +
10 + freopen("input.txt", "r", stdin);
11 + vector<Mat> X;
12 + int nx = 1, dim = 3, row = 32, col = 32;
13 + double tmp;
14 + for (int i = 0; i < nx; i++) {
15 + vector<double> rev;
16 + for (int d = 0; d < dim; d++) {
17 + for (int r = 0; r < row; r++) {
18 + for (int c = 0; c < col; c++) {
19 + scanf("%lf", &tmp);
20 + rev.push_back(tmp);
21 + }
22 + }
23 + }
24 + X.push_back(Mat(dim, row, col, rev));
25 + }
26 + freopen("pred.txt", "r", stdin);
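+    // two more 3x32x32 images are read from pred.txt and appended to X; only the prediction for the first image (the camera capture) is printed below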
27 + nx = 2, dim = 3, row = 32, col = 32;
28 + for (int i = 0; i < nx; i++) {
29 + vector<double> rev;
30 + for (int d = 0; d < dim; d++) {
31 + for (int r = 0; r < row; r++) {
32 + for (int c = 0; c < col; c++) {
33 + scanf("%lf", &tmp);
34 + rev.push_back(tmp);
35 + }
36 + }
37 + }
38 + X.push_back(Mat(dim, row, col, rev));
39 + }
40 +
41 + auto x = SCN.predict(X);
42 +
43 + auto pred = SCN.argmax(x);
44 +
45 + int num = 0, pd;
46 +
47 + printf("predict : %d ", pred[0]);
48 + return 0;
49 +}
1 +from numpy import array, savetxt
2 +from PIL import Image
3 +import sys
4 +import picamera
5 +
6 +camera = picamera.PiCamera()
7 +
8 +filename = 'image.jpg'
9 +camera.capture(filename)
10 +
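+# resize to 32x32, reorder to channel-first (3,32,32) and flatten, so main.cpp can read the values in d, r, c order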
11 +img = array(Image.open(filename).resize((32,32), Image.ANTIALIAS)).transpose(2,0,1).flatten()
12 +
13 +savetxt('input.txt', img, fmt='%d', delimiter=' ')
1 +#!/bin/bash
2 +
3 +python make_img.py $1
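+# note: make_img.py currently ignores this argument and always captures from the Pi camera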
4 +./a.out