정희원

Upload new file

import datetime
import numpy as np
import matplotlib.pyplot as plt
from keras.layers import Input, Activation, Conv2D, Flatten, Dense, MaxPooling2D
from keras.models import Model, load_model
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau

plt.style.use('dark_background')

# Load the pre-split dataset from .npy files.
# Cast to float32 once here so Keras does not convert on every batch.
x_train = np.load('sauce/x_train.npy').astype(np.float32)
y_train = np.load('sauce/y_train.npy').astype(np.float32)
x_val = np.load('sauce/x_val.npy').astype(np.float32)
y_val = np.load('sauce/y_val.npy').astype(np.float32)

# Expected shapes (from the original author's notes):
#   x_train: (2586, 26, 34, 1), y_train: (2586, 1)
#     -> 2586 grayscale images, 26 px tall x 34 px wide
#   x_val:   (288, 26, 34, 1),  y_val:   (288, 1)
26 +
# Training-set augmentation: small random rotations/shifts/shears enlarge the
# effective dataset and reduce overfitting.  rescale maps pixel values from
# [0, 255] into [0, 1].
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    rotation_range=10,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
)

# BUGFIX: the validation set must only be rescaled, never augmented.  The
# original code applied the same random transforms here, which made
# val_acc/val_loss measure performance on distorted images rather than on the
# real held-out data.
val_datagen = ImageDataGenerator(rescale=1. / 255)

# Wrap the in-memory arrays in batch generators.
train_generator = train_datagen.flow(
    x=x_train, y=y_train,
    batch_size=32,
    shuffle=True,
)
val_generator = val_datagen.flow(
    x=x_val, y=y_val,
    batch_size=32,
    shuffle=False,  # keep a stable order so validation metrics are reproducible
)
53 +
# CNN for binary classification: three Conv2D + MaxPooling stages followed by
# a fully connected head ending in a single sigmoid unit (output in [0, 1]).
inputs = Input(shape=(26, 34, 1))  # 26x34 single-channel (grayscale) images

net = Conv2D(32, kernel_size=3, strides=1, padding='same', activation='relu')(inputs)
net = MaxPooling2D(pool_size=2)(net)  # halve spatial dims, keep strongest activations
net = Conv2D(64, kernel_size=3, strides=1, padding='same', activation='relu')(net)
net = MaxPooling2D(pool_size=2)(net)
net = Conv2D(128, kernel_size=3, strides=1, padding='same', activation='relu')(net)
net = MaxPooling2D(pool_size=2)(net)

net = Flatten()(net)           # flatten feature maps into a 1-D vector
net = Dense(512)(net)          # 512-unit fully connected layer
net = Activation('relu')(net)
net = Dense(1)(net)            # single output unit
outputs = Activation('sigmoid')(net)  # squash to [0, 1]

model = Model(inputs=inputs, outputs=outputs)
# Target is 0-or-1, so binary cross-entropy; also track accuracy.
model.compile(optimizer='Nadam', loss='binary_crossentropy', metrics=['acc'])

model.summary()
80 +
# Train from the batch generators defined above.  fit_generator is the entry
# point for generator input in this (older) standalone-Keras API; modern
# tf.keras accepts generators in model.fit directly.
model.fit_generator(
    train_generator,
    epochs=50,
    validation_data=val_generator,
    callbacks=[
        # save_best_only: write a checkpoint only when val_acc improves.
        ModelCheckpoint('sauce/models.h5', monitor='val_acc',
                        save_best_only=True, mode='max', verbose=1),
        # If val_acc plateaus for 10 epochs, shrink the learning rate 5x
        # (factor=0.2), never going below min_lr.
        ReduceLROnPlateau(monitor='val_acc', factor=0.2, patience=10,
                          verbose=1, mode='auto', min_lr=1e-05),
    ],
)
92 +
93 +
94 +
95 +
96 +