김명락

final
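# ------------------------------------------------------------------------------
# Script 1 (Windows, eBest xingAPI via COM): downloads daily OHLCV candles for
# Samsung Biologics (shcode 207940) with the t8413 TR, one date chunk at a time,
# and writes the combined result to a CSV file.
# ------------------------------------------------------------------------------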

import win32com.client
import pythoncom
import pandas as pd
import time

# COM event handler: OnLogin fires when the xingAPI server answers the login request.
class XASessionEventHandler:
    login_state = 0

    def OnLogin(self, code, msg):
        if code == "0000":
            print("Login succeeded")
            XASessionEventHandler.login_state = 1
        else:
            print("Login failed")

# COM event handler: OnReceiveData fires when a TR (query) response arrives.
class XAQueryEventHandler:
    query_state = 0

    def OnReceiveData(self, code):
        XAQueryEventHandler.query_state = 1

# ===================================================================================================================
instXASession = win32com.client.DispatchWithEvents("XA_Session.XASession", XASessionEventHandler)

# account.txt holds three whitespace-separated values: id, password, certificate password.
with open('account.txt') as f:
    user_id, passwd, cert_passwd = f.read().split()

instXASession.ConnectServer("hts.ebestsec.co.kr", 20001)
instXASession.Login(user_id, passwd, cert_passwd, 0, 0)

# Pump COM messages until the OnLogin event flips login_state.
while XASessionEventHandler.login_state == 0:
    pythoncom.PumpWaitingMessages()
# Date boundaries that split the full history into request-sized chunks (newest first).
datecut = ["20210526", "20190517", "20170426", "20150421", "20130409", "20110407", "20090413", "20070404", "20050404", "20030326", "20010316", "19900305", "19880616", "19860926", "19850316"]
res = pd.DataFrame()
for i in range(len(datecut) - 1):
    # ----------------------------------------------------------------------------
    # t8413: daily stock chart (OHLCV) TR
    # ----------------------------------------------------------------------------
    instXAQueryT8413 = win32com.client.DispatchWithEvents("XA_DataSet.XAQuery", XAQueryEventHandler)
    instXAQueryT8413.ResFileName = "C:\\eBEST\\xingAPI\\Res\\t8413.res"

    instXAQueryT8413.SetFieldData("t8413InBlock", "shcode", 0, "207940")    # Samsung Biologics
    instXAQueryT8413.SetFieldData("t8413InBlock", "gubun", 0, "2")          # daily candles
    instXAQueryT8413.SetFieldData("t8413InBlock", "sdate", 0, datecut[i+1])
    instXAQueryT8413.SetFieldData("t8413InBlock", "edate", 0, datecut[i])
    instXAQueryT8413.SetFieldData("t8413InBlock", "comp_yn", 0, "N")        # no compression

    instXAQueryT8413.Request(0)

    # Wait until OnReceiveData signals that this chunk's response is ready.
    while XAQueryEventHandler.query_state == 0:
        pythoncom.PumpWaitingMessages()

    count = instXAQueryT8413.GetBlockCount("t8413OutBlock1")

    stockdata = []
    datelist = []
    for j in range(count):
        date = instXAQueryT8413.GetFieldData("t8413OutBlock1", "date", j)
        open_price = instXAQueryT8413.GetFieldData("t8413OutBlock1", "open", j)   # renamed so the open() builtin is not shadowed
        high = instXAQueryT8413.GetFieldData("t8413OutBlock1", "high", j)
        low = instXAQueryT8413.GetFieldData("t8413OutBlock1", "low", j)
        close = instXAQueryT8413.GetFieldData("t8413OutBlock1", "close", j)
        vol = instXAQueryT8413.GetFieldData("t8413OutBlock1", "jdiff_vol", j)
        datelist.append(date)
        stockdata.append([date, open_price, high, low, close, vol])

    df = pd.DataFrame(stockdata, columns=['날짜', '시가', '고가', '저가', '종가', '거래량'], index=datelist)
    print(df)

    res = pd.concat([res, df])   # DataFrame.append was removed in pandas 2.0
    XAQueryEventHandler.query_state = 0
    time.sleep(3)                # pause between TR requests to respect the API rate limit

res = res.drop_duplicates()   # drop_duplicates returns a new frame; assign the result back
print(res)
res.to_csv('C:/Users/myung/OneDrive/바탕 화면/삼성바이오로직스.csv', sep=',', na_rep='NaN', index=False)
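
# ------------------------------------------------------------------------------
# Script 2 (Google Colab): reads the CSV produced above from Google Drive, builds
# sliding-window datasets, defines a small LSTM, loads previously saved weights,
# and plots predicted vs. actual (scaled) close prices.
# ------------------------------------------------------------------------------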
from google.colab import drive
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.preprocessing import MinMaxScaler
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, LSTM
from keras.callbacks import EarlyStopping, ModelCheckpoint

drive.mount('/content/drive')
df_price = pd.read_csv('/content/drive/My Drive/Colab Notebooks/삼성바이오로직스.csv', encoding='cp949')

# Parse the YYYYMMDD date column and split it into year/month/day columns.
df_price['날짜'] = pd.to_datetime(df_price['날짜'], format='%Y%m%d')
df_price['연도'] = df_price['날짜'].dt.year
df_price['월'] = df_price['날짜'].dt.month
df_price['일'] = df_price['날짜'].dt.day
df = df_price.loc[df_price['연도'] >= 1990]

# Min-max scale the OHLCV columns to the [0, 1] range.
scaler = MinMaxScaler()
scale_cols = ['시가', '고가', '저가', '종가', '거래량']
df_scaled = scaler.fit_transform(df[scale_cols])
df_scaled = pd.DataFrame(df_scaled, columns=scale_cols)

TEST_SIZE = 200     # most recent rows held out as the test set
window_size = 20    # length of each input window

def make_dataset(data, label, window_size=20):
    feature_list = []
    label_list = []
    for i in range(len(data) - window_size):
        feature_list.append(np.array(data.iloc[i:i+window_size]))
        label_list.append(np.array(label.iloc[i+window_size]))
    return np.array(feature_list), np.array(label_list)
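# Shape note: with window_size=20 and N input rows, make_dataset returns features
# of shape (N-20, 20, n_cols) and labels of shape (N-20, 1); each label is the row
# immediately following its 20-row window.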

def main():
    train = df_scaled[:-TEST_SIZE]
    test = df_scaled[-TEST_SIZE:]
    feature_cols = ['시가', '고가', '저가', '거래량']
    label_cols = ['종가']

    train_feature = train[feature_cols]
    train_label = train[label_cols]

    # train dataset
    train_feature, train_label = make_dataset(train_feature, train_label, window_size)

    # split into train / validation sets
    from sklearn.model_selection import train_test_split
    x_train, x_valid, y_train, y_valid = train_test_split(train_feature, train_label, test_size=0.2)

    print(x_train.shape, x_valid.shape)

    test_feature = test[feature_cols]
    test_label = test[label_cols]

    # test dataset (the data we actually predict on)
    test_feature, test_label = make_dataset(test_feature, test_label, window_size)

    print(test_feature.shape, test_label.shape)

    model = Sequential()
    model.add(LSTM(16,
                   input_shape=(train_feature.shape[1], train_feature.shape[2]),
                   activation='relu',
                   return_sequences=False))
    model.add(Dense(1))

    model.compile(loss='mean_squared_error', optimizer='adam')
    early_stop = EarlyStopping(monitor='val_loss', patience=10)
    filename = '/content/drive/My Drive/Colab Notebooks/tmp_samba.h5'
    checkpoint = ModelCheckpoint(filename, monitor='val_loss', verbose=1, save_best_only=True, mode='auto')

    # Training is commented out; uncomment to retrain and refresh the checkpoint file.
    # history = model.fit(x_train, y_train,
    #                     epochs=200,
    #                     batch_size=16,
    #                     validation_data=(x_valid, y_valid),
    #                     callbacks=[early_stop, checkpoint])

    # Load the best weights saved by the checkpoint callback (tmp_samba.h5 must already exist).
    model.load_weights(filename)

    model.summary()

    # Predict on the test windows and plot predictions against the actual (scaled) close prices.
    pred = model.predict(test_feature)
    plt.figure(figsize=(12, 9))
    plt.plot(test_label, label='actual')
    plt.plot(pred, label='prediction')
    plt.legend()
    plt.show()

if __name__ == "__main__":
    main()