최재은

Add : Event Embedding code & data, revise final report

Showing 41 changed files with 4105 additions and 0 deletions
1 +{
2 + "nbformat": 4,
3 + "nbformat_minor": 0,
4 + "metadata": {
5 + "colab": {
6 + "name": "stock-prediction.ipynb",
7 + "provenance": []
8 + },
9 + "kernelspec": {
10 + "name": "python3",
11 + "display_name": "Python 3"
12 + },
13 + "accelerator": "GPU"
14 + },
15 + "cells": [
16 + {
17 + "cell_type": "code",
18 + "metadata": {
19 + "id": "9EBLJGRkA7au",
20 + "colab_type": "code",
21 + "outputId": "6aa769f2-a86f-463c-893c-7a8b0b3aca08",
22 + "colab": {
23 + "base_uri": "https://localhost:8080/",
24 + "height": 122
25 + }
26 + },
27 + "source": [
28 + "from google.colab import auth\n",
29 + "auth.authenticate_user()\n",
30 + "\n",
31 + "from google.colab import drive\n",
32 + "drive.mount('/content/gdrive')"
33 + ],
34 + "execution_count": 0,
35 + "outputs": [
36 + {
37 + "output_type": "stream",
38 + "text": [
39 + "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n",
40 + "\n",
41 + "Enter your authorization code:\n",
42 + "··········\n",
43 + "Mounted at /content/gdrive\n"
44 + ],
45 + "name": "stdout"
46 + }
47 + ]
48 + },
49 + {
50 + "cell_type": "code",
51 + "metadata": {
52 + "id": "xz6TIi8x-3wI",
53 + "colab_type": "code",
54 + "outputId": "47fb88a4-fa0e-4327-b5d0-9ab7f42041f2",
55 + "colab": {
56 + "base_uri": "https://localhost:8080/",
57 + "height": 34
58 + }
59 + },
60 + "source": [
61 + "%tensorflow_version 1.x"
62 + ],
63 + "execution_count": 0,
64 + "outputs": [
65 + {
66 + "output_type": "stream",
67 + "text": [
68 + "TensorFlow 1.x selected.\n"
69 + ],
70 + "name": "stdout"
71 + }
72 + ]
73 + },
74 + {
75 + "cell_type": "code",
76 + "metadata": {
77 + "id": "ykRUwvkvIoKH",
78 + "colab_type": "code",
79 + "outputId": "6d116660-4a7f-4c85-a733-6956210958c9",
80 + "colab": {
81 + "base_uri": "https://localhost:8080/",
82 + "height": 68
83 + }
84 + },
85 + "source": [
86 + "!cd gdrive/'My Drive'/'capstone 2' && ls"
87 + ],
88 + "execution_count": 0,
89 + "outputs": [
90 + {
91 + "output_type": "stream",
92 + "text": [
93 + "main.go\t\t preprocessed_data.gsheet title.csv Word2vec.model\n",
94 + "metadata.tsv\t title2020.csv\t\t title.gsheet\n",
95 + "preprocessed_data.csv title2.csv\t\t word2vec\n"
96 + ],
97 + "name": "stdout"
98 + }
99 + ]
100 + },
101 + {
102 + "cell_type": "markdown",
103 + "metadata": {
104 + "id": "C-V3pgCEX5xR",
105 + "colab_type": "text"
106 + },
107 + "source": [
108 + "### Data Preprocessing \n",
109 + "Article titles containing [~ 포토 \\~] are removed from the data list, and for titles containing [\\~], only the [\\~] tag is stripped.\n"
110 + ]
111 + },
112 + {
113 + "cell_type": "code",
114 + "metadata": {
115 + "id": "oZ2Q2_uWViO3",
116 + "colab_type": "code",
117 + "outputId": "0fc44251-8920-4e1a-9472-5603d0ce1264",
118 + "colab": {
119 + "base_uri": "https://localhost:8080/",
120 + "height": 785
121 + }
122 + },
123 + "source": [
124 + "!pip3 install hanja==0.13.0\n",
125 + "!pip3 install git+https://github.com/haven-jeon/PyKoSpacing.git"
126 + ],
127 + "execution_count": 0,
128 + "outputs": [
129 + {
130 + "output_type": "stream",
131 + "text": [
132 + "Requirement already satisfied: hanja==0.13.0 in /usr/local/lib/python3.6/dist-packages (0.13.0)\n",
133 + "Collecting git+https://github.com/haven-jeon/PyKoSpacing.git\n",
134 + " Cloning https://github.com/haven-jeon/PyKoSpacing.git to /tmp/pip-req-build-n_sau7zy\n",
135 + " Running command git clone -q https://github.com/haven-jeon/PyKoSpacing.git /tmp/pip-req-build-n_sau7zy\n",
136 + "Requirement already satisfied (use --upgrade to upgrade): pykospacing==0.1 from git+https://github.com/haven-jeon/PyKoSpacing.git in /usr/local/lib/python3.6/dist-packages\n",
137 + "Collecting tensorflow<=1.6.0,>=1.4.0\n",
138 + " Using cached https://files.pythonhosted.org/packages/d9/0f/fbd8bb92459c75db93040f80702ebe4ba83a52cdb6ad930654c31dc0b711/tensorflow-1.6.0-cp36-cp36m-manylinux1_x86_64.whl\n",
139 + "Requirement already satisfied: keras>=2.1.5 in /usr/local/lib/python3.6/dist-packages (from pykospacing==0.1) (2.2.5)\n",
140 + "Requirement already satisfied: h5py>=2.7.1 in /usr/local/lib/python3.6/dist-packages (from pykospacing==0.1) (2.10.0)\n",
141 + "Requirement already satisfied: argparse>=1.4.0 in /usr/local/lib/python3.6/dist-packages (from pykospacing==0.1) (1.4.0)\n",
142 + "Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (1.12.0)\n",
143 + "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (0.34.2)\n",
144 + "Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (0.8.1)\n",
145 + "Requirement already satisfied: numpy>=1.13.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (1.18.2)\n",
146 + "Collecting tensorboard<1.7.0,>=1.6.0\n",
147 + " Using cached https://files.pythonhosted.org/packages/b0/67/a8c91665987d359211dcdca5c8b2a7c1e0876eb0702a4383c1e4ff76228d/tensorboard-1.6.0-py3-none-any.whl\n",
148 + "Requirement already satisfied: protobuf>=3.4.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (3.10.0)\n",
149 + "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (1.1.0)\n",
150 + "Requirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (1.27.2)\n",
151 + "Requirement already satisfied: gast>=0.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (0.3.3)\n",
152 + "Requirement already satisfied: absl-py>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (0.9.0)\n",
153 + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from keras>=2.1.5->pykospacing==0.1) (3.13)\n",
154 + "Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from keras>=2.1.5->pykospacing==0.1) (1.4.1)\n",
155 + "Requirement already satisfied: keras-applications>=1.0.8 in /usr/local/lib/python3.6/dist-packages (from keras>=2.1.5->pykospacing==0.1) (1.0.8)\n",
156 + "Requirement already satisfied: keras-preprocessing>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from keras>=2.1.5->pykospacing==0.1) (1.1.0)\n",
157 + "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.7.0,>=1.6.0->tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (3.2.1)\n",
158 + "Requirement already satisfied: html5lib==0.9999999 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.7.0,>=1.6.0->tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (0.9999999)\n",
159 + "Requirement already satisfied: bleach==1.5.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.7.0,>=1.6.0->tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (1.5.0)\n",
160 + "Requirement already satisfied: werkzeug>=0.11.10 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.7.0,>=1.6.0->tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (1.0.0)\n",
161 + "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.4.0->tensorflow<=1.6.0,>=1.4.0->pykospacing==0.1) (46.0.0)\n",
162 + "Building wheels for collected packages: pykospacing\n",
163 + " Building wheel for pykospacing (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
164 + " Created wheel for pykospacing: filename=pykospacing-0.1-cp36-none-any.whl size=2255598 sha256=249ac07d0d8b26e4b9d4a1821995b953f9f90c6206cb378f183ff2be5001b607\n",
165 + " Stored in directory: /tmp/pip-ephem-wheel-cache-th23h_qr/wheels/4d/45/58/e26cb2b7f6a063d234158c6fd1e5700f6e15b99d67154340ba\n",
166 + "Successfully built pykospacing\n",
167 + "\u001b[31mERROR: magenta 0.3.19 has requirement tensorflow>=1.12.0, but you'll have tensorflow 1.6.0 which is incompatible.\u001b[0m\n",
168 + "Installing collected packages: tensorboard, tensorflow\n",
169 + " Found existing installation: tensorboard 2.2.0\n",
170 + " Uninstalling tensorboard-2.2.0:\n",
171 + " Successfully uninstalled tensorboard-2.2.0\n",
172 + " Found existing installation: tensorflow 2.2.0rc2\n",
173 + " Uninstalling tensorflow-2.2.0rc2:\n",
174 + " Successfully uninstalled tensorflow-2.2.0rc2\n",
175 + "Successfully installed tensorboard-1.6.0 tensorflow-1.6.0\n"
176 + ],
177 + "name": "stdout"
178 + }
179 + ]
180 + },
181 + {
182 + "cell_type": "code",
183 + "metadata": {
184 + "id": "Ddf2Fp7-JAYI",
185 + "colab_type": "code",
186 + "outputId": "0d3029dd-6db6-41eb-8142-24d42bd516b6",
187 + "colab": {
188 + "base_uri": "https://localhost:8080/",
189 + "height": 34
190 + }
191 + },
192 + "source": [
193 + "import hanja\n",
194 + "import pandas as pd\n",
195 + "import re\n",
196 + "from pykospacing import spacing\n",
197 + "\n",
198 + "news_list = pd.read_csv('gdrive/My Drive/capstone 2/title.csv', encoding='utf-8')\n",
199 + "print(len(news_list))\n",
200 + "photo_regexp = r\"\\[.*포토.*\\]\"\n",
201 + "brac_regexp = r\"\\[.*\\]\"\n",
202 + "spechar_with_regexp = r\"[^가-힣ㄱ-ㅎㅏ-ㅣa-zA-Z0-9\\s\\+\\.\\-]\"\n",
203 + "spechar_without_regexp = r\"[^가-힣ㄱ-ㅎㅏ-ㅣa-zA-Z0-9\\s\\.]\"\n",
204 + "percentage = r\"(\\+\\d*\\.\\d*)|(\\-\\d*\\.\\d*)\"\n",
205 + "is_exist_regexp = r\"[가-힣ㄱ-ㅎㅏ-ㅣa-zA-Z0-9\\s\\.]\"\n",
206 + "\n",
207 + "\n",
208 + "for i, title in enumerate(news_list['title']):\n",
209 + " if re.search(photo_regexp,title):\n",
210 + " news_list.drop(i, inplace=True)\n",
211 + "    else:\n",
212 + "        if \"▲\" in title:  # ▲/▼ glyphs assumed; the originals were lost to mis-encoding\n",
213 + "            title = title.replace(\"▲\",\"상승\")\n",
214 + "        if \"▼\" in title:\n",
215 + "            title = title.replace(\"▼\",\"하락\")\n",
216 + "\n",
217 + " title = hanja.translate(title, 'substitution')\n",
218 + " title = re.sub(brac_regexp, '', title)\n",
219 + "        title = re.sub(r\"\\.{3}|\\.{2}\", '', title)\n",
220 + "\n",
221 + " if re.search(percentage,title):\n",
222 + " title = re.sub(spechar_with_regexp, '',title).lstrip()\n",
223 + " else:\n",
224 + " title = re.sub(spechar_without_regexp, '', title).lstrip()\n",
225 + " \n",
226 + "        if (not re.search(is_exist_regexp, title)) or (len(title) == 0):\n",
227 + "            news_list.drop(i, inplace=True)\n",
228 + "        else:  # only space-correct rows that were kept\n",
229 + "            news_list.loc[i, \"title\"] = spacing(title)\n",
230 + " \n",
231 + "\n",
232 + "\n",
233 + "df = pd.DataFrame(news_list, columns=['title','date','publication'])\n",
234 + "df.to_csv('gdrive/My Drive/capstone 2/without_percentage_preprocessed_data.csv',sep=',',encoding='UTF-8',index=False) \n"
235 + ],
236 + "execution_count": 0,
237 + "outputs": [
238 + {
239 + "output_type": "stream",
240 + "text": [
241 + "100960\n"
242 + ],
243 + "name": "stdout"
244 + }
245 + ]
246 + }
247 + ]
248 +}
\ No newline at end of file
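For reference, the cleaning rules from the notebook above, distilled into a standalone helper (a minimal sketch; the sample headlines are made up for illustration):

import re

photo_regexp = r"\[.*포토.*\]"   # photo-caption tag
brac_regexp = r"\[.*\]"          # any other bracketed tag

def clean_title(title):
    """Return the cleaned headline, or None if the row should be dropped."""
    if re.search(photo_regexp, title):
        return None                             # photo caption: drop the row
    title = re.sub(brac_regexp, "", title)      # strip remaining [...] tags
    title = re.sub(r"\.{3}|\.{2}", "", title)   # strip ellipses
    return title.lstrip() or None               # drop if nothing is left

print(clean_title("[포토] 코스피 개장"))         # None (dropped)
print(clean_title("[특징주] 삼성전자 강세"))      # 삼성전자 강세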
1 +import numpy as np
2 +
3 +print(np.load("./resultEmbeding.pickle", allow_pickle=True))
\ No newline at end of file
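Since np.load with allow_pickle=True falls back to the pickle protocol for non-.npy files, the same object can be read with pickle directly; a minimal sketch (that the file holds the 80-dimensional event-embedding array is an assumption based on the model configs below):

import pickle

with open("./resultEmbeding.pickle", "rb") as f:
    emb = pickle.load(f)  # same object that np.load(..., allow_pickle=True) returns
print(type(emb), getattr(emb, "shape", None))  # assumed: an array of 80-dim event embeddings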
1 +{"class_name": "Sequential", "config": [{"class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true, "batch_input_shape": [null, 5, 80], "dtype": "float32"}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "units": 512, "activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.8}}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "units": 1024, "activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_2", "trainable": true, "rate": 0.8}}, {"class_name": "Dropout", "config": {"name": "dropout_3", "trainable": true, "rate": 0.8}}, {"class_name": "Dense", "config": {"name": "dense_3", "trainable": true, "units": 2, "activation": "softmax", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}], "keras_version": "2.0.2", "backend": "theano"}
\ No newline at end of file
1 +{"class_name": "Sequential", "config": [{"class_name": "Conv1D", "config": {"name": "conv1d_1", "trainable": true, "batch_input_shape": [null, 5, 80], "dtype": "float32", "filters": 128, "kernel_size": [1], "strides": [1], "padding": "valid", "dilation_rate": [1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv1D", "config": {"name": "conv1d_2", "trainable": true, "filters": 128, "kernel_size": [3], "strides": [1], "padding": "same", "dilation_rate": [1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "MaxPooling1D", "config": {"name": "max_pooling1d_1", "trainable": true, "strides": [2], "pool_size": [2], "padding": "valid"}}, {"class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true}}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.8}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "units": 2, "activation": "softmax", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}], "keras_version": "2.0.2", "backend": "theano"}
\ No newline at end of file
1 +{"class_name": "Sequential", "config": [{"class_name": "LSTM", "config": {"name": "lstm_1", "trainable": true, "batch_input_shape": [null, 5, 80], "dtype": "float32", "return_sequences": false, "go_backwards": false, "stateful": false, "unroll": false, "implementation": 0, "units": 256, "activation": "tanh", "recurrent_activation": "hard_sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "recurrent_initializer": {"class_name": "Orthogonal", "config": {"gain": 1.0, "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "unit_forget_bias": true, "kernel_regularizer": {"class_name": "L1L2", "config": {"l1": 0.0, "l2": 0.0010000000474974513}}, "recurrent_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "recurrent_constraint": null, "bias_constraint": null, "dropout": 0.0, "recurrent_dropout": 0.0}}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.6}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "units": 2, "activation": "softmax", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": {"class_name": "L1L2", "config": {"l1": 0.0, "l2": 0.0010000000474974513}}, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}], "keras_version": "2.0.2", "backend": "theano"}
\ No newline at end of file
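The three single-line JSON files above are Keras 2.0 architecture dumps (Theano backend) for an MLP, a 1-D CNN, and an LSTM, each taking a (5, 80) input — five days of 80-dimensional event embeddings — and ending in a two-way softmax. A minimal sketch of rebuilding one for inference (the file names "lstm.json" and "lstm_weights.h5" are assumptions; the commit does not show them):

from keras.models import model_from_json

with open("lstm.json") as f:           # assumed name for the LSTM config above
    model = model_from_json(f.read())
model.load_weights("lstm_weights.h5")  # hypothetical weights file
model.compile(loss="categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
model.summary()                        # input (None, 5, 80) -> softmax over 2 classes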
1 +days Close
2 +1 68.960003
3 +2 64.560002
4 +3 65.460003
5 +4 69.800002
6 +5 70.199998
7 +6 70.650002
8 +7 69.879997
9 +8 70.459997
10 +9 70.489998
11 +10 70.759997
12 +11 72.05
13 +12 72.249998
14 +13 72.64
15 +14 73.979998
16 +15 76.899998
17 +16 77.529999
18 +17 79.63
19 +18 77.809999
20 +19 78.840002
21 +20 78.939998
22 +21 80.900002
23 +22 78.74
24 +23 78.359998
25 +24 81.210003
26 +25 84.630001
27 +26 88.410001
28 +27 90.13
29 +28 87.81
30 +29 86.809998
31 +30 85.290003
32 +31 88.229997
33 +32 88.929999
34 +33 88.990001
35 +34 44.859999
36 +35 44.5
37 +36 44.119998
38 +37 41.789999
39 +38 42.809999
40 +39 42.75
41 +40 40.53
42 +41 39.349999
43 +42 39.83
44 +43 40.27
45 +44 40.320002
46 +45 40.96
47 +46 41.180001
48 +47 42.249999
49 +48 42.959999
50 +49 43.699999
51 +50 42.829999
52 +51 42.549999
53 +52 42.500001
54 +53 42.529999
55 +54 41.750001
56 +55 42.800002
57 +56 41.669999
58 +57 40.890001
59 +58 41.089999
60 +59 41.890001
61 +60 42.330001
62 +61 43.559999
63 +62 43.74
64 +63 41.920002
65 +64 42.659998
66 +65 41.040001
67 +66 37.260002
68 +67 35.350001
69 +68 35.620001
70 +69 37.090001
71 +70 35.510002
72 +71 37.18
73 +72 35.500001
74 +73 36.979999
75 +74 36.190001
76 +75 35.949998
77 +76 35.539999
78 +77 36.060001
79 +78 36.429999
80 +79 36.210001
81 +80 37.149999
82 +81 36.679998
83 +82 37.240001
84 +83 36.969998
85 +84 36.419999
86 +85 35.610001
87 +86 34.13
88 +87 34.770001
89 +88 35.549999
90 +89 35.360002
91 +90 35.839999
92 +91 37.550002
93 +92 37.550002
94 +93 39.759999
95 +94 39.700001
96 +95 39.779999
97 +96 40.740001
98 +97 40.56
99 +98 39.759999
100 +99 40.300001
101 +100 40.039999
102 +101 38.240001
103 +102 37.92
104 +103 36.539999
105 +104 36.920001
106 +105 37.650001
107 +106 35.809999
108 +107 35.900001
109 +108 36
110 +109 37.129999
111 +110 37.980001
112 +111 38.309999
113 +112 37.61
114 +113 37.859999
115 +114 38.550001
116 +115 38.889999
117 +116 37.76
118 +117 37.100001
119 +118 37.309999
120 +119 36.370001
121 +120 36.810001
122 +121 36.500001
123 +122 37.980001
124 +123 37.390001
125 +124 37.63
126 +125 38.250001
127 +126 38.100001
128 +127 38.240001
129 +128 38.35
130 +129 40.750001
131 +130 41.55
132 +131 41.489998
133 +132 43.190001
134 +133 43.630001
135 +134 43.289999
136 +135 43.999999
137 +136 43.810001
138 +137 43.630001
139 +138 43.989999
140 +139 43.800001
141 +140 42.650002
142 +141 42.75
143 +142 43.190001
144 +143 43.220001
145 +144 42.71
146 +145 42.989999
147 +146 42.650002
148 +147 43.820002
149 +148 43.380002
150 +149 43.999999
151 +150 46.1
152 +151 47.68
153 +152 46.25
154 +153 47.150001
155 +154 46.300002
156 +155 45.830001
157 +156 45.869998
158 +157 45.739999
159 +158 45.77
160 +159 46.059999
161 +160 45.739999
162 +161 45.840001
163 +162 46.570001
164 +163 46.889998
165 +164 46.260001
166 +165 46.22
167 +166 48.799999
168 +167 48.679999
169 +168 49.780001
170 +169 51.309999
171 +170 51.400002
172 +171 50.820002
173 +172 49.610001
174 +173 49.87
175 +174 51.210001
176 +175 52.64
177 +176 53.189999
178 +177 52.110001
179 +178 51.9
180 +179 53.199999
181 +180 53.840001
182 +181 53.440001
183 +182 51.080001
184 +183 52.34
185 +184 53.609999
186 +185 54.440001
187 +186 53.749999
188 +187 52.78
189 +188 51.699998
190 +189 51.299999
191 +190 50.370001
192 +191 51.589999
193 +192 49.249999
194 +193 53.739998
195 +194 54.000001
196 +195 53.440001
197 +196 52.21
198 +197 54.939999
199 +198 56.140003
200 +199 55.659999
201 +200 56.789998
202 +201 56.100002
203 +202 57.029997
204 +203 55.41
205 +204 54.469998
206 +205 57.590003
207 +206 57.500001
208 +207 59.949997
209 +208 61.85
210 +209 61.149998
211 +210 60.229997
212 +211 59.900002
213 +212 60.110001
214 +213 61.179998
215 +214 61.54
216 +215 61.449998
217 +216 62.279997
218 +217 64.949998
219 +218 64.520001
220 +219 64.560002
221 +220 64.959998
222 +221 66.52
223 +222 67.110001
224 +223 69.339998
225 +224 69.659999
226 +225 68.100003
227 +226 67.819997
228 +227 71.600003
229 +228 72.63
230 +229 71.820002
231 +230 74.049999
232 +231 73.949997
233 +232 74.08
234 +233 74.329999
235 +234 74.909999
236 +235 74.98
237 +236 72.009999
238 +237 72.180003
239 +238 71.109999
240 +239 71.379998
241 +240 72.110002
242 +241 73.5
243 +242 74.019999
244 +243 73.350003
245 +244 74.229997
246 +245 73.570002
247 +246 71.45
248 +247 71.890003
249 +248 74.750002
250 +249 74.97
251 +250 74.38
252 +251 76.299997
253 +252 76.049998
254 +253 80.860002
255 +254 83.899998
256 +255 84.29
257 +256 85.590003
258 +257 84.710003
259 +258 82.489999
260 +259 79.04
261 +260 76.089999
262 +261 77.670002
263 +262 76.039998
264 +263 74.200003
265 +264 72.33
266 +265 72.03
267 +266 75.000001
268 +267 75.509999
269 +268 75.419997
270 +269 72.100001
271 +270 71.850002
272 +271 67.299998
273 +272 67.599998
274 +273 68.809999
275 +274 64.949998
276 +275 67.309999
277 +276 64.709999
278 +277 67.639999
279 +278 69.220002
280 +279 70.57
281 +280 70.29
282 +281 69.079999
283 +282 71.319997
284 +283 71.75
285 +284 71.46
286 +285 70.990003
287 +286 68.489999
288 +287 69.099999
289 +288 69.609998
290 +289 67.720001
291 +290 65.479997
292 +291 66.310002
293 +292 65.660001
294 +293 63.930001
295 +294 63.189998
296 +295 65.680001
297 +296 67.319999
298 +297 66.23
299 +298 64.310003
300 +299 64.659998
301 +300 63.990003
302 +301 61.809999
303 +302 61.670003
304 +303 60.160002
305 +304 59.959997
306 +305 59.51
307 +306 58.710002
308 +307 62.329998
309 +308 62.750001
310 +309 62.72
311 +310 62.649999
312 +311 61.169998
313 +312 67.210003
314 +313 71.240002
315 +314 69.790002
316 +315 68.670003
317 +316 67.990001
318 +317 66.709998
319 +318 66.469999
320 +319 64.810001
321 +320 66.22
322 +321 65.650001
323 +322 67.629999
324 +323 67.039999
325 +324 65.750003
326 +325 66.169999
327 +326 68.149998
328 +327 69.359999
329 +328 70.390002
330 +329 69.599998
331 +330 71.619997
332 +331 71.139999
333 +332 71.129999
334 +333 71.890003
335 +334 71.890003
336 +335 71.029997
337 +336 70.6
338 +337 68.149998
339 +338 67.700001
340 +339 67.790003
341 +340 64.979999
342 +341 65.259998
343 +342 63.179997
344 +343 64.510001
345 +344 63.380002
346 +345 63.149997
347 +346 63.340001
348 +347 64.329997
349 +348 63.549999
350 +349 61.219999
351 +350 59.77
352 +351 62.170001
353 +352 61.660003
354 +353 59.999998
355 +354 59.719998
356 +355 58.559999
357 +356 60.760002
358 +357 59.240001
359 +358 57.000003
360 +359 58.33
361 +360 57.609997
362 +361 59.379997
363 +362 57.560002
364 +363 57.200001
365 +364 57.47
366 +365 57.860003
367 +366 59.580002
368 +367 58.829998
369 +368 58.990002
370 +369 57.429999
371 +370 56.02
372 +371 58.970001
373 +372 57.270002
374 +373 57.949998
375 +374 57.000003
376 +375 55.770001
377 +376 55.4
378 +377 55
379 +378 55.649999
380 +379 52.960001
381 +380 52.250001
382 +381 50.670002
383 +382 52.37
384 +383 52.899999
385 +384 54.1
386 +385 60.500003
387 +386 60.720001
388 +387 61.419997
389 +388 61.930002
390 +389 63.87
391 +390 63.400002
392 +391 65.589999
393 +392 67.96
394 +393 67.180002
395 +394 68.159998
396 +395 69.589997
397 +396 68.300001
398 +397 67.210003
399 +398 64.780001
400 +399 63.59
401 +400 64.069998
402 +401 63.650002
403 +402 63.940001
404 +403 66.449999
405 +404 67.98
406 +405 67.589998
407 +406 67.909999
408 +407 66.560001
409 +408 67.619999
410 +409 67.309999
411 +410 67.810003
412 +411 68.749998
413 +412 66.979998
414 +413 66.48
415 +414 66.959997
416 +415 67.849998
417 +416 68.380003
418 +417 71.48
419 +418 70.030001
420 +419 72.799997
421 +420 72.519998
422 +421 72.499997
423 +422 72.63
424 +423 74.200003
425 +424 74.170002
426 +425 74.1
427 +426 73.890002
428 +427 73.77
429 +428 75.26
430 +429 74.65
431 +430 73.000002
432 +431 75.749998
433 +432 77.610001
434 +433 76.41
435 +434 77.01
436 +435 76.98
437 +436 74.859998
438 +437 74.08
439 +438 75.380003
440 +439 74.829997
441 +440 74.220003
442 +441 74.629999
443 +442 73.81
444 +443 73.23
445 +444 75.26
446 +445 75.020001
447 +446 75.399997
448 +447 74.289998
449 +448 74.529997
450 +449 78.989999
451 +450 79.950001
452 +451 81.460002
453 +452 81.049999
454 +453 81.68
455 +454 82.189999
456 +455 80.409998
457 +456 80.419998
458 +457 81.08
459 +458 79.160003
460 +459 78.979999
461 +460 78.290003
462 +461 79.710002
463 +462 80.51
464 +463 82.449998
465 +464 83.339998
466 +465 83.12
467 +466 84.350001
468 +467 85.000003
469 +468 84.050001
470 +469 85.609997
471 +470 85.850002
472 +471 86.470003
473 +472 88.599998
474 +473 90.309998
475 +474 91.630001
476 +475 89.54
477 +476 91.809999
478 +477 91.799998
479 +478 91.660002
480 +479 91.320001
481 +480 91.120003
482 +481 91.27
483 +482 89.83
484 +483 87.040003
485 +484 88.259997
486 +485 88.750002
487 +486 86.140002
488 +487 89.050002
489 +488 88.549997
490 +489 87.719998
491 +490 85.47
492 +491 86.309999
493 +492 84.759997
494 +493 82.900002
495 +494 82.199999
496 +495 81.510003
497 +496 81.520003
498 +497 80.870002
499 +498 84.839999
500 +499 83.800002
501 +500 85.659998
502 +501 85.049997
503 +502 85.47
504 +503 92.570003
505 +504 96.999997
506 +505 95.800003
507 +506 94.620003
508 +507 97.099999
509 +508 94.949997
510 +509 89.070003
511 +510 88.500003
512 +511 86.789997
513 +512 85.699999
514 +513 86.700002
515 +514 86.249998
516 +515 85.379998
517 +516 85.939998
518 +517 85.550002
519 +518 85.73
520 +519 84.739997
521 +520 84.749997
522 +521 83.939999
523 +522 84.149997
524 +523 86.150002
525 +524 86.180003
526 +525 83.270003
527 +526 84.88
528 +527 84.700003
529 +528 85.300003
530 +529 85.210001
531 +530 84.829999
532 +531 85.899997
533 +532 89.199999
534 +533 89.509999
535 +534 89.070003
536 +535 88.510003
537 +536 83.929998
538 +537 84.610001
539 +538 87.060003
540 +539 85.409999
541 +540 86.32
542 +541 88.190002
543 +542 87.719998
544 +543 87.999998
545 +544 87.969997
546 +545 89.870001
547 +546 88.400001
548 +547 89.999997
549 +548 89.570001
550 +549 89.590001
551 +550 91.130003
552 +551 91.479998
553 +552 93.869999
554 +553 93.960001
555 +554 93.519998
556 +555 95.849998
557 +556 95.460002
558 +557 93.239998
559 +558 93.750003
560 +559 92.909997
561 +560 93.650001
562 +561 94.5
563 +562 94.270001
564 +563 94.679997
565 +564 93.650001
566 +565 94.250001
567 +566 92.590003
568 +567 92.190001
569 +568 90.240003
570 +569 91.430003
571 +570 90.349998
572 +571 90.4
573 +572 90.270003
574 +573 90.969999
575 +574 93.509997
576 +575 93.239998
577 +576 95.349999
578 +577 98.839999
579 +578 99.919997
580 +579 99.800001
581 +580 99.47
582 +581 100.390001
583 +582 100.400002
584 +583 100.809998
585 +584 103.920002
586 +585 105.060001
587 +586 106.880003
588 +587 107.34
589 +588 108.739999
590 +589 109.36
591 +590 107.519998
592 +591 107.34
593 +592 109.440001
594 +593 110.020001
595 +594 111.98
596 +595 113.539995
597 +596 112.889994
598 +597 110.689997
599 +598 113.619997
600 +599 114.35
601 +600 118.769995
602 +601 121.189997
603 +602 118.4
604 +603 121.33
605 +604 122.670004
606 +605 123.639999
607 +606 124.069996
608 +607 124.490005
609 +608 120.190001
610 +609 120.380005
611 +610 117.500006
612 +611 118.749994
613 +612 120.499994
614 +613 125.090006
615 +614 123.66
616 +615 121.550005
617 +616 123.900005
618 +617 122.999998
619 +618 122.339996
620 +619 119.650002
621 +620 121.889999
622 +621 120.559996
623 +622 122.040003
624 +623 121.259998
625 +624 127.17
626 +625 132.749994
627 +626 132.299997
628 +627 130.330006
629 +628 132.349998
630 +629 132.389999
631 +630 134.069998
632 +631 137.730001
633 +632 138.099997
634 +633 138.910002
635 +634 138.119997
636 +635 140
637 +636 143.750006
638 +637 143.700005
639 +638 134.890003
640 +639 137.260004
641 +640 146.000004
642 +641 143.849995
643 +642 141.430006
644 +643 131.759998
645 +644 135.000006
646 +645 136.49
647 +646 131.85
648 +647 135.249998
649 +648 135.030006
650 +649 134.009996
651 +650 126.389996
652 +651 125.000004
653 +652 127.790001
654 +653 124.029995
655 +654 119.899994
656 +655 117.049995
657 +656 122.060003
658 +657 122.219994
659 +658 127.569996
660 +659 132.510002
661 +660 131.069996
662 +661 135.299999
663 +662 132.249996
664 +663 126.820005
665 +664 134.079998
666 +665 136.249994
667 +666 138.480005
668 +667 144.160002
669 +668 136.760006
670 +669 135.010006
671 +670 131.769999
672 +671 136.710005
673 +672 135.490004
674 +673 136.849995
675 +674 137.200003
676 +675 138.809999
677 +676 138.410004
678 +677 140.919994
679 +678 140.770004
680 +679 140.309994
681 +680 144.150002
682 +681 148.280003
683 +682 153.179995
684 +683 152.769999
685 +684 154.499998
686 +685 153.470001
687 +686 156.34
688 +687 158.449995
689 +688 157.919996
690 +689 156.239998
691 +690 161.449997
692 +691 167.909998
693 +692 167.859997
694 +693 166.789999
695 +694 162.230001
696 +695 167.249996
697 +696 166.980003
698 +697 169.579996
699 +698 172.750002
700 +699 173.500006
701 +700 170.420002
702 +701 174.359999
703 +702 186.160002
704 +703 185.929996
705 +704 182.780005
706 +705 184.699995
707 +706 185.090004
708 +707 186.999994
709 +708 189.949995
710 +709 187.440004
711 +710 187.870001
712 +711 186.180002
713 +712 191.789997
714 +713 186.300005
715 +714 175.469997
716 +715 165.370007
717 +716 153.759995
718 +717 169.960005
719 +718 166.109997
720 +719 164.299995
721 +720 166.390003
722 +721 163.950001
723 +722 168.850006
724 +723 168.459997
725 +724 171.540001
726 +725 172.539997
727 +726 174.809996
728 +727 180.219999
729 +728 184.289999
730 +729 182.220005
731 +730 178.859995
732 +731 179.810003
733 +732 185.5
734 +733 189.949995
735 +734 194.300001
736 +735 194.209999
737 +736 188.540003
738 +737 190.860003
739 +738 191.829998
740 +739 190.390005
741 +740 184.400002
742 +741 182.979996
743 +742 183.119999
744 +743 187.209999
745 +744 193.910006
746 +745 198.799997
747 +746 198.950001
748 +747 198.570005
749 +748 199.829994
750 +749 198.079994
751 +750 194.84
752 +751 194.930002
753 +752 180.049995
754 +753 177.639994
755 +754 171.249994
756 +755 179.399994
757 +756 178.020002
758 +757 172.690001
759 +758 178.780006
760 +759 169.039997
761 +760 159.639996
762 +761 160.889997
763 +762 161.359995
764 +763 155.639997
765 +764 139.070005
766 +765 135.600006
767 +766 130.009998
768 +767 130.009998
769 +768 131.540007
770 +769 132.179995
771 +770 135.360001
772 +771 133.750004
773 +772 131.649996
774 +773 129.359997
775 +774 122.000002
776 +775 121.239998
777 +776 125.480001
778 +777 129.449999
779 +778 124.860001
780 +779 129.399998
781 +780 127.459993
782 +781 124.629995
783 +782 122.180006
784 +783 123.820004
785 +784 121.540005
786 +785 119.459997
787 +786 119.740004
788 +787 119.150003
789 +788 122.959997
790 +789 129.909996
791 +790 125.020004
792 +791 121.729996
793 +792 124.619995
794 +793 124.490005
795 +794 120.930004
796 +795 122.249994
797 +796 119.690002
798 +797 127.350004
799 +798 126.030001
800 +799 127.940004
801 +800 126.610001
802 +801 126.730003
803 +802 132.819996
804 +803 129.670004
805 +804 133.270006
806 +805 139.530003
807 +806 140.979996
808 +807 145.059996
809 +808 140.250006
810 +809 143.010002
811 +810 143.5
812 +811 149.530005
813 +812 147.489998
814 +813 151.609999
815 +814 153.080006
816 +815 155.890003
817 +816 152.84
818 +817 151.439995
819 +818 154.549999
820 +819 147.140003
821 +820 147.780005
822 +821 148.380005
823 +822 153.700006
824 +823 154.489998
825 +824 161.040001
826 +825 168.160004
827 +826 160.199995
828 +827 162.890003
829 +828 168.939995
830 +829 169.73
831 +830 172.240004
832 +831 175.050001
833 +832 173.950003
834 +833 179.999994
835 +834 180.940002
836 +835 184.729996
837 +836 186.66
838 +837 182.59
839 +838 185.060003
840 +839 183.450006
841 +840 188.159994
842 +841 189.959995
843 +842 186.260004
844 +843 189.730003
845 +844 187.619995
846 +845 183.599997
847 +846 185.899996
848 +847 178.190006
849 +848 177.049994
850 +849 181.169994
851 +850 186.429995
852 +851 187.009995
853 +852 186.690001
854 +853 188.749994
855 +854 186.1
856 +855 185.369997
857 +856 185.190006
858 +857 189.429996
859 +858 185.640003
860 +859 181.610004
861 +860 185.640003
862 +861 180.809999
863 +862 173.26
864 +863 172.370007
865 +864 176.840002
866 +865 181.43
867 +866 178.750006
868 +867 180.900002
869 +868 175.270006
870 +869 173.159998
871 +870 173.25
872 +871 177.390001
873 +872 168.260006
874 +873 170.089994
875 +874 167.440001
876 +875 174.680006
877 +876 168.180004
878 +877 170.119995
879 +878 175.160004
880 +879 179.549997
881 +880 174.249996
882 +881 176.629997
883 +882 172.579998
884 +883 173.880001
885 +884 169.639997
886 +885 172.810003
887 +886 171.809994
888 +887 165.150002
889 +888 166.290001
890 +889 162.019997
891 +890 166.26
892 +891 159.029995
893 +892 162.119999
894 +893 154.399996
895 +894 157.080004
896 +895 159.880001
897 +896 158.950006
898 +897 156.659994
899 +898 153.229996
900 +899 160.640005
901 +900 164.190006
902 +901 163.570005
903 +902 169.549995
904 +903 173.559994
905 +904 176.73
906 +905 179.300005
907 +906 179.320005
908 +907 175.740004
909 +908 175.389996
910 +909 173.530006
911 +910 175.840006
912 +911 174.289997
913 +912 176.790001
914 +913 172.549997
915 +914 173.639996
916 +915 174.670006
917 +916 173.739998
918 +917 169.529995
919 +918 166.189999
920 +919 166.960003
921 +920 161.220005
922 +921 160.179995
923 +922 157.919996
924 +923 151.68
925 +924 151.609999
926 +925 152.649996
927 +926 148.940004
928 +927 140.359995
929 +928 139.879997
930 +929 127.830002
931 +930 134.089998
932 +931 140.909994
933 +932 131.049995
934 +933 126.840006
935 +934 128.709995
936 +935 131.930002
937 +936 128.239998
938 +937 105.259999
939 +938 113.659998
940 +939 109.120001
941 +940 100.100001
942 +941 97.069999
943 +942 98.140003
944 +943 89.159998
945 +944 89.789999
946 +945 88.740002
947 +946 96.799999
948 +947 110.26
949 +948 104.079999
950 +949 97.949999
951 +950 101.890002
952 +951 97.4
953 +952 98.439997
954 +953 91.489998
955 +954 96.870001
956 +955 98.229999
957 +956 96.380003
958 +957 92.089998
959 +958 99.909997
960 +959 104.550003
961 +960 111.039998
962 +961 107.589999
963 +962 106.959998
964 +963 110.989997
965 +964 103.300001
966 +965 99.099998
967 +966 98.239999
968 +967 95.879998
969 +968 94.77
970 +969 90.12
971 +970 96.439998
972 +971 90.240003
973 +972 88.140001
974 +973 89.910002
975 +974 86.289999
976 +975 80.49
977 +976 82.580001
978 +977 92.949998
979 +978 90.800002
980 +979 94.999998
981 +980 92.669998
982 +981 88.929999
983 +982 92.47
984 +983 95.899999
985 +984 91.410003
986 +985 94.000002
987 +986 99.719999
988 +987 100.06
989 +988 98.209998
990 +989 94.999998
991 +990 98.27
992 +991 94.749999
993 +992 95.430001
994 +993 89.159998
995 +994 89.429997
996 +995 89.999997
997 +996 85.74
998 +997 86.380001
999 +998 85.039997
1000 +999 85.810001
1001 +1000 86.61
1002 +1001 86.289999
1003 +1002 85.349998
1004 +1003 90.750001
1005 +1004 94.580002
1006 +1005 93.02
1007 +1006 91.01
1008 +1007 92.699999
1009 +1008 90.579997
1010 +1009 88.66
1011 +1010 87.709998
1012 +1011 85.329997
1013 +1012 83.379999
1014 +1013 82.330002
1015 +1014 78.200001
1016 +1015 82.83
1017 +1016 88.36
1018 +1017 88.36
1019 +1018 89.640002
1020 +1019 90.73
1021 +1020 94.2
1022 +1021 92.999999
1023 +1022 90.13
1024 +1023 91.509998
1025 +1024 92.979999
1026 +1025 93.549998
1027 +1026 96.459998
1028 +1027 99.719999
1029 +1028 102.510003
1030 +1029 97.830003
1031 +1030 96.82
1032 +1031 99.270002
1033 +1032 99.16
1034 +1033 94.530001
1035 +1034 94.369997
1036 +1035 90.639998
1037 +1036 91.199998
1038 +1037 86.950001
1039 +1038 90.250003
1040 +1039 91.159997
1041 +1040 89.189999
1042 +1041 89.310001
1043 +1042 87.939997
1044 +1043 88.37
1045 +1044 91.169997
1046 +1045 88.839997
1047 +1046 85.300003
1048 +1047 83.11
1049 +1048 88.629999
1050 +1049 92.679998
1051 +1050 96.350002
1052 +1051 95.929999
1053 +1052 95.420001
1054 +1053 99.659998
1055 +1054 101.52
1056 +1055 101.620003
1057 +1056 101.590002
1058 +1057 107.660001
1059 +1058 106.500001
1060 +1059 106.490001
1061 +1060 109.869998
1062 +1061 106.850002
1063 +1062 104.490002
1064 +1063 105.120003
1065 +1064 108.689998
1066 +1065 112.710003
1067 +1066 115.989998
1068 +1067 118.450001
1069 +1068 115.000002
1070 +1069 116.320005
1071 +1070 119.57
1072 +1071 120.220001
1073 +1072 118.309998
1074 +1073 117.639996
1075 +1074 121.450003
1076 +1075 123.419994
1077 +1076 120.499994
1078 +1077 121.759996
1079 +1078 121.510004
1080 +1079 125.4
1081 +1080 123.900005
1082 +1081 124.729998
1083 +1082 123.900005
1084 +1083 125.139994
1085 +1084 125.829996
1086 +1085 127.240002
1087 +1086 132.070005
1088 +1087 132.709993
1089 +1088 132.500002
1090 +1089 129.060003
1091 +1090 129.190006
1092 +1091 129.570002
1093 +1092 124.420004
1094 +1093 119.489998
1095 +1094 122.949997
1096 +1095 122.419998
1097 +1096 126.650002
1098 +1097 127.450006
1099 +1098 125.869997
1100 +1099 124.179998
1101 +1100 122.5
1102 +1101 130.780003
1103 +1102 133.050001
1104 +1103 135.069994
1105 +1104 135.809998
1106 +1105 139.349998
1107 +1106 139.490002
1108 +1107 140.949995
1109 +1108 143.740005
1110 +1109 144.67
1111 +1110 143.849995
1112 +1111 142.719995
1113 +1112 140.250006
1114 +1113 139.949999
1115 +1114 136.969997
1116 +1115 136.090004
1117 +1116 136.349997
1118 +1117 135.580006
1119 +1118 135.879999
1120 +1119 139.480001
1121 +1120 137.370007
1122 +1121 134.009996
1123 +1122 136.219994
1124 +1123 139.859997
1125 +1124 142.440002
1126 +1125 141.970005
1127 +1126 142.430002
1128 +1127 142.829998
1129 +1128 140.02
1130 +1129 138.609995
1131 +1130 135.400002
1132 +1131 137.220003
1133 +1132 136.359997
1134 +1133 138.520006
1135 +1134 142.34
1136 +1135 142.269999
1137 +1136 146.879997
1138 +1137 147.519999
1139 +1138 151.750002
1140 +1139 152.910002
1141 +1140 151.509996
1142 +1141 156.739996
1143 +1142 157.819994
1144 +1143 159.990004
1145 +1144 160.100006
1146 +1145 160.000004
1147 +1146 160.030005
1148 +1147 162.790001
1149 +1148 163.390001
1150 +1149 166.430004
1151 +1150 165.549997
1152 +1151 165.110001
1153 +1152 163.91
1154 +1153 165.509996
1155 +1154 164.720005
1156 +1155 162.830002
1157 +1156 165.310005
1158 +1157 168.419996
1159 +1158 166.779999
1160 +1159 159.589994
1161 +1160 164.000002
1162 +1161 164.600002
1163 +1162 166.330002
1164 +1163 169.220001
1165 +1164 169.059998
1166 +1165 169.400005
1167 +1166 167.41
1168 +1167 169.450006
1169 +1168 170.049994
1170 +1169 168.210005
1171 +1170 165.300005
1172 +1171 165.180002
1173 +1172 166.549994
1174 +1173 170.309999
1175 +1174 172.930006
1176 +1175 171.140005
1177 +1176 172.559998
1178 +1177 172.160002
1179 +1178 173.719997
1180 +1179 175.160004
1181 +1180 181.869997
1182 +1181 184.550005
1183 +1182 185.020002
1184 +1183 184.020006
1185 +1184 184.480003
1186 +1185 185.5
1187 +1186 183.820002
1188 +1187 182.369995
1189 +1188 186.150002
1190 +1189 185.379997
1191 +1190 185.349997
1192 +1191 180.860001
1193 +1192 184.9
1194 +1193 186.019999
1195 +1194 190.009996
1196 +1195 190.250002
1197 +1196 189.270006
1198 +1197 190.469994
1199 +1198 190.810001
1200 +1199 190.019997
1201 +1200 191.289999
1202 +1201 190.559996
1203 +1202 188.050005
1204 +1203 189.860006
1205 +1204 198.759996
1206 +1205 204.920004
1207 +1206 205.199997
1208 +1207 203.939995
1209 +1208 202.480001
1210 +1209 197.370005
1211 +1210 192.399998
1212 +1211 196.349995
1213 +1212 188.500002
1214 +1213 189.309994
1215 +1214 188.749994
1216 +1215 190.810001
1217 +1216 194.029995
1218 +1217 194.340002
1219 +1218 201.460005
1220 +1219 202.98
1221 +1220 203.250006
1222 +1221 201.990004
1223 +1222 204.450006
1224 +1223 206.630003
1225 +1224 206.999998
1226 +1225 205.960001
1227 +1226 200.509996
1228 +1227 199.919996
1229 +1228 205.879999
1230 +1229 204.440006
1231 +1230 204.190001
1232 +1231 200.589998
1233 +1232 199.909996
1234 +1233 196.969995
1235 +1234 196.230005
1236 +1235 196.479998
1237 +1236 193.320005
1238 +1237 188.949999
1239 +1238 189.870007
1240 +1239 197.800001
1241 +1240 196.429996
1242 +1241 194.669996
1243 +1242 196.979996
1244 +1243 194.169998
1245 +1244 195.030005
1246 +1245 191.859999
1247 +1246 195.43
1248 +1247 198.229998
1249 +1248 200.360006
1250 +1249 202.100006
1251 +1250 209.040005
1252 +1251 211.609997
1253 +1252 209.100006
1254 +1253 211.639997
1255 +1254 210.730003
1 +days ReturnClose
2 +1 -0.41877255
3 +2 -6.38051161
4 +3 1.39405355
5 +4 6.63000122
6 +5 0.57306016
7 +6 0.64103136
8 +7 -1.08988673
9 +8 0.82999431
10 +9 0.04257877
11 +10 0.38303165
12 +11 1.82306819
13 +12 0.27758223
14 +13 0.53979517
15 +14 1.8447109
16 +15 3.94701281
17 +16 0.8192471
18 +17 2.70863024
19 +18 -2.28557202
20 +19 1.32374118
21 +20 0.12683409
22 +21 2.48290353
23 +22 -2.66996532
24 +23 -0.48260351
25 +24 3.6370662
26 +25 4.21130141
27 +26 4.46650119
28 +27 1.94548013
29 +28 -2.57405969
30 +29 -1.13882474
31 +30 -1.75094463
32 +31 3.44705581
33 +32 0.79338323
34 +33 0.06747105
35 +34 -49.58984325
36 +35 -0.80249444
37 +36 -0.85393708
38 +37 -5.28104965
39 +38 2.44077536
40 +39 -0.14015184
41 +40 -5.19298246
42 +41 -2.9114261
43 +42 1.21982468
44 +43 1.10469495
45 +44 0.12416687
46 +45 1.58729655
47 +46 0.53711182
48 +47 2.59834379
49 +48 1.68047341
50 +49 1.72253263
51 +50 -1.99084673
52 +51 -0.65374739
53 +52 -0.11750412
54 +53 0.07058353
55 +54 -1.83399487
56 +55 2.51497239
57 +56 -2.6401938
58 +57 -1.8718455
59 +58 0.48911224
60 +59 1.94695064
61 +60 1.05036999
62 +61 2.90573582
63 +62 0.41322545
64 +63 -4.1609465
65 +64 1.76525755
66 +65 -3.7974615
67 +66 -9.21052365
68 +67 -5.12614304
69 +68 0.76379064
70 +69 4.12689489
71 +70 -4.25990552
72 +71 4.70289469
73 +72 -4.51855568
74 +73 4.16900833
75 +74 -2.13628454
76 +75 -0.66317489
77 +76 -1.14047016
78 +77 1.46314579
79 +78 1.02606209
80 +79 -0.60389241
81 +80 2.59596237
82 +81 -1.26514404
83 +82 1.52672582
84 +83 -0.72503489
85 +84 -1.4876901
86 +85 -2.22404729
87 +86 -4.15613861
88 +87 1.87518605
89 +88 2.24330738
90 +89 -0.53445009
91 +90 1.3574575
92 +91 4.77121386
93 +92 0
94 +93 5.88547772
95 +94 -0.15090041
96 +95 0.20150629
97 +96 2.41327809
98 +97 -0.44182866
99 +98 -1.97238905
100 +99 1.35815396
101 +100 -0.64516624
102 +101 -4.49549961
103 +102 -0.83682268
104 +103 -3.63924314
105 +104 1.03996171
106 +105 1.97724805
107 +106 -4.88712338
108 +107 0.25133204
109 +108 0.27854874
110 +109 3.13888611
111 +110 2.28925942
112 +111 0.86887307
113 +112 -1.8271966
114 +113 0.66471417
115 +114 1.82250929
116 +115 0.88196625
117 +116 -2.90562877
118 +117 -1.74787871
119 +118 0.56603233
120 +119 -2.51942649
121 +120 1.20978825
122 +121 -0.84216243
123 +122 4.05479441
124 +123 -1.55344914
125 +124 0.64188016
126 +125 1.64762424
127 +126 -0.39215685
128 +127 0.36745406
129 +128 0.28765428
130 +129 6.25815124
131 +130 1.96318768
132 +131 -0.14440915
133 +132 4.09738029
134 +133 1.01875432
135 +134 -0.77928488
136 +135 1.64010168
137 +136 -0.43181365
138 +137 -0.41086509
139 +138 0.82511573
140 +139 -0.43191181
141 +140 -2.62556843
142 +141 0.23446189
143 +142 1.02924211
144 +143 0.06946052
145 +144 -1.18001154
146 +145 0.65558183
147 +146 -0.79087464
148 +147 2.74325896
149 +148 -1.00410767
150 +149 1.42922308
151 +150 4.77272965
152 +151 3.42733189
153 +152 -2.99916107
154 +153 1.94594811
155 +154 -1.802755
156 +155 -1.01512091
157 +156 0.08727253
158 +157 -0.28340747
159 +158 0.06559029
160 +159 0.63360061
161 +160 -0.694746
162 +161 0.2186314
163 +162 1.5924956
164 +163 0.68713119
165 +164 -1.34356372
166 +165 -0.08646995
167 +166 5.58199697
168 +167 -0.24590164
169 +168 2.25965904
170 +169 3.07351942
171 +170 0.17541025
172 +171 -1.12840463
173 +172 -2.38095425
174 +173 0.52408586
175 +174 2.68698817
176 +175 2.79242135
177 +176 1.04483093
178 +177 -2.03045313
179 +178 -0.40299558
180 +179 2.50481503
181 +180 1.2030113
182 +181 -0.74294204
183 +182 -4.41616758
184 +183 2.46671687
185 +184 2.42644058
186 +185 1.54822238
187 +186 -1.26745405
188 +187 -1.80464934
189 +188 -2.04623342
190 +189 -0.77369249
191 +190 -1.81286163
192 +191 2.42207261
193 +192 -4.53576283
194 +193 9.11674942
195 +194 0.48381654
196 +195 -1.03703702
197 +196 -2.30164853
198 +197 5.22888144
199 +198 2.18420827
200 +199 -0.85501242
201 +200 2.0301815
202 +201 -1.21499564
203 +202 1.65774504
204 +203 -2.84060509
205 +204 -1.69644829
206 +205 5.72793302
207 +206 -0.1562806
208 +207 4.26086253
209 +208 3.16931292
210 +209 -1.13177365
211 +210 -1.50449882
212 +211 -0.54789144
213 +212 0.35058263
214 +213 1.78006485
215 +214 0.58843088
216 +215 -0.14624959
217 +216 1.35069004
218 +217 4.28709237
219 +218 -0.66204313
220 +219 0.06199783
221 +220 0.61957247
222 +221 2.40148099
223 +222 0.8869528
224 +223 3.32289818
225 +224 0.46149554
226 +225 -2.23944304
227 +226 -0.41116885
228 +227 5.57358621
229 +228 1.43854324
230 +229 -1.11523888
231 +230 3.10498042
232 +231 -0.13504659
233 +232 0.17579852
234 +233 0.33747165
235 +234 0.78030406
236 +235 0.0934468
237 +236 -3.96105762
238 +237 0.23608388
239 +238 -1.48241058
240 +239 0.37969203
241 +240 1.02270107
242 +241 1.92760777
243 +242 0.70748163
244 +243 -0.90515538
245 +244 1.19971911
246 +245 -0.88912168
247 +246 -2.88161199
248 +247 0.61581945
249 +248 3.97829862
250 +249 0.2943117
251 +250 -0.78698146
252 +251 2.58133504
253 +252 -0.3276527
254 +253 6.32479175
255 +254 3.75957943
256 +255 0.46484149
257 +256 1.54229802
258 +257 -1.02815746
259 +258 -2.62071057
260 +259 -4.18232397
261 +260 -3.73228871
262 +261 2.07649234
263 +262 -2.09862747
264 +263 -2.41977255
265 +264 -2.52021957
266 +265 -0.41476566
267 +266 4.12328335
268 +267 0.67999732
269 +268 -0.11919216
270 +269 -4.40201025
271 +270 -0.34673925
272 +271 -6.33264283
273 +272 0.44576524
274 +273 1.78994236
275 +274 -5.6096513
276 +275 3.63356593
277 +276 -3.86272476
278 +277 4.52789375
279 +278 2.33590039
280 +279 1.95030043
281 +280 -0.39676917
282 +281 -1.72144117
283 +282 3.24261441
284 +283 0.60292066
285 +284 -0.40418118
286 +285 -0.65770641
287 +286 -3.52162825
288 +287 0.89064098
289 +288 0.73805934
290 +289 -2.71512291
291 +290 -3.3077436
292 +291 1.26757031
293 +292 -0.98024579
294 +293 -2.63478522
295 +294 -1.15752071
296 +295 3.94050179
297 +296 2.49695185
298 +297 -1.61913104
299 +298 -2.89898384
300 +299 0.54423104
301 +300 -1.0361816
302 +301 -3.4067884
303 +302 -0.2264941
304 +303 -2.4485178
305 +304 -0.33245511
306 +305 -0.75049537
307 +306 -1.34430852
308 +307 6.16589316
309 +308 0.67383766
310 +309 -0.04781036
311 +310 -0.11160874
312 +311 -2.36233204
313 +312 9.8741298
314 +313 5.99612977
315 +314 -2.03537333
316 +315 -1.60481296
317 +316 -0.99024606
318 +317 -1.88263418
319 +318 -0.35976466
320 +319 -2.49736426
321 +320 2.17558861
322 +321 -0.86076563
323 +322 3.01599081
324 +323 -0.87239392
325 +324 -1.92421841
326 +325 0.63877716
327 +326 2.99229111
328 +327 1.77549675
329 +328 1.48501011
330 +329 -1.12232416
331 +330 2.9022975
332 +331 -0.67020109
333 +332 -0.01405679
334 +333 1.06847183
335 +334 0
336 +335 -1.19628038
337 +336 -0.60537381
338 +337 -3.47025779
339 +338 -0.66030376
340 +339 0.13294239
341 +340 -4.14515987
342 +341 0.43090028
343 +342 -3.18725263
344 +343 2.10510298
345 +344 -1.75166483
346 +345 -0.36289838
347 +346 0.30087729
348 +347 1.56298703
349 +348 -1.212495
350 +349 -3.66640446
351 +350 -2.36850543
352 +351 4.01539401
353 +352 -0.82032812
354 +353 -2.69219092
355 +354 -0.46666668
356 +355 -1.94239625
357 +356 3.75683579
358 +357 -2.50164738
359 +358 -3.78122546
360 +359 2.33332795
361 +360 -1.23436139
362 +361 3.07238343
363 +362 -3.06499679
364 +363 -0.62543604
365 +364 0.47202622
366 +365 0.67862015
367 +366 2.97269082
368 +367 -1.25881835
369 +368 0.27197689
370 +369 -2.64452102
371 +370 -2.45516111
372 +371 5.26597822
373 +372 -2.88282003
374 +373 1.1873511
375 +374 -1.63933569
376 +375 -2.15789813
377 +376 -0.66344091
378 +377 -0.72202166
379 +378 1.18181636
380 +379 -4.83377906
381 +380 -1.34063442
382 +381 -3.02392147
383 +382 3.35503835
384 +383 1.01202788
385 +384 2.26843294
386 +385 11.82995009
387 +386 0.36363304
388 +387 1.15282607
389 +388 0.8303566
390 +389 3.1325657
391 +390 -0.7358666
392 +391 3.45425383
393 +392 3.61335727
394 +393 -1.14773102
395 +394 1.45876149
396 +395 2.09800329
397 +396 -1.85370895
398 +397 -1.59589749
399 +398 -3.61553622
400 +399 -1.83698824
401 +400 0.75483252
402 +401 -0.65552679
403 +402 0.45561507
404 +403 3.92555202
405 +404 2.30248461
406 +405 -0.57370109
407 +406 0.47344431
408 +407 -1.98792228
409 +408 1.59254505
410 +409 -0.45844425
411 +410 0.74283763
412 +411 1.38621879
413 +412 -2.57454553
414 +413 -0.74648853
415 +414 0.72201715
416 +415 1.32915329
417 +416 0.78114225
418 +417 4.53348474
419 +418 -2.02853805
420 +419 3.9554419
421 +420 -0.38461403
422 +421 -0.02757998
423 +422 0.17931449
424 +423 2.16164533
425 +424 -0.04043261
426 +425 -0.09438047
427 +426 -0.28339811
428 +427 -0.16240628
429 +428 2.01979124
430 +429 -0.81052352
431 +430 -2.21031212
432 +431 3.76711771
433 +432 2.45544957
434 +433 -1.54619377
435 +434 0.78523753
436 +435 -0.03895598
437 +436 -2.75396467
438 +437 -1.04194232
439 +438 1.75486366
440 +439 -0.72964444
441 +440 -0.81517309
442 +441 0.55240634
443 +442 -1.09875253
444 +443 -0.78580138
445 +444 2.77208794
446 +445 -0.31889317
447 +446 0.50652625
448 +447 -1.47214727
449 +448 0.32305695
450 +449 5.98417037
451 +450 1.21534626
452 +451 1.88868165
453 +452 -0.50331818
454 +453 0.77729921
455 +454 0.62438663
456 +455 -2.16571483
457 +456 0.01243626
458 +457 0.82069388
459 +458 -2.36802787
460 +459 -0.22739261
461 +460 -0.87363384
462 +461 1.81376797
463 +462 1.00363565
464 +463 2.40963607
465 +464 1.07944211
466 +465 -0.26397649
467 +466 1.47978946
468 +467 0.77060106
469 +468 -1.11764937
470 +469 1.85603329
471 +470 0.28034693
472 +471 0.72219101
473 +472 2.4632762
474 +473 1.93002262
475 +474 1.46163551
476 +475 -2.28091343
477 +476 2.53517869
478 +477 -0.01089315
479 +478 -0.15250109
480 +479 -0.37093715
481 +480 -0.21900788
482 +481 0.16461479
483 +482 -1.57773639
484 +483 -3.1058633
485 +484 1.40164747
486 +485 0.55518357
487 +486 -2.940845
488 +487 3.37822142
489 +488 -0.56148792
490 +489 -0.93732245
491 +490 -2.56497726
492 +491 0.98279981
493 +492 -1.7958545
494 +493 -2.19442551
495 +494 -0.84439443
496 +495 -0.8394112
497 +496 0.01226843
498 +497 -0.79735154
499 +498 4.90910956
500 +499 -1.22583335
501 +500 2.21956558
502 +501 -0.71211886
503 +502 0.49383071
504 +503 8.30701182
505 +504 4.78556104
506 +505 -1.23710725
507 +506 -1.23173274
508 +507 2.62100605
509 +508 -2.21421423
510 +509 -6.19272689
511 +510 -0.63994609
512 +511 -1.9322101
513 +512 -1.2559028
514 +513 1.16686466
515 +514 -0.51903574
516 +515 -1.00869568
517 +516 0.65589132
518 +517 -0.45380034
519 +518 0.21040093
520 +519 -1.15479179
521 +520 0.0118008
522 +521 -0.95574989
523 +522 0.25017632
524 +523 2.37671429
525 +524 0.03482414
526 +525 -3.3766534
527 +526 1.93346576
528 +527 -0.21206056
529 +528 0.7083825
530 +529 -0.10551231
531 +530 -0.44595939
532 +531 1.26134388
533 +532 3.84167883
534 +533 0.34753364
535 +534 -0.49156072
536 +535 -0.62871896
537 +536 -5.17456202
538 +537 0.81020257
539 +538 2.89564114
540 +539 -1.89524919
541 +540 1.06545019
542 +541 2.16636006
543 +542 -0.53294477
544 +543 0.31919745
545 +544 -0.03409205
546 +545 2.15983183
547 +546 -1.63569599
548 +547 1.80995021
549 +548 -0.47777335
550 +549 0.0223289
551 +550 1.71894406
552 +551 0.38406122
553 +552 2.61259407
554 +553 0.09587941
555 +554 -0.46828756
556 +555 2.49144573
557 +556 -0.40688159
558 +557 -2.32558554
559 +558 0.54698092
560 +559 -0.89600637
561 +560 0.79647403
562 +561 0.90763373
563 +562 -0.24338519
564 +563 0.43491672
565 +564 -1.08787076
566 +565 0.64068339
567 +566 -1.76127107
568 +567 -0.43201424
569 +568 -2.11519468
570 +569 1.31870563
571 +570 -1.18123697
572 +571 0.05534256
573 +572 -0.14380199
574 +573 0.77544697
575 +574 2.79212711
576 +575 -0.28873811
577 +576 2.26297838
578 +577 3.6601993
579 +578 1.09267302
580 +579 -0.12009208
581 +580 -0.33066232
582 +581 0.92490299
583 +582 0.00996214752503349
584 +583 0.40836254
585 +584 3.08501544
586 +585 1.09699671
587 +586 1.73234531
588 +587 0.4303864
589 +588 1.30426588
590 +589 0.5701683
591 +590 -1.68251829
592 +591 -0.16740886
593 +592 1.95640116
594 +593 0.52997076
595 +594 1.78149335
596 +595 1.39310145
597 +596 -0.57248637
598 +597 -1.94879716
599 +598 2.64703232
600 +599 0.64249518
601 +600 3.86532138
602 +601 2.03755334
603 +602 -2.30216773
604 +603 2.47466216
605 +604 1.10442924
606 +605 0.79073528
607 +606 0.34778147
608 +607 0.33852584
609 +608 -3.45409577
610 +609 0.15808636
611 +610 -2.39242306
612 +611 1.06381952
613 +612 1.47368428
614 +613 3.80913878
615 +614 -1.14318165
616 +615 -1.7062874
617 +616 1.93336068
618 +617 -0.72639787
619 +618 -0.536587
620 +619 -2.19878542
621 +620 1.8721245
622 +621 -1.09115023
623 +622 1.22761036
624 +623 -0.63913879
625 +624 4.87382657
626 +625 4.3878226
627 +626 -0.33898081
628 +627 -1.48903329
629 +628 1.54990555
630 +629 0.03022365
631 +630 1.26897727
632 +631 2.72991949
633 +632 0.26863864
634 +633 0.58653513
635 +634 -0.56871715
636 +635 1.36113745
637 +636 2.67857571
638 +637 -0.0347833
639 +638 -6.13082929
640 +639 1.75698788
641 +640 6.36747759
642 +641 -1.47260886
643 +642 -1.68230037
644 +643 -6.83731004
645 +644 2.4590225
646 +645 1.10369921
647 +646 -3.39951645
648 +647 2.57868639
649 +648 -0.16265582
650 +649 -0.75539506
651 +650 -5.68614299
652 +651 -1.09976426
653 +652 2.23199753
654 +653 -2.94233193
655 +654 -3.3298405
656 +655 -2.3769801
657 +656 4.28022914
658 +657 0.1310757
659 +658 4.37735417
660 +659 3.87238861
661 +660 -1.08671495
662 +661 3.22728552
663 +662 -2.25425205
664 +663 -4.10585343
665 +664 5.72464336
666 +665 1.6184338
667 +666 1.63670539
668 +667 4.10167302
669 +668 -5.1331825
670 +669 -1.27961387
671 +670 -2.39982731
672 +671 3.7489611
673 +672 -0.89240067
674 +673 1.00375744
675 +674 0.25576033
676 +675 1.17346645
677 +676 -0.28816008
678 +677 1.81344551
679 +678 -0.10643628
680 +679 -0.32678127
681 +680 2.73680291
682 +681 2.86507176
683 +682 3.30455348
684 +683 -0.26765636
685 +684 1.13242064
686 +685 -0.66666473
687 +686 1.87007166
688 +687 1.34961942
689 +688 -0.33448975
690 +689 -1.06382855
691 +690 3.33461282
692 +691 4.00123947
693 +692 -0.02977845
694 +693 -0.63743478
695 +694 -2.73397567
696 +695 3.09436909
697 +696 -0.1614308
698 +697 1.55706848
699 +698 1.8693278
700 +699 0.43415571
701 +700 -1.77521838
702 +701 2.31193343
703 +702 6.76760901
704 +703 -0.12355286
705 +704 -1.69418118
706 +705 1.05043766
707 +706 0.2111581
708 +707 1.03192499
709 +708 1.57754069
710 +709 -1.32139567
711 +710 0.22940514
712 +711 -0.89955767
713 +712 3.0132103
714 +713 -2.86250174
715 +714 -5.81320865
716 +715 -5.75596408
717 +716 -7.02062739
718 +717 10.53590695
719 +718 -2.26524352
720 +719 -1.08964062
721 +720 1.27206821
722 +721 -1.46643546
723 +722 2.9887191
724 +723 -0.23097956
725 +724 1.82832961
726 +725 0.58295208
727 +726 1.3156364
728 +727 3.09479041
729 +728 2.25835092
730 +729 -1.12322644
731 +730 -1.84393036
732 +731 0.53114616
733 +732 3.16444964
734 +733 2.39891914
735 +734 2.29007955
736 +735 -0.04632115
737 +736 -2.91951806
738 +737 1.2305081
739 +738 0.5082233
740 +739 -0.75066101
741 +740 -3.14617514
742 +741 -0.77006832
743 +742 0.07651274
744 +743 2.23350809
745 +744 3.57887241
746 +745 2.52178374
747 +746 0.07545473
748 +747 -0.19100075
749 +748 0.63453138
750 +749 -0.87574441
751 +750 -1.63569977
752 +751 0.04619277
753 +752 -7.63351298
754 +753 -1.33851767
755 +754 -3.59716292
756 +755 4.75912425
757 +756 -0.76922634
758 +757 -2.99404614
759 +758 3.52655334
760 +759 -5.44804154
761 +760 -5.5608147
762 +761 0.78301242
763 +762 0.29212382
764 +763 -3.54486749
765 +764 -10.64635847
766 +765 -2.49514552
767 +766 -4.1224246
768 +767 0
769 +768 1.17683949
770 +769 0.48653487
771 +770 2.40581489
772 +771 -1.18941858
773 +772 -1.57009939
774 +773 -1.73945998
775 +774 -5.68954481
776 +775 -0.62295409
777 +776 3.49719818
778 +777 3.16384919
779 +778 -3.54576905
780 +779 3.63606997
781 +780 -1.49923109
782 +781 -2.22030296
783 +782 -1.96581008
784 +783 1.34228018
785 +784 -1.84138179
786 +785 -1.71137725
787 +786 0.23439395
788 +787 -0.49273508
789 +788 3.1976449
790 +789 5.65224396
791 +790 -3.76413837
792 +791 -2.63158526
793 +792 2.37410589
794 +793 -0.1043091
795 +794 -2.85966813
796 +795 1.09153226
797 +796 -2.09406309
798 +797 6.39986789
799 +798 -1.03651587
800 +799 1.51551455
801 +800 -1.0395521
802 +801 0.09478082
803 +802 4.80548635
804 +803 -2.37162483
805 +804 2.7762797
806 +805 4.69722872
807 +806 1.039198
808 +807 2.8940276
809 +808 -3.31586249
810 +809 1.9679115
811 +810 0.34263198
812 +811 4.20209408
813 +812 -1.36427936
814 +813 2.79341044
815 +814 0.96959766
816 +815 1.83563946
817 +816 -1.95650968
818 +817 -0.91599385
819 +818 2.0536213
820 +819 -4.79456231
821 +820 0.43496125
822 +821 0.40600892
823 +822 3.58538942
824 +823 0.51398306
825 +824 4.23975862
826 +825 4.42126363
827 +826 -4.7335923
828 +827 1.67915611
829 +828 3.71415795
830 +829 0.46762461
831 +830 1.47882166
832 +831 1.63144272
833 +832 -0.62839074
834 +833 3.47800569
835 +834 0.52222668
836 +835 2.09461366
837 +836 1.04477023
838 +837 -2.18043502
839 +838 1.35275919
840 +839 -0.86998648
841 +840 2.56745045
842 +841 0.95663322
843 +842 -1.94777379
844 +843 1.86298665
845 +844 -1.11211088
846 +845 -2.14262771
847 +846 1.25272279
848 +847 -4.14738578
849 +848 -0.63977325
850 +849 2.32702634
851 +850 2.90335109
852 +851 0.31110874
853 +852 -0.17111064
854 +853 1.10342974
855 +854 -1.40397038
856 +855 -0.39226384
857 +856 -0.09709824
858 +857 2.289535
859 +858 -2.00073541
860 +859 -2.17086777
861 +860 2.2190402
862 +861 -2.60181207
863 +862 -4.17565347
864 +863 -0.51367482
865 +864 2.59325568
866 +865 2.59556545
867 +866 -1.47715042
868 +867 1.20279492
869 +868 -3.11221445
870 +869 -1.20386143
871 +870 0.05197621
872 +871 2.38961097
873 +872 -5.14684872
874 +873 1.08759535
875 +874 -1.55799465
876 +875 4.32393989
877 +876 -3.72109101
878 +877 1.15352061
879 +878 2.96262
880 +879 2.50627592
881 +880 -2.95182461
882 +881 1.36585426
883 +882 -2.29292819
884 +883 0.75327559
885 +884 -2.43846559
886 +885 1.86866662
887 +886 -0.57867541
888 +887 -3.87637054
889 +888 0.69028095
890 +889 -2.56780563
891 +890 2.61696277
892 +891 -4.34861362
893 +892 1.94303219
894 +893 -4.76190664
895 +894 1.73575652
896 +895 1.78252924
897 +896 -0.58168313
898 +897 -1.44071212
899 +898 -2.18945368
900 +899 4.83587365
901 +900 2.20991091
902 +901 -0.3776119
903 +902 3.6559209
904 +903 2.36508353
905 +904 1.82646123
906 +905 1.45419849
907 +906 0.01115449
908 +907 -1.99643146
909 +908 -0.19916239
910 +909 -1.06048808
911 +910 1.33118188
912 +911 -0.88148825
913 +912 1.43439328
914 +913 -2.39832795
915 +914 0.63170039
916 +915 0.59318707
917 +916 -0.53243715
918 +917 -2.4231628
919 +918 -1.97015047
920 +919 0.46332752
921 +920 -3.43794795
922 +921 -0.64508744
923 +922 -1.41091214
924 +923 -3.95136535
925 +924 -0.04615045
926 +925 0.68596861
927 +926 -2.43039115
928 +927 -5.7607149
929 +928 -0.34197636
930 +929 -8.61452335
931 +930 4.89712579
932 +931 5.08613327
933 +932 -6.9973738
934 +933 -3.21250604
935 +934 1.47428959
936 +935 2.50175365
937 +936 -2.79694076
938 +937 -17.91952539
939 +938 7.98023853
940 +939 -3.9943666
941 +940 -8.26612896
942 +941 -3.02697499
943 +942 1.10230144
944 +943 -9.15019842
945 +944 0.70659602
946 +945 -1.16939193
947 +946 9.08270996
948 +947 13.90495985
949 +948 -5.6049347
950 +949 -5.88970029
951 +950 4.02246354
952 +951 -4.406715
953 +952 1.06775873
954 +953 -7.06013735
955 +954 5.8804275
956 +955 1.40394135
957 +956 -1.88333098
958 +957 -4.45113599
959 +958 8.49169201
960 +959 4.64418591
961 +960 6.20755123
962 +961 -3.10698763
963 +962 -0.58555721
964 +963 3.76776279
965 +964 -6.92854871
966 +965 -4.06583055
967 +966 -0.8678093
968 +967 -2.40228117
969 +968 -1.15769506
970 +969 -4.90661602
971 +970 7.01286951
972 +971 -6.42886264
973 +972 -2.3271298
974 +973 2.00816993
975 +974 -4.02625172
976 +975 -6.72151937
977 +976 2.59659709
978 +977 12.5575162
979 +978 -2.31306729
980 +979 4.62554615
981 +980 -2.45263163
982 +981 -4.03582506
983 +982 3.98066011
984 +983 3.70931005
985 +984 -4.68195625
986 +985 2.83338684
987 +986 6.08510306
988 +987 0.34095568
989 +988 -1.84889266
990 +989 -3.26850633
991 +990 3.44210744
992 +991 -3.58196906
993 +992 0.71768022
994 +993 -6.570264
995 +994 0.30282526
996 +995 0.63737003
997 +996 -4.73333016
998 +997 0.7464439
999 +998 -1.55128963
1000 +999 0.90546099
1001 +1000 0.9322911
1002 +1001 -0.3694735
1003 +1002 -1.08935104
1004 +1003 6.32689294
1005 +1004 4.22038673
1006 +1005 -1.64939942
1007 +1006 -2.16082563
1008 +1007 1.8569377
1009 +1008 -2.28694932
1010 +1009 -2.11966998
1011 +1010 -1.07151139
1012 +1011 -2.71348883
1013 +1012 -2.28524325
1014 +1013 -1.25929121
1015 +1014 -5.01639852
1016 +1015 5.92071476
1017 +1016 6.676325
1018 +1017 0
1019 +1018 1.44862155
1020 +1019 1.21597275
1021 +1020 3.82453433
1022 +1021 -1.27388641
1023 +1022 -3.08602046
1024 +1023 1.53111949
1025 +1024 1.60638294
1026 +1025 0.61303399
1027 +1026 3.11063609
1028 +1027 3.37964034
1029 +1028 2.79783797
1030 +1029 -4.56540812
1031 +1030 -1.03240618
1032 +1031 2.53047098
1033 +1032 -0.11081092
1034 +1033 -4.66922045
1035 +1034 -0.16926267
1036 +1035 -3.95252635
1037 +1036 0.61782879
1038 +1037 -4.66008453
1039 +1038 3.7952869
1040 +1039 1.00830357
1041 +1040 -2.16103342
1042 +1041 0.13454648
1043 +1042 -1.53398722
1044 +1043 0.48897318
1045 +1044 3.1684927
1046 +1045 -2.55566532
1047 +1046 -3.98468496
1048 +1047 -2.56741257
1049 +1048 6.64179882
1050 +1049 4.56955776
1051 +1050 3.95986629
1052 +1051 -0.43591385
1053 +1052 -0.53163557
1054 +1053 4.4435097
1055 +1054 1.86634762
1056 +1055 0.09850571
1057 +1056 -0.02952273
1058 +1057 5.97499644
1059 +1058 -1.07746609
1060 +1059 -0.00938967127332788
1061 +1060 3.1740041
1062 +1061 -2.74869942
1063 +1062 -2.20870375
1064 +1063 0.60292946
1065 +1064 3.39611387
1066 +1065 3.698597
1067 +1066 2.91011881
1068 +1067 2.12087511
1069 +1068 -2.91262049
1070 +1069 1.14782868
1071 +1070 2.79401209
1072 +1071 0.54361546
1073 +1072 -1.58875643
1074 +1073 -0.56631055
1075 +1074 3.23870038
1076 +1075 1.62205924
1077 +1076 -2.36590515
1078 +1077 1.04564487
1079 +1078 -0.20531538
1080 +1079 3.2013792
1081 +1080 -1.19616826
1082 +1081 0.6698894
1083 +1082 -0.66543174
1084 +1083 1.00079818
1085 +1084 0.55138408
1086 +1085 1.12056429
1087 +1086 3.79597841
1088 +1087 0.4845824
1089 +1088 -0.158233
1090 +1089 -2.59622562
1091 +1090 0.10073067
1092 +1091 0.2941373
1093 +1092 -3.97468389
1094 +1093 -3.96239016
1095 +1094 2.89563901
1096 +1095 -0.43106874
1097 +1096 3.45532108
1098 +1097 0.63166521
1099 +1098 -1.23970885
1100 +1099 -1.34265436
1101 +1100 -1.35287327
1102 +1101 6.75918612
1103 +1102 1.73573784
1104 +1103 1.51822096
1105 +1104 0.54786706
1106 +1105 2.60658276
1107 +1106 0.10046932
1108 +1107 1.04666498
1109 +1108 1.97943249
1110 +1109 0.64699803
1111 +1110 -0.56681067
1112 +1111 -0.78554052
1113 +1112 -1.73065379
1114 +1113 -0.21390873
1115 +1114 -2.12933335
1116 +1115 -0.64247136
1117 +1116 0.19104489
1118 +1117 -0.56471655
1119 +1118 0.2212664
1120 +1119 2.64939802
1121 +1120 -1.51275737
1122 +1121 -2.44595678
1123 +1122 1.64912922
1124 +1123 2.67215032
1125 +1124 1.84470546
1126 +1125 -0.32996138
1127 +1126 0.32400999
1128 +1127 0.2808369
1129 +1128 -1.96737243
1130 +1129 -1.00700257
1131 +1130 -2.31584526
1132 +1131 1.34416615
1133 +1132 -0.62673516
1134 +1133 1.58404888
1135 +1134 2.75772007
1136 +1135 -0.04917873
1137 +1136 3.24031632
1138 +1137 0.43573122
1139 +1138 2.86740986
1140 +1139 0.76441515
1141 +1140 -0.91557516
1142 +1141 3.45191746
1143 +1142 0.68903791
1144 +1143 1.37499055
1145 +1144 0.06875555
1146 +1145 -0.06246221
1147 +1146 0.01875062
1148 +1147 1.72467407
1149 +1148 0.36857301
1150 +1149 1.86058081
1151 +1150 -0.52875502
1152 +1151 -0.26577832
1153 +1152 -0.7267888
1154 +1153 0.97614301
1155 +1154 -0.47730712
1156 +1155 -1.14740344
1157 +1156 1.52306268
1158 +1157 1.88130839
1159 +1158 -0.97375433
1160 +1159 -4.3110715
1161 +1160 2.76333615
1162 +1161 0.36585365
1163 +1162 1.05103279
1164 +1163 1.73750915
1165 +1164 -0.09455324
1166 +1165 0.20111617
1167 +1166 -1.17473727
1168 +1167 1.21856878
1169 +1168 0.35407966
1170 +1169 -1.08202827
1171 +1170 -1.72998033
1172 +1171 -0.07259709
1173 +1172 0.82939338
1174 +1173 2.25758339
1175 +1174 1.53837532
1176 +1175 -1.03510145
1177 +1176 0.82972593
1178 +1177 -0.23180112
1179 +1178 0.90613091
1180 +1179 0.82892415
1181 +1180 3.8307792
1182 +1181 1.47358445
1183 +1182 0.2546719
1184 +1183 -0.54047994
1185 +1184 0.24997119
1186 +1185 0.55290383
1187 +1186 -0.9056593
1188 +1187 -0.78881894
1189 +1188 2.07271322
1190 +1189 -0.41364759
1191 +1190 -0.01618298
1192 +1191 -2.42244191
1193 +1192 2.23377141
1194 +1193 0.60573229
1195 +1194 2.14492905
1196 +1195 0.1263123
1197 +1196 -0.51510959
1198 +1197 0.63400854
1199 +1198 0.17850948
1200 +1199 -0.41402652
1201 +1200 0.66835176
1202 +1201 -0.3816211
1203 +1202 -1.31716575
1204 +1203 0.96251048
1205 +1204 4.68765918
1206 +1205 3.09921922
1207 +1206 0.13663527
1208 +1207 -0.61403607
1209 +1208 -0.71589391
1210 +1209 -2.52370406
1211 +1210 -2.51811667
1212 +1211 2.05301302
1213 +1212 -3.99795936
1214 +1213 0.42970397
1215 +1214 -0.29581111
1216 +1215 1.09139447
1217 +1216 1.68753943
1218 +1217 0.15977272
1219 +1218 3.66368371
1220 +1219 0.75448971
1221 +1220 0.13302099
1222 +1221 -0.61992716
1223 +1222 1.21788304
1224 +1223 1.06627387
1225 +1224 0.17906161
1226 +1225 -0.50241401
1227 +1226 -2.6461473
1228 +1227 -0.29424967
1229 +1228 2.98119404
1230 +1229 -0.69943317
1231 +1230 -0.12228771
1232 +1231 -1.76306527
1233 +1232 -0.33900095
1234 +1233 -1.47066233
1235 +1234 -0.37568666
1236 +1235 0.12739795
1237 +1236 -1.60830264
1238 +1237 -2.26050377
1239 +1238 0.48690553
1240 +1239 4.17653853
1241 +1240 -0.69262133
1242 +1241 -0.8959935
1243 +1242 1.18662354
1244 +1243 -1.42653978
1245 +1244 0.44291446
1246 +1245 -1.625394
1247 +1246 1.86073231
1248 +1247 1.43273704
1249 +1248 1.07451345
1250 +1249 0.86843679
1251 +1250 3.433943
1252 +1251 1.22942592
1253 +1252 -1.18614009
1254 +1253 1.21472546
1255 +1254 -0.4299726
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Mon Mar 20 23:41:51 2017
5 +
6 +@author: red-sky
7 +"""
8 +
9 +
10 +import numpy as np
11 +import theano
12 +from theano import tensor as T
13 +
14 +
15 +class EmbeddingLayer(object):
16 + def __init__(self, num_vocab, word_dim, rng, embedding_w=None):
17 + '''
18 + word_dim :: dimension of the word embeddings
19 + num_vocab :: number of word embeddings in the vocabulary
20 +        embedding_w :: pre-trained word vectors (optional)
21 + '''
22 +
23 +        if embedding_w is None:
24 +            embedding_w = rng.uniform(-1.0, 1.0, (num_vocab, word_dim))
25 +        # Cast the array itself: calling .astype on a theano.shared
26 +        # returns a symbolic cast, not a trainable shared variable.
27 +        self.embedding_w = theano.shared(
28 +            np.asarray(embedding_w, dtype=theano.config.floatX),
29 +            name="EmbeddingLayer_W",
30 +            borrow=True
31 +        )
32 +
33 + self.params = [self.embedding_w]
34 + self.infor = [num_vocab, word_dim]
35 +
36 + def words_ind_2vec(self, index):
37 + map_word_vectors = self.embedding_w[index]
38 + output = T.mean(map_word_vectors, axis=0)
39 + return output, map_word_vectors
40 +
41 +
42 +if __name__ == "__main__":
43 +    rng = np.random.RandomState(220495)
44 +    arrWords = T.ivector("words")
45 +    EMBD = EmbeddingLayer(100, 150, rng=rng)
46 +    # words_ind_2vec returns a (mean, per-word vectors) pair
47 +    avg_vector, _ = EMBD.words_ind_2vec(arrWords)
48 +    Word2Vec = theano.function(
49 +        inputs=[arrWords],
50 +        outputs=avg_vector
51 +    )
52 +    Vec = Word2Vec([2, 3, 4])  # accepts any number of word indices
53 +    print("Dim: ", Vec.shape)
54 +    print("Val: ", Vec)
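For intuition, the layer above is just an embedding lookup followed by a mean. A minimal NumPy sketch of the same computation (names and sizes here are illustrative, not part of the module):

import numpy as np

rng = np.random.RandomState(220495)
embedding_w = rng.uniform(-1.0, 1.0, (100, 150))  # (num_vocab, word_dim)

def words_ind_2vec_np(index):
    # gather the word vectors by index, then average over the words
    map_word_vectors = embedding_w[index]
    return map_word_vectors.mean(axis=0), map_word_vectors

avg, vecs = words_ind_2vec_np([1, 2, 3, 4])
print(avg.shape)   # (150,)
print(vecs.shape)  # (4, 150)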
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Sat Mar 25 17:36:33 2017
5 +
6 +@author: red-sky
7 +"""
8 +import sys
9 +import json
10 +import theano
11 +import pickle
12 +import os.path
13 +import numpy as np
14 +import theano.tensor as T
15 +from SmallUtils import createShareVar, ADAM_OPTIMIZER
16 +from EmbeddingLayer import EmbeddingLayer
17 +from RoleDependentLayer import RoleDependentLayer
18 +
19 +
20 +class Input(object):
21 +    def __init__(self, object1, object1_fake, action, object2, rng,
22 +                 vocab_length=4000, wordDim=100, trainedWordsVectors=None):
23 +        # Init embedding layer: input is a vector of word indices, output
24 +        # is the average of their word vectors, as in Ding et al. (2014)
25 +        self.EMBD = EmbeddingLayer(vocab_length, wordDim, rng=rng,
26 + embedding_w=trainedWordsVectors)
27 +
28 + object1_vector, _ = self.EMBD.words_ind_2vec(object1)
29 + action_vector, _ = self.EMBD.words_ind_2vec(action)
30 + object2_vector, _ = self.EMBD.words_ind_2vec(object2)
31 + object1_vector_fake, _ = self.EMBD.words_ind_2vec(object1_fake)
32 +
33 + self.output = [object1_vector, object1_vector_fake,
34 + action_vector, object2_vector]
35 + self.params = self.EMBD.params
36 +
37 + def get_params(self):
38 + trainParams = {
39 + "WordWvec": self.EMBD.embedding_w.get_value()
40 + }
41 + return(trainParams)
42 +
43 +
44 +class ModelBody(object):
45 + def __init__(self, vectorObjects, rng, n_out, n_in,
46 + trainedModelParams=None):
47 + if trainedModelParams is None:
48 + trainedModelParams = {
49 + "roleDependentLayer1_": {
50 + "T": None, "W1": None, "W2": None, "b": None
51 + },
52 + "roleDependentLayer2_": {
53 + "T": None, "W1": None, "W2": None, "b": None
54 + },
55 + "roleDependentLayer3_": {
56 + "T": None, "W1": None, "W2": None, "b": None
57 + }
58 + }
59 +
60 + Obj1, Ob1_fake, Act, Obj2 = vectorObjects
61 +
62 + self.RoleDepen1 = RoleDependentLayer(
63 + left_dependent=T.stack([Obj1, Ob1_fake], axis=0),
64 + right_dependent=Act,
65 + n_in=n_in, n_out=n_out, rng=rng,
66 + trainedParams=trainedModelParams,
67 + name="roleDependentLayer1_"
68 + )
69 + self.RoleDepen1_output = self.RoleDepen1.output
70 +
71 + self.RoleDepen2 = RoleDependentLayer(
72 + left_dependent=Obj2,
73 + right_dependent=Act,
74 + n_in=n_in, n_out=n_out, rng=rng,
75 + trainedParams=trainedModelParams,
76 + name="roleDependentLayer2_"
77 + )
78 + self.RoleDepen2_output = T.flatten(self.RoleDepen2.output, outdim=1)
79 +
80 + self.RoleDepen3 = RoleDependentLayer(
81 + left_dependent=self.RoleDepen1_output,
82 + right_dependent=self.RoleDepen2_output,
83 + n_in=n_out, n_out=n_out, rng=rng,
84 + trainedParams=trainedModelParams,
85 + name="roleDependentLayer3_"
86 + )
87 +
88 + self.params = self.RoleDepen1.params + self.RoleDepen2.params + \
89 + self.RoleDepen3.params
90 +
91 + self.L2 = (
92 + self.RoleDepen1.L2 +
93 + self.RoleDepen2.L2 +
94 + self.RoleDepen3.L2
95 + )
96 + self.output = self.RoleDepen3.output
97 +
98 + def get_params(self):
99 + trainedModelParams = {
100 + "roleDependentLayer1_": self.RoleDepen1.get_params(),
101 + "roleDependentLayer2_": self.RoleDepen2.get_params(),
102 + "roleDependentLayer3_": self.RoleDepen3.get_params()
103 + }
104 + return(trainedModelParams)
105 +
106 +
107 +class LogisticRegression(object):
108 +
109 + def __init__(self, rng, layerInput, n_in, n_out,
110 + paramsLayer=None,
111 + name="LogisticRegression_"):
112 +
113 + self.layerInput = layerInput
114 + if paramsLayer is None:
115 + self.W = createShareVar(rng=rng, name=name+"W",
116 + factor_for_init=n_out + n_in,
117 + dim=(n_in, n_out))
118 + else:
119 + self.W = theano.shared(value=paramsLayer["W"],
120 + name=name+"W", borrow=True)
121 +
122 + if paramsLayer is None:
123 + b_values = np.zeros((n_out,), dtype=theano.config.floatX)
124 + self.b = theano.shared(value=b_values,
125 + name=name+"b", borrow=True)
126 + else:
127 + self.b = theano.shared(value=paramsLayer["b"],
128 + name=name+"b", borrow=True)
129 +
130 + step1 = T.dot(self.layerInput, self.W)
131 + self.prob_givenX = T.tanh(step1 + self.b)
132 + self.y_predict = T.argmax(self.prob_givenX, axis=1)
133 +
134 + self.params = [self.W, self.b]
135 + self.L2 = sum([(param**2).sum() for param in self.params])
136 +
137 + def get_params(self):
138 + trainedParams = {
139 + "W": self.W.get_value(), "b": self.b.get_value()
140 + }
141 + return(trainedParams)
142 +
143 + def neg_log_likelihood(self, y_true):
144 + y_true = T.cast(y_true, "int32")
145 + log_prob = T.log(self.prob_givenX)
146 + nll = -T.mean(log_prob[T.arange(y_true.shape[0]), y_true])
147 + return nll
148 +
149 +    def margin_loss(self):
150 +        # hinge: the real event (row 0) should outscore the corrupted
151 +        # event (row 1) by a margin of 1
152 +        loss = T.maximum(0., 1. - self.prob_givenX[0, 0] +
153 +                         self.prob_givenX[1, 0])
154 +        return loss
152 +
153 + def cal_errors(self, y_true):
154 + if y_true.ndim != self.y_predict.ndim:
155 + raise TypeError(
156 + "y should have the same shape as self.y_pred",
157 + ("y_true", y_true.ndim, "y_pred", self.y_predict.ndim)
158 + )
159 + if y_true.dtype.startswith("int"):
160 + return T.mean(T.neq(self.y_predict, y_true))
161 + else:
162 + raise TypeError(
163 + "y_true should have type int ...",
164 + ("y_true", y_true.type, "y_pred", self.y_predict.type)
165 + )
166 +
167 +
168 +def main(dataPath, trainedParamsPath="modelTrained.pickle",
169 + outputVectorPath="resultEmbeding.pickle",
170 + learning_rate=0.005, L2_reg=0.0001,
171 + n_epochs=500, num_K=150, word_dim=150):
172 + # CONSTANT VARIABLES
173 + RNG = np.random.RandomState(220495 + 280295 + 1)
174 + LABEL_NUM = 2
175 + if os.path.isfile(trainedParamsPath):
176 + with open(trainedParamsPath, 'rb') as handle:
177 + trainedParams = pickle.load(handle)
178 + else:
179 + print("No Trained Model, create new")
180 + trainedParams = {
181 + "Input": {"WordWvec": None}, "Body": None, "Output": None
182 + }
183 +
184 + OPTIMIZER = ADAM_OPTIMIZER
185 + # INPUT DATA
186 +    data_indexed_events = np.load(dataPath, allow_pickle=True)
187 + N_sample = len(data_indexed_events)
188 +# N_sample = 1
189 + all_index = list(set(sum(np.concatenate(data_indexed_events).ravel(), [])))
190 +# all_train_index = list(set(np.hstack(data_indexed_events[0:NNN].flat)))
191 + # Snip tensor at begin
192 + object1 = T.ivector("object1")
193 + object1_fake = T.ivector("object1_fake")
194 + action = T.ivector("action")
195 + object2 = T.ivector("object2")
196 +
197 + constainY = theano.shared(
198 + np.asarray([1, 0], dtype=theano.config.floatX),
199 + borrow=True
200 + )
201 +
202 +    # WORD EMBEDDING VECTORS
203 + wordsEmbedLayer = Input(
204 + object1=object1, object1_fake=object1_fake,
205 + action=action, object2=object2, rng=RNG,
206 +        wordDim=word_dim, vocab_length=len(all_index),
207 + trainedWordsVectors=trainedParams["Input"]["WordWvec"]
208 + )
209 +
210 + Obj1, Ob1_fake, Act, Obj2 = wordsEmbedLayer.output
211 +
212 +    # EVENT EMBEDDING LAYER - THREE ROLE-DEPENDENT LAYERS
213 + eventsEmbedingLayer = ModelBody(
214 + vectorObjects=wordsEmbedLayer.output,
215 + n_out=num_K, n_in=word_dim, rng=RNG,
216 + trainedModelParams=trainedParams["Body"]
217 + )
218 +
219 + # CLASSIFY LAYER
220 + predict_layers = LogisticRegression(
221 + layerInput=eventsEmbedingLayer.output,
222 + rng=RNG, n_in=num_K, n_out=1,
223 + paramsLayer=trainedParams["Output"]
224 + )
225 +
226 + # COST FUNCTION
227 + COST = (
228 + predict_layers.margin_loss() +
229 + L2_reg * predict_layers.L2 +
230 + L2_reg * eventsEmbedingLayer.L2
231 + )
232 +
233 + # GRADIENT CALCULATION and UPDATE
234 + all_params = wordsEmbedLayer.params + \
235 + eventsEmbedingLayer.params + predict_layers.params
236 + print("TRAIN: ", all_params)
237 +
238 + UPDATE = OPTIMIZER(COST, all_params, learning_rate=learning_rate)
239 +
240 + # TRAIN MODEL
241 + GET_COST = theano.function(
242 + inputs=[object1, object1_fake, action, object2],
243 + outputs=[predict_layers.margin_loss(),
244 + predict_layers.prob_givenX],
245 + )
246 +
247 +# TEST = theano.function(
248 +# inputs=[object1, object1_fake, action, object2],
249 +# outputs=eventsEmbedingLayer.RoleDepen2.test,
250 +# on_unused_input='warn'
251 +# )
252 +
253 + TRAIN = theano.function(
254 + inputs=[object1, object1_fake, action, object2],
255 + outputs=[predict_layers.margin_loss()],
256 + updates=UPDATE
257 + )
258 +
259 + GET_EVENT_VECTOR = theano.function(
260 + inputs=[object1, object1_fake, action, object2],
261 + outputs=[predict_layers.margin_loss(),
262 + eventsEmbedingLayer.output],
263 + )
264 +
265 + def generate_fake_object(all_index, RNG, obj):
266 + fake_obj = list(RNG.choice(all_index, len(obj)))
267 + while sorted(fake_obj) == sorted(obj):
268 +            print("Resampling fake object that collided with", obj)
269 + fake_obj = list(RNG.choice(all_index, len(obj)))
270 + return(fake_obj)
271 +
272 + def generate_list_object(data_indexed_events, all_index, RNG):
273 + list_fake_object1 = [
274 + generate_fake_object(all_index, RNG, events[0])
275 + for events in data_indexed_events
276 + ]
277 + list_real_object = set([
278 + "_".join([str(a) for a in sorted(events[0])])
279 + for events in data_indexed_events
280 + ])
281 + wrong = 0
282 + while True:
283 + valid = True
284 + wrong += 1
285 +            for i, obj in enumerate(list_fake_object1):
286 +                s = "_".join([str(a) for a in sorted(obj)])
287 +                if s in list_real_object:
288 +                    valid = False
289 +                    # resample from the object itself, not its string key
290 +                    list_fake_object1[i] = \
291 +                        generate_fake_object(all_index, RNG, obj)
292 +            if valid:
293 +                break
294 +        print("Needed %d resampling loops to avoid real objects" % wrong)
296 + return(list_fake_object1)
297 +
298 + print("*"*72)
299 + print("Begin Training process")
300 +
301 + for epoch in range(n_epochs):
302 + # create false label
303 + print("Begin new epoch: %d" % epoch)
304 +
305 + list_fake_object1 = generate_list_object(data_indexed_events,
306 + all_index, RNG)
307 + cost_of_epoch = []
308 + set_index = set(range(N_sample))
309 + temp_variable = N_sample
310 + print("*" * 72+"\n")
311 + print("*" * 72+"\n")
312 +        # train (parameters are snapshotted after the epoch, below)
318 + RESULT = {}
319 + outCOST = []
320 +        Max_iter = len(set_index)*2
321 +        iter_num = 0
322 +        while len(set_index) > 0 and iter_num <= Max_iter:
323 + iter_num += 1
324 + index = set_index.pop()
325 + ob1_real, act, obj2 = data_indexed_events[index]
326 + ob1_fake = list_fake_object1[index]
327 + cost, probY = GET_COST(ob1_real, ob1_fake, act, obj2)
328 + outCOST.append(cost)
329 +# test = TEST(ob1_real, ob1_fake, act, obj2)
330 +# for a in test:
331 +# print(a, a.shape)
332 +
333 + if cost > 0:
334 + set_index.add(index)
335 + c = TRAIN(ob1_real, ob1_fake, act, obj2)
336 + else:
337 + RESULT[index] = GET_EVENT_VECTOR(ob1_real, ob1_fake, act, obj2)
338 +
339 + if (len(set_index) % 50 == 0 and
340 + temp_variable != len(set_index)):
341 + temp_variable = len(set_index)
342 +                print("%.1f%% of samples left in epoch %d, "
343 +                      "average cost %f"
344 +                      % (len(set_index)/float(N_sample)*100,
345 +                         epoch, np.mean(outCOST[-50:])))
346 +            if iter_num > Max_iter - 5:
347 +                print(set_index, ob1_real, ob1_fake, act, obj2)
348 +
349 +        # snapshot parameters after this epoch's updates, then persist
350 +        model_train = {
351 +            "Input": wordsEmbedLayer.get_params(),
352 +            "Body": eventsEmbedingLayer.get_params(),
353 +            "Output": predict_layers.get_params()
354 +        }
355 +        with open(trainedParamsPath, 'wb') as handle:
356 +            pickle.dump(model_train, handle,
357 +                        protocol=pickle.HIGHEST_PROTOCOL)
352 +
353 + with open(outputVectorPath, 'wb') as handle:
354 + pickle.dump(RESULT, handle,
355 + protocol=pickle.HIGHEST_PROTOCOL)
356 +
360 +if __name__ == "__main__":
361 +    main(dataPath="../../Thesis_data/IndexedEvents.npy",
362 +         trainedParamsPath="TrainedParams.pickle",
363 +         outputVectorPath="resultEmbeding.pickle", n_epochs=20)
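For reference, the margin loss used above follows the corruption scheme of Ding et al. (2014): the event with its real object1 must outscore the same event with a randomly faked object1 by a margin of 1, and training keeps revisiting a sample until its loss reaches zero. A hedged NumPy-style sketch, where score_real and score_fake stand in for prob_givenX[0, 0] and prob_givenX[1, 0]:

def margin_loss(score_real, score_fake):
    # hinge loss: zero once the real event outscores the fake by >= 1
    return max(0.0, 1.0 - score_real + score_fake)

print(margin_loss(0.9, -0.2))  # 0.0 -> already separated, sample is done
print(margin_loss(0.2, 0.1))   # 0.9 -> still inside the margin, keep training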
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Sat Mar 25 16:13:18 2017
5 +
6 +@author: red-sky
7 +"""
8 +
9 +import theano
10 +import numpy as np
11 +import theano.tensor as T
12 +from SmallUtils import createShareVar
13 +
14 +
15 +class RoleDependentLayer(object):
16 + def __init__(self, left_dependent, right_dependent, rng,
17 + n_in=100, n_out=4, trainedParams=None,
18 + name="RoleDependentEmbedding_"):
19 + if trainedParams is None:
20 + trainedParams = {
21 + name: {
22 + "T": None, "W1": None, "W2": None, "b": None
23 + }
24 + }
25 +
26 + if trainedParams[name]["T"] is not None:
27 + assert trainedParams[name]["T"].shape == (n_out, n_in, n_in)
28 + self.T = theano.shared(value=trainedParams[name]["T"],
29 + name=name+"T", borrow=True)
30 + else:
31 + self.T = createShareVar(rng=rng, name=name+"T",
32 + factor_for_init=n_out + n_in,
33 + dim=(n_out, n_in, n_in))
34 +
35 + if trainedParams[name]["W1"] is not None:
36 + assert trainedParams[name]["W1"].shape == (n_in, n_out)
37 + self.W1 = theano.shared(value=trainedParams[name]["W1"],
38 + name=name+"W1", borrow=True)
39 + else:
40 + self.W1 = createShareVar(rng=rng, name=name+"W1",
41 + factor_for_init=n_out + n_in,
42 + dim=(n_in, n_out))
43 +
44 + if trainedParams[name]["W2"] is not None:
45 + assert trainedParams[name]["W2"].shape == (n_in, n_out)
46 + self.W2 = theano.shared(value=trainedParams[name]["W2"],
47 + name=name+"W2", borrow=True)
48 + else:
49 + self.W2 = createShareVar(rng=rng, name=name+"W2",
50 + factor_for_init=n_out + n_in,
51 + dim=(n_in, n_out))
52 +
53 + if trainedParams[name]["b"] is not None:
54 + assert trainedParams[name]["b"].shape == (n_out,)
55 + self.b = theano.shared(value=trainedParams[name]["b"],
56 + name=name+"b", borrow=True)
57 + else:
58 + b_values = np.zeros(shape=(n_out,), dtype=theano.config.floatX)
59 + self.b = theano.shared(value=b_values, name=name+"b", borrow=True)
60 +
61 +        # list of layer params
62 +        self.params = [self.T, self.W1, self.W2, self.b]
63 +        self.n_out = n_out  # kept for output_(), which scans over T
64 +
65 +        # L2 regularization
66 +        self.L2 = sum([(param**2).sum() for param in self.params])
66 +
67 +        # Bi-linear step: one n_in x n_in slice of T per output unit
68 +        def one_kernel(Tk, left, right):
69 +            first_bilinear = theano.dot(left, Tk)
70 +            second_bilinear = theano.dot(first_bilinear, right)
71 +            return(second_bilinear.flatten())
72 +
73 + bi_1, _ = theano.scan(
74 + fn=one_kernel,
75 + sequences=[self.T],
76 + non_sequences=[left_dependent, right_dependent],
77 + n_steps=n_out
78 + )
79 +
80 + # Feed forward network step
81 + feedforward_step1 = theano.dot(left_dependent, self.W1)
82 + feedforward_step2 = theano.dot(right_dependent, self.W2)
83 + feedforward_step3 = (feedforward_step1 +
84 + feedforward_step2.dimshuffle("x", 0) +
85 + self.b.dimshuffle("x", 0))
86 + feedforward_step4 = bi_1.dimshuffle(1, 0) + feedforward_step3
87 + self.output = theano.tensor.tanh(feedforward_step4)
88 + self.test = [feedforward_step3]
89 +
90 +    def output_(self, left_dependent, right_dependent):
91 +
92 +        def one_kernel(Tk, left, right):
93 +            first_bilinear = theano.dot(left, Tk)
94 +            second_bilinear = theano.dot(first_bilinear, right)
95 +            return(second_bilinear.flatten())
96 +
97 +        bi_linear_tensor, _ = theano.scan(
98 +            fn=one_kernel,
99 +            sequences=[self.T],
100 +            non_sequences=[left_dependent, right_dependent],
101 +            n_steps=self.n_out  # stored in __init__; a bare n_out is not in scope here
102 +        )
103 +
104 + bi_linear_tensor = bi_linear_tensor.dimshuffle(1, 0)
105 + feedforward_step1 = theano.dot(left_dependent, self.W1)
106 + feedforward_step2 = theano.dot(right_dependent, self.W2)
107 + feedforward_step3 = (feedforward_step1 +
108 + feedforward_step2.dimshuffle("x", 0) +
109 + self.b.dimshuffle("x", 0))
110 + feedforward_step4 = bi_linear_tensor + feedforward_step3
111 + output = theano.tensor.tanh(feedforward_step4)
112 + return(output)
113 +
114 + def get_params(self):
115 + trainedParams = {
116 + "T": self.T.get_value(), "W1": self.W1.get_value(),
117 + "W2": self.W2.get_value(), "b": self.b.get_value()
118 + }
119 + return(trainedParams)
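The layer computes the neural-tensor composition output_k = tanh(left · T[k] · right + left · W1 + right · W2 + b) for each of the n_out tensor slices. A small NumPy sketch of one forward pass for a single (left, right) pair, with illustrative dimensions:

import numpy as np

rng = np.random.RandomState(0)
n_in, n_out = 100, 4
left = rng.randn(n_in)             # e.g. averaged object word vectors
right = rng.randn(n_in)            # e.g. averaged action word vectors
T_ = rng.randn(n_out, n_in, n_in)
W1 = rng.randn(n_in, n_out)
W2 = rng.randn(n_in, n_out)
b = np.zeros(n_out)

# one bilinear form per output unit, plus a standard feed-forward term
bilinear = np.array([left @ T_[k] @ right for k in range(n_out)])
output = np.tanh(bilinear + left @ W1 + right @ W2 + b)
print(output.shape)  # (4,)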
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Sat Mar 25 15:55:14 2017
5 +
6 +@author: red-sky
7 +"""
8 +import theano
9 +import theano.tensor as T
10 +import numpy as np
11 +
12 +def createShareVar(rng, dim, name, factor_for_init):
13 +    # Glorot-style uniform init, cast to floatX for GPU compatibility
14 +    var_values = np.asarray(
15 +        rng.uniform(
16 +            low=-np.sqrt(6.0 / factor_for_init),
17 +            high=np.sqrt(6.0 / factor_for_init),
18 +            size=dim,
19 +        ),
20 +        dtype=theano.config.floatX,
21 +    )
22 +    Var = theano.shared(value=var_values, name=name, borrow=True)
23 +    return Var
22 +
23 +
24 +def adadelta(lr, tparams, cost, grads, listInput):
25 + """
26 + An adaptive learning rate optimizer
27 +
28 + Parameters
29 + ----------
30 + lr : Theano SharedVariable
31 + Initial learning rate
32 +    tparams: list of Theano SharedVariables
33 +        Model parameters
34 +    grads: Theano variable
35 +        Gradients of cost w.r.t. the parameters
36 +
37 +    cost: Theano variable
38 +        Objective function to minimize
39 +
40 + Notes
41 + -----
42 + For more information, see [ADADELTA]_.
43 +
44 + .. [ADADELTA] Matthew D. Zeiler, *ADADELTA: An Adaptive Learning
45 + Rate Method*, arXiv:1212.5701.
46 + """
47 + np_float = np.asarray(0., dtype=theano.config.floatX)
48 + zipped_grads = [theano.shared(p.get_value() * np_float,
49 + name='%s_grad' % k)
50 + for k, p in enumerate(tparams)]
51 + running_up2 = [theano.shared(p.get_value() * np_float,
52 + name='%s_rup2' % k)
53 + for k, p in enumerate(tparams)]
54 + running_grads2 = [theano.shared(p.get_value() * np_float,
55 + name='%s_rgrad2' % k)
56 + for k, p in enumerate(tparams)]
57 +
58 + zgup = [(zg, g) for zg, g in zip(zipped_grads, grads)]
59 + rg2up = [(rg2, 0.95 * rg2 + 0.05 * (g ** 2))
60 + for rg2, g in zip(running_grads2, grads)]
61 +
62 + f_grad_shared = theano.function(inputs=listInput,
63 + outputs=cost,
64 + updates=zgup + rg2up,
65 + name='adadelta_f_grad_shared')
66 +
67 + updir = [-T.sqrt(ru2 + 1e-6) / T.sqrt(rg2 + 1e-6) * zg
68 + for zg, ru2, rg2 in zip(zipped_grads,
69 + running_up2,
70 + running_grads2)]
71 + ru2up = [(ru2, 0.95 * ru2 + 0.05 * (ud ** 2))
72 + for ru2, ud in zip(running_up2, updir)]
73 + param_up = [(p, p + ud) for p, ud in zip(tparams, updir)]
74 +
75 + f_update = theano.function([lr], [], updates=ru2up + param_up,
76 + on_unused_input='ignore',
77 + name='adadelta_f_update')
78 +
79 + return f_grad_shared, f_update
80 +
81 +
82 +def ADAM_OPTIMIZER(loss, all_params, learning_rate=0.001,
83 + b1=0.9, b2=0.999, e=1e-8, gamma=1-1e-8):
84 + """
85 + CITE: http://sebastianruder.com/optimizing-gradient-descent/index.html#adam
86 + ADAM update rules
87 + Default values are taken from [Kingma2014]
88 + References:
89 + [Kingma2014] Kingma, Diederik, and Jimmy Ba.
90 + "Adam: A Method for Stochastic Optimization."
91 + arXiv preprint arXiv:1412.6980 (2014).
92 + http://arxiv.org/pdf/1412.6980v4.pdf
93 + """
94 + updates = []
95 + all_grads = theano.grad(loss, all_params)
96 + alpha = learning_rate
97 + t = theano.shared(np.float32(1))
98 + # (Decay the first moment running average coefficient)
99 + b1_t = b1*gamma**(t-1)
100 +
101 + for params_previous, g in zip(all_params, all_grads):
102 + init_moment = np.zeros(params_previous.get_value().shape,
103 + dtype=theano.config.floatX)
104 + # (the mean)
105 + first_moment = theano.shared(init_moment)
106 + # (the uncentered variance)
107 + second_moment = theano.shared(init_moment)
108 +
109 + # (Update biased first moment estimate)
110 + bias_m = b1_t*first_moment + (1 - b1_t)*g
111 +
112 + # (Update biased second raw moment estimate)
113 + bias_v = b2*second_moment + (1 - b2)*g**2
114 +
115 + # (Compute bias-corrected first moment estimate)
116 + unbias_m = bias_m / (1-b1**t)
117 +
118 + # (Compute bias-corrected second raw moment estimate)
119 + unbias_v = bias_v / (1-b2**t)
120 +
121 + # (Update parameters)
122 + update_term = (alpha * unbias_m) / (T.sqrt(unbias_v) + e)
123 + params_new = params_previous - update_term
124 +
125 + updates.append((first_moment, bias_m))
126 + updates.append((second_moment, bias_v))
127 + updates.append((params_previous, params_new))
128 +    # update the timestep once per call, outside the parameter loop;
129 +    # appending it per parameter would register duplicate updates for t
130 +    updates.append((t, t + 1.))
131 +    return updates
\ No newline at end of file
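A minimal smoke test for ADAM_OPTIMIZER, assuming Theano is installed and SmallUtils is importable; the quadratic objective and learning rate are illustrative:

import numpy as np
import theano
from SmallUtils import ADAM_OPTIMIZER

# single shared parameter, loss (x - 3)^2 with its minimum at x = 3
x = theano.shared(np.asarray(0.0, dtype=theano.config.floatX), name="x")
loss = (x - 3.0) ** 2
step = theano.function([], loss,
                       updates=ADAM_OPTIMIZER(loss, [x], learning_rate=0.1))
for _ in range(200):
    step()
print(x.get_value())  # roughly 3.0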
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Thu Apr 13 17:01:36 2017
5 +
6 +@author: red-sky
7 +"""
8 +
9 +import sys
10 +import numpy as np
11 +np.random.seed(280295)
12 +import keras.backend as K
13 +from keras.models import Sequential
14 +from keras.layers import Dense, Activation, Dropout, Flatten
15 +from keras.layers import Conv1D, GlobalAveragePooling1D, MaxPooling1D
16 +from keras.callbacks import ModelCheckpoint, EarlyStopping
17 +from keras import regularizers, optimizers
18 +
19 +
20 +def recall(y_true, y_pred):
21 + """Recall metric.
22 +
23 + Only computes a batch-wise average of recall.
24 +
25 + Computes the recall, a metric for multi-label classification of
26 + how many relevant items are selected.
27 + """
28 + true_positives = K.sum(K.round(K.clip(y_true[:, 1] * y_pred[:, 1], 0, 1)))
29 + possible_positives = K.sum(K.round(K.clip(y_true[:, 1], 0, 1)))
30 + recall = true_positives / (possible_positives + K.epsilon())
31 + return recall
32 +
33 +
34 +def precision(y_true, y_pred):
35 + """Precision metric.
36 +
37 + Only computes a batch-wise average of precision.
38 +
39 + Computes the precision, a metric for multi-label classification of
40 + how many selected items are relevant.
41 + """
42 + true_positives = K.sum(K.round(K.clip(y_true[:, 1] * y_pred[:, 1], 0, 1)))
43 + predicted_positives = K.sum(K.round(K.clip(y_pred[:, 1], 0, 1)))
44 + precision = true_positives / (predicted_positives + K.epsilon())
45 + return precision
46 +
47 +
48 +def fbeta_score(y_true, y_pred):
49 +
50 + # If there are no true positives, fix the F score at 0 like sklearn.
51 + if K.sum(K.round(K.clip(y_true, 0, 1))) == 0:
52 + return 0
53 +
54 + p = precision(y_true, y_pred)
55 + r = recall(y_true, y_pred)
56 + bb = 1 ** 2
57 + fbeta_score = (1 + bb) * (p * r) / (bb * p + r + K.epsilon())
58 + return fbeta_score
59 +
60 +
61 +def main(dataX_path, dataY_path, result_path,
62 + n_epoch, input_dim, days):
63 +
64 + # load data
65 + np.random.seed(2204)
66 + X = np.load(dataX_path)
67 + Y = np.load(dataY_path)
68 +
69 + # build Model
70 + model = Sequential()
71 + model.add(Conv1D(128, 1, activation='relu', input_shape=(days, input_dim)))
72 + model.add(Conv1D(128, 3, activation='relu', padding='same'))
73 + model.add(MaxPooling1D(2))
74 + model.add(Flatten())
75 + model.add(Dropout(0.8))
76 + model.add(Dense(2, activation='softmax'))
77 + adam = optimizers.Adam(lr=0.001)
78 + model.compile(loss='categorical_crossentropy',
79 + optimizer=adam,
80 + metrics=['accuracy', recall, precision, fbeta_score])
81 +
82 + # model Compile
83 + model_name = result_path+'model2_price_move_predict.hdf5'
84 +    checkpointer = ModelCheckpoint(filepath=model_name,
85 +                                   monitor='val_fbeta_score', mode="max",
86 +                                   verbose=2, save_best_only=True)
87 + earlystopper = EarlyStopping(monitor='val_loss', patience=20, verbose=2)
88 +
89 + outmodel = open(result_path+'model2_price_move_predict.json', 'w')
90 + outmodel.write(model.to_json())
91 + outmodel.close()
92 +
93 + # process Training
94 +    model.fit(X, Y, batch_size=32, verbose=2,
95 +              validation_split=0.1, epochs=n_epoch,
96 +              callbacks=[checkpointer, earlystopper])
97 +
98 +
99 +if __name__ == "__main__":
100 + dataX = sys.argv[1]
101 + dataY = sys.argv[2]
102 + model_path = sys.argv[3]
103 + n_epoch = int(sys.argv[4])
104 + input_dim = int(sys.argv[5])
105 + days = int(sys.argv[6])
106 + main(dataX, dataY, model_path, n_epoch, input_dim, days)
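A hedged smoke test for the script above (and for the LSTM and MLP variants that follow): generate random windows with the expected shapes (samples, days, input_dim) and one-hot up/down labels, save them, and invoke the script. File names and sizes here are illustrative:

import numpy as np

days, input_dim, n = 5, 150, 64
X = np.random.randn(n, days, input_dim)
Y = np.eye(2)[np.random.randint(0, 2, size=n)]  # one-hot (up, down)
np.save("toyX.npy", X)
np.save("toyY.npy", Y)
# then: python <this_script>.py toyX.npy toyY.npy ./ 5 150 5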
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Thu Apr 13 17:01:36 2017
5 +
6 +@author: red-sky
7 +"""
8 +
9 +import sys
10 +import numpy as np
11 +np.random.seed(280295)
12 +import keras.backend as K
13 +from keras.models import Sequential
14 +from keras import regularizers, optimizers
15 +from keras.layers import Dense, Activation, LSTM, Dropout
16 +from keras.callbacks import ModelCheckpoint, EarlyStopping
17 +
18 +def recall(y_true, y_pred):
19 + """Recall metric.
20 +
21 + Only computes a batch-wise average of recall.
22 +
23 + Computes the recall, a metric for multi-label classification of
24 + how many relevant items are selected.
25 + """
26 + true_positives = K.sum(K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))
27 + possible_positives = K.sum(K.round(K.clip(y_true[:, 0], 0, 1)))
28 + recall = true_positives / (possible_positives + K.epsilon())
29 + return recall
30 +
31 +
32 +def precision(y_true, y_pred):
33 + """Precision metric.
34 +
35 + Only computes a batch-wise average of precision.
36 +
37 + Computes the precision, a metric for multi-label classification of
38 + how many selected items are relevant.
39 + """
40 + true_positives = K.sum(K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))
41 + predicted_positives = K.sum(K.round(K.clip(y_pred[:, 0], 0, 1)))
42 + precision = true_positives / (predicted_positives + K.epsilon())
43 + return precision
44 +
45 +
46 +def fbeta_score(y_true, y_pred):
47 +
48 + # If there are no true positives, fix the F score at 0 like sklearn.
49 + if K.sum(K.round(K.clip(y_true, 0, 1))) == 0:
50 + return 0
51 +
52 + p = precision(y_true, y_pred)
53 + r = recall(y_true, y_pred)
54 + bb = 1 ** 2
55 + fbeta_score = (1 + bb) * (p * r) / (bb * p + r + K.epsilon())
56 + return fbeta_score
57 +
58 +
59 +def main(dataX_path, dataY_path, result_path,
60 + n_epoch, input_dim, days):
61 +
62 + # load data
63 + np.random.seed(2204)
64 + X = np.load(dataX_path)
65 + Y = np.load(dataY_path)
66 +
67 + # build Model
68 + model = Sequential()
69 + model.add(LSTM(256, input_shape=(days, input_dim),
70 + kernel_regularizer=regularizers.l2(0.001)))
71 +
72 + model.add(Dropout(0.6))
73 + model.add(Dense(2, activation='softmax',
74 + kernel_regularizer=regularizers.l2(0.001)))
75 + adam = optimizers.Adam(lr=0.001)
76 + model.compile(loss='categorical_crossentropy',
77 + optimizer=adam,
78 + metrics=['accuracy', recall, precision, fbeta_score])
79 +
80 + # model Compile
81 + model_name = result_path+'model2_price_move_predict.hdf5'
82 + checkpointer = ModelCheckpoint(filepath=model_name,
83 + monitor='val_fbeta_score', mode="max",
84 + verbose=2, save_best_only=True)
85 + earlystopper = EarlyStopping(monitor='val_loss', patience=20, verbose=2)
86 +
87 + outmodel = open(result_path+'model2_price_move_predict.json', 'w')
88 + outmodel.write(model.to_json())
89 + outmodel.close()
90 +
91 + # process Training
92 +    model.fit(X, Y, batch_size=32, verbose=2,
93 +              validation_split=0.1, epochs=n_epoch,
94 +              callbacks=[checkpointer, earlystopper])
95 +
96 +if __name__ == "__main__":
97 + dataX = sys.argv[1]
98 + dataY = sys.argv[2]
99 + model_path = sys.argv[3]
100 + n_epoch = int(sys.argv[4])
101 + input_dim = int(sys.argv[5])
102 + days = int(sys.argv[6])
103 + main(dataX, dataY, model_path, n_epoch, input_dim, days)
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Thu Apr 13 17:01:36 2017
5 +
6 +@author: red-sky
7 +"""
8 +
9 +import sys
10 +import numpy as np
11 +np.random.seed(280295)
12 +import keras.backend as K
13 +from keras.models import Sequential
14 +from keras.layers import Dense, Activation, Dropout, Flatten
15 +from keras.layers import Conv1D, GlobalAveragePooling1D, MaxPooling1D
16 +from keras.callbacks import ModelCheckpoint, EarlyStopping
17 +from keras import regularizers, optimizers
18 +
19 +
20 +def recall(y_true, y_pred):
21 + """Recall metric.
22 +
23 + Only computes a batch-wise average of recall.
24 +
25 + Computes the recall, a metric for multi-label classification of
26 + how many relevant items are selected.
27 + """
28 + true_positives = K.sum(K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))
29 + possible_positives = K.sum(K.round(K.clip(y_true[:, 0], 0, 1)))
30 + recall = true_positives / (possible_positives + K.epsilon())
31 + return recall
32 +
33 +
34 +def precision(y_true, y_pred):
35 + """Precision metric.
36 +
37 + Only computes a batch-wise average of precision.
38 +
39 + Computes the precision, a metric for multi-label classification of
40 + how many selected items are relevant.
41 + """
42 + true_positives = K.sum(K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))
43 + predicted_positives = K.sum(K.round(K.clip(y_pred[:, 0], 0, 1)))
44 + precision = true_positives / (predicted_positives + K.epsilon())
45 + return precision
46 +
47 +
48 +def fbeta_score(y_true, y_pred):
49 +
50 + # If there are no true positives, fix the F score at 0 like sklearn.
51 + if K.sum(K.round(K.clip(y_true, 0, 1))) == 0:
52 + return 0
53 +
54 + p = precision(y_true, y_pred)
55 + r = recall(y_true, y_pred)
56 + bb = 1 ** 2
57 + fbeta_score = (1 + bb) * (p * r) / (bb * p + r + K.epsilon())
58 + return fbeta_score
59 +
60 +
61 +def main(dataX_path, dataY_path, result_path,
62 + n_epoch, input_dim, days):
63 +
64 + # load data
65 + np.random.seed(2204)
66 + X = np.load(dataX_path)
67 + Y = np.load(dataY_path)
68 +
69 + # build Model
70 + model = Sequential()
71 + model.add(Flatten(input_shape=(days, input_dim)))
72 + model.add(Dense(512, activation='sigmoid'))
73 + model.add(Dropout(0.8))
74 + model.add(Dense(1024, activation='sigmoid'))
75 + model.add(Dropout(0.8))
76 +#    model.add(Dense(1024, activation='sigmoid'))
77 +#    model.add(Dropout(0.8))  # orphaned once the Dense above was commented out
78 + model.add(Dense(2, activation='softmax'))
79 +
80 + adam = optimizers.Adam(lr=0.001)
81 + model.compile(loss='categorical_crossentropy',
82 + optimizer=adam,
83 + metrics=['accuracy', recall, precision, fbeta_score])
84 +
85 + # model Compile
86 + model_name = result_path+'model2_price_move_predict.hdf5'
87 + checkpointer = ModelCheckpoint(filepath=model_name, monitor='val_acc',
88 + verbose=2, save_best_only=True)
89 + earlystopper = EarlyStopping(monitor='val_loss', patience=20, verbose=2)
90 +
91 + outmodel = open(result_path+'model2_price_move_predict.json', 'w')
92 + outmodel.write(model.to_json())
93 + outmodel.close()
94 +
95 + # process Training
96 +    model.fit(X, Y, batch_size=32, verbose=2,
97 +              validation_split=0.1, epochs=n_epoch,
98 +              callbacks=[checkpointer, earlystopper])
99 +
100 +
101 +if __name__ == "__main__":
102 + dataX = sys.argv[1]
103 + dataY = sys.argv[2]
104 + model_path = sys.argv[3]
105 + n_epoch = int(sys.argv[4])
106 + input_dim = int(sys.argv[5])
107 + days = int(sys.argv[6])
108 + main(dataX, dataY, model_path, n_epoch, input_dim, days)
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Thu Mar 16 21:57:57 2017
5 +
6 +@author: red-sky
7 +"""
8 +
9 +import bs4
10 +import json
11 +import sys
12 +from bs4 import BeautifulSoup
13 +import requests
15 +
16 +BLOOMBERG_params = {
17 + "sort_by_newest": "time:desc",
18 + "sort_by_oldest": "time:asc",
19 + "source_from_bloomberg": "sites=bview",
20 + "end_time": "2017-03-12T15:20:16.240Z"
21 +}
22 +
23 +DATA_TO_EXTRACT = {
24 + "query_list_news": ["div", {"class": "search-result-story__container"}],
25 + "query_headline": ["h1", {"class": "search-result-story__headline"}],
26 + "query_time_published": ["time", {"class": "published-at"}],
27 + "query_body": ["div", {"class": "search-result-story__body"}]
28 +}
29 +
30 +
31 +def parser_url(query_string, page,
32 + sort_by="sort_by_oldest",
33 + source="source_from_bloomberg"):
34 + url = "https://www.bloomberg.com/"
35 + # add search query
36 + url = url + "search?query=" + query_string + "&"
37 + # add sort by
38 + url = url + "sort=" + BLOOMBERG_params[sort_by] + "&"
39 +    # restrict results to Bloomberg's own site (value already has "sites=")
40 +    url = url + BLOOMBERG_params[source] + "&"
41 + # add page number
42 + url = url + "page=" + str(page)
43 + return url
44 +
45 +
46 +def get_rid_off_key(list_contents):
47 + body_string = ""
48 + for substring in list_contents:
49 + if (type(substring) == bs4.element.Tag):
50 + # join all body string and
51 + # eliminate highlight query string key
52 + body_string += substring.string
53 + else:
54 + if (type(substring.string) == bs4.element.NavigableString):
55 + body_string += substring.string
56 + return(body_string)
57 +
58 +
59 +def extract_from_url(url):
60 + try:
61 +        # requests.Response exposes .text, not .read()
62 +        response = requests.get(url)
63 +        html_of_page = response.text
64 +        soup_object = BeautifulSoup(html_of_page, "lxml")
64 + # Extract list of news in soup object
65 + param_to_find = DATA_TO_EXTRACT["query_list_news"]
66 + list_of_news = soup_object.find_all(param_to_find[0],
67 + attrs=param_to_find[1])
68 + if (len(list_of_news) == 0):
69 + return None
70 + # create list result extracted
71 + result = []
72 + for block_new in list_of_news:
73 + # extract time from block
74 + param_to_find = DATA_TO_EXTRACT["query_time_published"]
75 + time = block_new.find_all(param_to_find[0],
76 + attrs=param_to_find[1])
77 + time = time[0]["datetime"]
78 +
79 + # extract new headline
80 + param_to_find = DATA_TO_EXTRACT["query_headline"]
81 + headline = block_new.find_all(param_to_find[0],
82 + attrs=param_to_find[1])
83 + headline = get_rid_off_key(headline[0].a.contents)
84 +
85 + # extract new body list if string
86 + param_to_find = DATA_TO_EXTRACT["query_body"]
87 + body = block_new.find_all(param_to_find[0],
88 + attrs=param_to_find[1])
89 +            # print(body)  # debug output
90 +
91 + body_string = get_rid_off_key(body[0].contents)
92 + extracted_from_block = {"time": time,
93 + "headline": headline,
94 + "body": body_string}
95 + # for debug :
96 + # print("\t".join(extracted_from_block))
97 + if len(body_string) >= 5:
98 + result.append(extracted_from_block)
99 + except Exception as inst:
100 +        print("Something went wrong:", inst)
101 +        print("URL: ", url)
102 + result = []
103 + return(result)
104 +
105 +
106 +def Query(key, max_page=5000):
107 +    # loop over result pages until the site returns no more news
108 +    page = 1
109 +    all_result_query = []
110 +    error = 0
111 +    while page < max_page:
112 +        print("Collected: %d articles" % len(all_result_query))
113 +        new_url = parser_url(key, page)
114 +        result = extract_from_url(new_url)
115 +        if result is None:  # no news blocks on this page: stop
116 +            break
117 +        if len(result) > 0 or error > 10:
118 +            page += 1
119 +            error = 0
120 +        else:
121 +            error += 1
122 +        all_result_query += result
123 +    return(all_result_query)
127 +
128 +
129 +if __name__ == "__main__":
130 + print("Begin query information about: ", sys.argv[1])
131 + print("Then will save result in: ", sys.argv[2])
132 +
133 + News = Query(sys.argv[1], int(sys.argv[4]))
134 + file_name1 = sys.argv[2]
135 +
136 + with open(file_name1, "w") as W:
137 + json.dump(News, W, indent=1)
138 +
139 + file_name2 = sys.argv[3]
140 + with open(file_name2, "w") as W:
141 + W.write("\n".join([new["body"] for new in News]))
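For reference, parser_url only concatenates query parameters; a quick check of the URL it builds (run in the module's namespace, output shown as a comment):

url = parser_url("apple", page=3)
print(url)
# https://www.bloomberg.com/search?query=apple&sort=time:asc&sites=bview&page=3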
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Mon Mar 20 17:52:11 2017
5 +
6 +@author: red-sky
7 +"""
8 +import sys
9 +import json
10 +import numpy as np
11 +
12 +
13 +def updateDict(words, dictUp):
14 +    # update the word-count dict "dictUp" with the given "words"
15 +    for w in words:
16 +        if w in dictUp:
17 +            dictUp[w] += 1
18 +        else:
19 +            dictUp[w] = 1  # count the first occurrence as 1, not 0
20 +    return dictUp
21 +
22 +def extractVocab(eventsFile, fromIndex=0, toIndex=-1):
23 +    # from the events file, count words and build index mappings
24 + vocab = dict()
25 + with open(eventsFile, "r") as file:
26 + list_events = file.read().strip().splitlines()
27 + if toIndex == -1:
28 + list_events = list_events[fromIndex:]
29 + else:
30 + list_events = sorted(set(list_events[fromIndex:toIndex]))
31 + for i, event in enumerate(list_events):
32 + if event[0] != "\t":
33 + index = i
34 + break
35 + list_events = list_events[index:]
36 + for event in list_events:
37 + event = event.split("\t")
38 + words = event[1].split(" ") + \
39 + event[2].split(" ") + \
40 + event[3].split(" ")
41 + vocab = updateDict(words, vocab)
42 + vocab_words = vocab.keys()
43 + support_words = ["NOISEWORDS"]
44 + vocab_words = support_words + \
45 + sorted(vocab_words, key=lambda x: vocab[x], reverse=True)
46 + IndexWords = range(len(vocab_words))
47 + Count = ["NOISEWORDS"] + [vocab[w] for w in vocab_words[1:]]
48 + result = [dict(zip(vocab_words, Count)),
49 + dict(zip(IndexWords, vocab_words)),
50 + dict(zip(vocab_words, IndexWords))]
51 + return result, list_events
52 +
53 +
54 +def convertEvent(eventsFile, vocabMapping, countMin=5):
55 + # convert all Events to index for training
56 + wordCount, _, word2index = vocabMapping
57 + Events = []
58 + with open(eventsFile, "r") as file:
59 + list_events = file.read().strip().splitlines()
60 +
61 + for event in list_events:
62 + event = event.split("\t")
63 + list_obj = [event[1].split(" "),
64 + event[2].split(" "),
65 + event[3].split(" ")]
66 +
67 +        # keep words appearing at least countMin times; map the rest to 0
68 + wordsIndexed = []
69 + for obj in list_obj:
70 + objIndex = []
71 + for w in obj:
72 + if wordCount[w] >= countMin:
73 + objIndex.append(word2index[w])
74 + else:
75 + objIndex.append(0)
76 + wordsIndexed.append(objIndex)
77 + Events.append(wordsIndexed)
78 + return Events
79 +
80 +
81 +if __name__ == "__main__":
82 + # in
83 + EventPath = "../../Thesis_data/Apple_query_result_body.txt"
84 + fromIndex = 0
85 + toIndex = -1
86 + minCountWord = 5
87 + # out
88 + EventNewPath = "./Events_for_training.txt"
89 + VocabPath = "./Vocab_in_events_for_training.json"
90 + IndexdEventPath = "./IndexedEvents_for_training.npy"
91 +
92 + vocabMapping, EventNew = extractVocab(EventPath, fromIndex, toIndex)
93 + with open(VocabPath, "w") as W:
94 + json.dump(vocabMapping, W, indent=2)
95 +
96 + with open(EventNewPath, "w") as W:
97 + W.write("\n".join(EventNew))
98 +
99 + indexed_events = convertEvent(EventNewPath, vocabMapping, minCountWord)
100 + np.save(arr=np.array(indexed_events), file=IndexdEventPath)
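The two functions above assume one tab-separated event per line, with the (object1, action, object2) word strings in fields 1 to 3 and the date in field 0. A toy line (hypothetical content) and how it is split:

line = "2017-03-16T00:00:00.000Z\tapple inc\tacquires\tsmall startup"
date, obj1, action, obj2 = line.split("\t")[:4]
print(obj1.split(" "))  # ['apple', 'inc'] -> each word mapped to its index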
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Mon Mar 20 11:58:54 2017
5 +
6 +@author: red-sky
7 +"""
8 +
9 +import sys
10 +import json
11 +
12 +
13 +def findDate(news_body, list_news):
14 + date = ""
15 + for ind, new in enumerate(list_news):
16 + if news_body in new["body"]:
17 + date = new["time"]
18 + break
19 + return date
20 +
21 +
22 +def extractAllDate(list_events, list_news, chosenInfo=(1, 2, 3, 0, 6)):
23 + list_result = []
24 + N = len(list_events)
25 + i = 0.0
26 + for event in list_events:
27 + i += 1
28 + if i % 1000 == 0:
29 +            print("Done %.1f percent" % (i / N * 100))
30 + date = [findDate(event[6], list_news)]
31 +        info = date + [event[i] for i in chosenInfo]
32 +        list_result.append(info)
33 + return list_result
34 +
35 +if __name__ == "__main__":
36 + events = open(sys.argv[1], "r").read().strip().splitlines()
37 + events = [event.split("\t") for event in events
38 + if len(event.split("\t")) > 5]
39 + news = json.load(open(sys.argv[2], "r"))
40 + result = extractAllDate(events, news)
41 +
42 + with open(sys.argv[3], "w") as W:
43 + for line in result[1:]:
44 + W.write("\t".join(line)+"\n")
1 +#!/usr/bin/env python3
2 +# -*- coding: utf-8 -*-
3 +"""
4 +Created on Thu Apr 13 16:57:11 2017
5 +
6 +@author: red-sky
7 +"""
8 +import sys
9 +import numpy as np
10 +import pickle
11 +import pandas as pd
12 +
13 +
14 +def main(VectorsPath, EventPath, StockPricePath, days):
15 +
16 + with open(VectorsPath, "rb") as H:
17 + Vec = pickle.load(H)
18 + Vectors = np.array([list(b[0]) for a, b in Vec.values()])
19 +# Vectors = np.load(VectorsPath)
20 + with open(EventPath, "r") as H:
21 + F = np.array([a.split("\t")[0:4] for a in H.read().splitlines()])
22 +
23 + D = {}
24 + for date, vec in zip(F[:, 0], Vectors):
25 + if date[:10] in D:
26 + D[date[:10]].append(vec)
27 + else:
28 + D[date[:10]] = [vec]
29 +
30 + D2 = {}
31 + for date in sorted(D.keys()):
32 + D2[date] = np.mean(D[date], 0)
33 +
34 + Dates = np.array(sorted(D2.keys()))
35 +    # sliding windows of `days` consecutive dates
36 +    SampleIndex = [list(range(i - days, i)) for i in range(days, len(Dates))]
36 + DataX = []
37 + DateX = []
38 + for listIndex in SampleIndex:
39 + DataX.append([D2[date] for date in Dates[listIndex]])
40 + DateX.append(Dates[listIndex[-1]])
41 +
42 + Df = pd.read_csv(StockPricePath)
43 +    LabelY = []
44 +    DataX_yesData = []
45 +    for i, date in enumerate(DateX):
46 +        retu = list(Df.loc[Df["Date"] == date]["ReturnOpen"])
47 +        if len(retu) > 0:
48 +            retu = float(retu[0]) * 100
49 +            # binary label: [1, 0] if the open return is positive,
50 +            # [0, 1] otherwise (a zero return counts as "down")
51 +            if retu > 0:
52 +                LabelY.append([1, 0])
53 +            else:
54 +                LabelY.append([0, 1])
55 +            DataX_yesData.append(list(DataX[i]))
59 +
60 + dataX = np.array(DataX_yesData)
61 + dataY = np.array(LabelY)
62 + print("DataX:", dataX.shape)
63 + print("DataY:", dataY.shape, np.sum(dataY, 0) / np.sum(dataY))
64 + return (dataX, dataY)
65 +
66 +if __name__ == "__main__":
67 + VectorsPath = sys.argv[1]
68 + EventPath = sys.argv[2]
69 + StockPricePath = sys.argv[3]
70 + days = int(sys.argv[5])
71 + DataX, LabelY = main(VectorsPath, EventPath, StockPricePath, days)
72 + DataPath = sys.argv[4]
73 + np.save(arr=DataX, file=DataPath+"/DailyVector" + sys.argv[5] + ".npy")
74 + np.save(arr=LabelY, file=DataPath+"/DailyReturn" + sys.argv[5] + ".npy")
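A compact sketch of the windowing in main: event vectors are averaged per date, stacked into sliding windows of `days` consecutive dates, and each window is labeled by the sign of ReturnOpen on its last date (toy sizes, illustrative names):

import numpy as np

days = 3
dates = ["d1", "d2", "d3", "d4", "d5"]
daily = {d: np.full(4, i, dtype=float) for i, d in enumerate(dates)}  # date -> mean vector

windows, label_dates = [], []
for i in range(days, len(dates)):
    windows.append([daily[d] for d in dates[i - days:i]])
    label_dates.append(dates[i - 1])  # last date inside the window

X = np.array(windows)
print(X.shape)      # (2, 3, 4): windows of `days` x vector dimension
print(label_dates)  # ['d3', 'd4']: dates whose ReturnOpen labels each window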