최재은

Add : add all code and data

1 +{
2 + "nbformat": 4,
3 + "nbformat_minor": 0,
4 + "metadata": {
5 + "colab": {
6 + "name": "bert_news_label.ipynb",
7 + "provenance": []
8 + },
9 + "kernelspec": {
10 + "name": "python3",
11 + "display_name": "Python 3"
12 + },
13 + "accelerator": "GPU"
14 + },
15 + "cells": [
16 + {
17 + "cell_type": "code",
18 + "metadata": {
19 + "id": "58B51bnMtDVX",
20 + "colab_type": "code",
21 + "colab": {
22 + "base_uri": "https://localhost:8080/",
23 + "height": 122
24 + },
25 + "outputId": "107c91fd-3ff1-4816-e90e-7f8bbc006cdc"
26 + },
27 + "source": [
28 + "from google.colab import auth\n",
29 + "auth.authenticate_user()\n",
30 + "\n",
31 + "from google.colab import drive\n",
32 + "drive.mount('/content/gdrive')"
33 + ],
34 + "execution_count": null,
35 + "outputs": [
36 + {
37 + "output_type": "stream",
38 + "text": [
39 + "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n",
40 + "\n",
41 + "Enter your authorization code:\n",
42 + "··········\n",
43 + "Mounted at /content/gdrive\n"
44 + ],
45 + "name": "stdout"
46 + }
47 + ]
48 + },
49 + {
50 + "cell_type": "code",
51 + "metadata": {
52 + "id": "2GWn_WDkvp3g",
53 + "colab_type": "code",
54 + "colab": {}
55 + },
56 + "source": [
57 + "import pandas as pd\n",
58 + "combined_data = pd.read_csv('gdrive/My Drive/capstone 2/event_embedding/Thesis_data/combined_data3.csv', encoding='utf-8') \n",
59 + "combined_data\n",
60 + "\n",
61 + "\n",
62 + "path = \"gdrive/My Drive/capstone 2/\""
63 + ],
64 + "execution_count": null,
65 + "outputs": []
66 + },
67 + {
68 + "cell_type": "code",
69 + "metadata": {
70 + "id": "XBgA_6YRv3KB",
71 + "colab_type": "code",
72 + "colab": {
73 + "base_uri": "https://localhost:8080/",
74 + "height": 1000
75 + },
76 + "outputId": "a356cb4f-98bd-49e0-d29a-c054e41df970"
77 + },
78 + "source": [
79 + "%tensorflow_version 1.x\n",
80 + "import tensorflow as tf\n",
81 + "\n",
82 + "import pandas as pd\n",
83 + "import numpy as np \n",
84 + "import re\n",
85 + "import pickle\n",
86 + "\n",
87 + "import keras as keras\n",
88 + "from keras.models import load_model\n",
89 + "from keras import backend as K\n",
90 + "from keras import Input, Model\n",
91 + "from keras import optimizers\n",
92 + "\n",
93 + "import codecs\n",
94 + "from tqdm import tqdm\n",
95 + "import shutil\n",
96 + "import warnings\n",
97 + "import tensorflow as tf\n",
98 + "import os\n",
99 + "warnings.filterwarnings(action='ignore')\n",
100 + "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' \n",
101 + "tf.logging.set_verbosity(tf.logging.ERROR)\n",
102 + "\n",
103 + "!pip install keras-bert\n",
104 + "!pip install keras-radam"
105 + ],
106 + "execution_count": null,
107 + "outputs": [
108 + {
109 + "output_type": "stream",
110 + "text": [
111 + "TensorFlow 1.x selected.\n"
112 + ],
113 + "name": "stdout"
114 + },
115 + {
116 + "output_type": "stream",
117 + "text": [
118 + "Using TensorFlow backend.\n"
119 + ],
120 + "name": "stderr"
121 + },
122 + {
123 + "output_type": "stream",
124 + "text": [
125 + "Collecting keras-bert\n",
126 + " Downloading https://files.pythonhosted.org/packages/2c/0f/cdc886c1018943ea62d3209bc964413d5aa9d0eb7e493abd8545be679294/keras-bert-0.81.0.tar.gz\n",
127 + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from keras-bert) (1.18.4)\n",
128 + "Requirement already satisfied: Keras in /usr/local/lib/python3.6/dist-packages (from keras-bert) (2.3.1)\n",
129 + "Collecting keras-transformer>=0.30.0\n",
130 + " Downloading https://files.pythonhosted.org/packages/22/b9/9040ec948ef895e71df6bee505a1f7e1c99ffedb409cb6eb329f04ece6e0/keras-transformer-0.33.0.tar.gz\n",
131 + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (1.12.0)\n",
132 + "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (2.10.0)\n",
133 + "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (1.1.2)\n",
134 + "Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (1.4.1)\n",
135 + "Requirement already satisfied: keras-applications>=1.0.6 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (1.0.8)\n",
136 + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (3.13)\n",
137 + "Collecting keras-pos-embd>=0.10.0\n",
138 + " Downloading https://files.pythonhosted.org/packages/09/70/b63ed8fc660da2bb6ae29b9895401c628da5740c048c190b5d7107cadd02/keras-pos-embd-0.11.0.tar.gz\n",
139 + "Collecting keras-multi-head>=0.22.0\n",
140 + " Downloading https://files.pythonhosted.org/packages/a5/f0/a9a7528b8fefacaa9c5db736036fd8c061d754830a29c34129f6847bd338/keras-multi-head-0.24.0.tar.gz\n",
141 + "Collecting keras-layer-normalization>=0.12.0\n",
142 + " Downloading https://files.pythonhosted.org/packages/a4/0e/d1078df0494bac9ce1a67954e5380b6e7569668f0f3b50a9531c62c1fc4a/keras-layer-normalization-0.14.0.tar.gz\n",
143 + "Collecting keras-position-wise-feed-forward>=0.5.0\n",
144 + " Downloading https://files.pythonhosted.org/packages/e3/59/f0faa1037c033059e7e9e7758e6c23b4d1c0772cd48de14c4b6fd4033ad5/keras-position-wise-feed-forward-0.6.0.tar.gz\n",
145 + "Collecting keras-embed-sim>=0.7.0\n",
146 + " Downloading https://files.pythonhosted.org/packages/bc/20/735fd53f6896e2af63af47e212601c1b8a7a80d00b6126c388c9d1233892/keras-embed-sim-0.7.0.tar.gz\n",
147 + "Collecting keras-self-attention==0.41.0\n",
148 + " Downloading https://files.pythonhosted.org/packages/1b/1c/01599219bef7266fa43b3316e4f55bcb487734d3bafdc60ffd564f3cfe29/keras-self-attention-0.41.0.tar.gz\n",
149 + "Building wheels for collected packages: keras-bert, keras-transformer, keras-pos-embd, keras-multi-head, keras-layer-normalization, keras-position-wise-feed-forward, keras-embed-sim, keras-self-attention\n",
150 + " Building wheel for keras-bert (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
151 + " Created wheel for keras-bert: filename=keras_bert-0.81.0-cp36-none-any.whl size=37913 sha256=5dd389965def97a4a8c8d39e14ca195c9b94b145d800a124a5071199150739a2\n",
152 + " Stored in directory: /root/.cache/pip/wheels/bd/27/da/ffc2d573aa48b87440ec4f98bc7c992e3a2d899edb2d22ef9e\n",
153 + " Building wheel for keras-transformer (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
154 + " Created wheel for keras-transformer: filename=keras_transformer-0.33.0-cp36-none-any.whl size=13260 sha256=112c74364559b6c3b6f5e7191c44dff75a1b4fef7061cce9e0dcd04ab1279b47\n",
155 + " Stored in directory: /root/.cache/pip/wheels/26/98/13/a28402939e1d48edd8704e6b02f223795af4a706815f4bf6d8\n",
156 + " Building wheel for keras-pos-embd (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
157 + " Created wheel for keras-pos-embd: filename=keras_pos_embd-0.11.0-cp36-none-any.whl size=7554 sha256=db69d8f347ba30f1ea87c225fa8896a9084a71c2d57a5b036be3cde055085aa7\n",
158 + " Stored in directory: /root/.cache/pip/wheels/5b/a1/a0/ce6b1d49ba1a9a76f592e70cf297b05c96bc9f418146761032\n",
159 + " Building wheel for keras-multi-head (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
160 + " Created wheel for keras-multi-head: filename=keras_multi_head-0.24.0-cp36-none-any.whl size=15511 sha256=a225eb00e6cfcf846e376c79a204d34b75851566bb1108363183a2147aff38ef\n",
161 + " Stored in directory: /root/.cache/pip/wheels/b6/84/01/dbcb50629030c8647a19dd0b7134574fad56c531bdb243bd20\n",
162 + " Building wheel for keras-layer-normalization (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
163 + " Created wheel for keras-layer-normalization: filename=keras_layer_normalization-0.14.0-cp36-none-any.whl size=5268 sha256=22d5729069e599ecee71ffcbc33cd327965def07bf8cd8ee645dd148210a23e5\n",
164 + " Stored in directory: /root/.cache/pip/wheels/54/80/22/a638a7d406fd155e507aa33d703e3fa2612b9eb7bb4f4fe667\n",
165 + " Building wheel for keras-position-wise-feed-forward (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
166 + " Created wheel for keras-position-wise-feed-forward: filename=keras_position_wise_feed_forward-0.6.0-cp36-none-any.whl size=5623 sha256=ee2d8f747442c1a158ef3fe0c059663bfeb6ba3868bb0be793338ba0427c5ff7\n",
167 + " Stored in directory: /root/.cache/pip/wheels/39/e2/e2/3514fef126a00574b13bc0b9e23891800158df3a3c19c96e3b\n",
168 + " Building wheel for keras-embed-sim (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
169 + " Created wheel for keras-embed-sim: filename=keras_embed_sim-0.7.0-cp36-none-any.whl size=4676 sha256=e94547926c0972d80319af9726f2f8efa1fa826d34e97fb5e91a3a580449a8e9\n",
170 + " Stored in directory: /root/.cache/pip/wheels/d1/bc/b1/b0c45cee4ca2e6c86586b0218ffafe7f0703c6d07fdf049866\n",
171 + " Building wheel for keras-self-attention (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
172 + " Created wheel for keras-self-attention: filename=keras_self_attention-0.41.0-cp36-none-any.whl size=17288 sha256=50e66b5411c995d037e45c754fe47a6f41780ac7c97757ce4b99389e3bcdf2fc\n",
173 + " Stored in directory: /root/.cache/pip/wheels/cc/dc/17/84258b27a04cd38ac91998abe148203720ca696186635db694\n",
174 + "Successfully built keras-bert keras-transformer keras-pos-embd keras-multi-head keras-layer-normalization keras-position-wise-feed-forward keras-embed-sim keras-self-attention\n",
175 + "Installing collected packages: keras-pos-embd, keras-self-attention, keras-multi-head, keras-layer-normalization, keras-position-wise-feed-forward, keras-embed-sim, keras-transformer, keras-bert\n",
176 + "Successfully installed keras-bert-0.81.0 keras-embed-sim-0.7.0 keras-layer-normalization-0.14.0 keras-multi-head-0.24.0 keras-pos-embd-0.11.0 keras-position-wise-feed-forward-0.6.0 keras-self-attention-0.41.0 keras-transformer-0.33.0\n",
177 + "Collecting keras-radam\n",
178 + " Downloading https://files.pythonhosted.org/packages/46/8d/b83ccaa94253fbc920b21981f038393041d92236bb541751b98a66a2ac1d/keras-radam-0.15.0.tar.gz\n",
179 + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from keras-radam) (1.18.4)\n",
180 + "Requirement already satisfied: Keras in /usr/local/lib/python3.6/dist-packages (from keras-radam) (2.3.1)\n",
181 + "Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (1.4.1)\n",
182 + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (1.12.0)\n",
183 + "Requirement already satisfied: keras-applications>=1.0.6 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (1.0.8)\n",
184 + "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (1.1.2)\n",
185 + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (3.13)\n",
186 + "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (2.10.0)\n",
187 + "Building wheels for collected packages: keras-radam\n",
188 + " Building wheel for keras-radam (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
189 + " Created wheel for keras-radam: filename=keras_radam-0.15.0-cp36-none-any.whl size=14685 sha256=913acbe80a0080d1fbac38daff360cdb8e2a3ba65b22fe989d8e125ea7d87e5f\n",
190 + " Stored in directory: /root/.cache/pip/wheels/79/a0/c0/670b0a118e8f078539fafec7bd02eba0af921f745660c7f83f\n",
191 + "Successfully built keras-radam\n",
192 + "Installing collected packages: keras-radam\n",
193 + "Successfully installed keras-radam-0.15.0\n"
194 + ],
195 + "name": "stdout"
196 + }
197 + ]
198 + },
199 + {
200 + "cell_type": "code",
201 + "metadata": {
202 + "id": "V7_zjhL5wGeB",
203 + "colab_type": "code",
204 + "colab": {}
205 + },
206 + "source": [
207 + "from keras_bert import load_trained_model_from_checkpoint, load_vocabulary\n",
208 + "from keras_bert import Tokenizer\n",
209 + "from keras_bert import AdamWarmup, calc_train_steps\n",
210 + "\n",
211 + "from keras_radam import RAdam"
212 + ],
213 + "execution_count": null,
214 + "outputs": []
215 + },
216 + {
217 + "cell_type": "code",
218 + "metadata": {
219 + "id": "RE5pjPZjwG3q",
220 + "colab_type": "code",
221 + "colab": {
222 + "base_uri": "https://localhost:8080/",
223 + "height": 102
224 + },
225 + "outputId": "497a5561-c8ad-40a7-fe72-5773da840971"
226 + },
227 + "source": [
228 + "os.listdir(path+'/bert')"
229 + ],
230 + "execution_count": null,
231 + "outputs": [
232 + {
233 + "output_type": "execute_result",
234 + "data": {
235 + "text/plain": [
236 + "['bert_config.json',\n",
237 + " 'vocab.txt',\n",
238 + " 'bert_model.ckpt.index',\n",
239 + " 'bert_model.ckpt.data-00000-of-00001',\n",
240 + " 'bert_model.ckpt.meta']"
241 + ]
242 + },
243 + "metadata": {
244 + "tags": []
245 + },
246 + "execution_count": 5
247 + }
248 + ]
249 + },
250 + {
251 + "cell_type": "code",
252 + "metadata": {
253 + "id": "yWqOLyGWwIMf",
254 + "colab_type": "code",
255 + "colab": {}
256 + },
257 + "source": [
258 + "SEQ_LEN = 256\n",
259 + "BATCH_SIZE = 16\n",
260 + "EPOCHS=2\n",
261 + "LR=1e-5\n",
262 + "DATA_COLUMN = \"body\"\n",
263 + "LABEL_COLUMN = \"index\"\n",
264 + "\n",
265 + "pretrained_path = path+\"/bert\"\n",
266 + "config_path = os.path.join(pretrained_path, 'bert_config.json')\n",
267 + "checkpoint_path = os.path.join(pretrained_path, 'bert_model.ckpt')\n",
268 + "vocab_path = os.path.join(pretrained_path, 'vocab.txt')"
269 + ],
270 + "execution_count": null,
271 + "outputs": []
272 + },
273 + {
274 + "cell_type": "code",
275 + "metadata": {
276 + "id": "G4E3vhF5wKmg",
277 + "colab_type": "code",
278 + "colab": {}
279 + },
280 + "source": [
281 + "token_dict = {}\n",
282 + "with codecs.open(vocab_path, 'r', 'utf8') as reader:\n",
283 + " for line in reader:\n",
284 + " token = line.strip()\n",
285 + " token_dict[token] = len(token_dict)"
286 + ],
287 + "execution_count": null,
288 + "outputs": []
289 + },
290 + {
291 + "cell_type": "code",
292 + "metadata": {
293 + "id": "c5a7hPzfwRcr",
294 + "colab_type": "code",
295 + "colab": {}
296 + },
297 + "source": [
298 + "tokenizer = Tokenizer(token_dict)"
299 + ],
300 + "execution_count": null,
301 + "outputs": []
302 + },
303 + {
304 + "cell_type": "code",
305 + "metadata": {
306 + "id": "jj3zRxUHMQAD",
307 + "colab_type": "code",
308 + "colab": {
309 + "base_uri": "https://localhost:8080/",
310 + "height": 34
311 + },
312 + "outputId": "e25e40d5-5932-4b61-e8dd-1a6fa4482c30"
313 + },
314 + "source": [
315 + "tokenizer.tokenize(\"This is unbelievable.\")"
316 + ],
317 + "execution_count": null,
318 + "outputs": [
319 + {
320 + "output_type": "execute_result",
321 + "data": {
322 + "text/plain": [
323 + "['[CLS]', 'this', 'is', 'un', '##believable', '.', '[SEP]']"
324 + ]
325 + },
326 + "metadata": {
327 + "tags": []
328 + },
329 + "execution_count": 15
330 + }
331 + ]
332 + },
333 + {
334 + "cell_type": "code",
335 + "metadata": {
336 + "id": "vehabKa5wTKG",
337 + "colab_type": "code",
338 + "colab": {}
339 + },
340 + "source": [
341 + "def convert_data(data_df):\n",
342 + " global tokenizer\n",
343 + " indices, targets = [], []\n",
344 + " for i in tqdm(range(len(data_df))):\n",
345 + " ids, segments = tokenizer.encode((data_df.iloc[i])[DATA_COLUMN], max_len=SEQ_LEN)\n",
346 + " indices.append(ids)\n",
347 + " targets.append((data_df.iloc[i])[LABEL_COLUMN])\n",
348 + " items = list(zip(indices, targets))\n",
349 + " \n",
350 + " indices, targets = zip(*items)\n",
351 + " indices = np.array(indices)\n",
352 + " return [indices, np.zeros_like(indices)], np.array(targets)\n",
353 + "\n",
354 + "def load_data(pandas_dataframe):\n",
355 + " data_df = pandas_dataframe\n",
356 + " data_x, data_y = convert_data(data_df)\n",
357 + "\n",
358 + " return data_x, data_y"
359 + ],
360 + "execution_count": null,
361 + "outputs": []
362 + },
363 + {
364 + "cell_type": "code",
365 + "metadata": {
366 + "id": "V8xrXJlywXG-",
367 + "colab_type": "code",
368 + "colab": {
369 + "base_uri": "https://localhost:8080/",
370 + "height": 51
371 + },
372 + "outputId": "6af2ec8a-d87e-42c8-eab5-4fd60802d8d6"
373 + },
374 + "source": [
375 + "# from sklearn.model_selection import train_test_split\n",
376 + "# train,val = train_test_split(combined_data,test_size = 0.2)\n",
377 + "\n",
378 + "train = combined_data[0:20246].copy()\n",
379 + "val = combined_data[20246:].copy()\n",
380 + "train_x, train_y = load_data(train)\n",
381 + "test_x, test_y = load_data(val)"
382 + ],
383 + "execution_count": null,
384 + "outputs": [
385 + {
386 + "output_type": "stream",
387 + "text": [
388 + "100%|██████████| 20246/20246 [00:21<00:00, 936.33it/s]\n",
389 + "100%|██████████| 3805/3805 [00:04<00:00, 946.42it/s]\n"
390 + ],
391 + "name": "stderr"
392 + }
393 + ]
394 + },
395 + {
396 + "cell_type": "code",
397 + "metadata": {
398 + "id": "BusGgqtlOY5R",
399 + "colab_type": "code",
400 + "colab": {
401 + "base_uri": "https://localhost:8080/",
402 + "height": 255
403 + },
404 + "outputId": "1c489ae6-999c-4ba5-b546-9f9e1a970b20"
405 + },
406 + "source": [
407 + "test_x"
408 + ],
409 + "execution_count": null,
410 + "outputs": [
411 + {
412 + "output_type": "execute_result",
413 + "data": {
414 + "text/plain": [
415 + "[array([[ 101, 2319, 117, ..., 0, 0, 0],\n",
416 + " [ 101, 1419, 112, ..., 0, 0, 0],\n",
417 + " [ 101, 170, 17619, ..., 0, 0, 0],\n",
418 + " ...,\n",
419 + " [ 101, 9700, 1158, ..., 0, 0, 0],\n",
420 + " [ 101, 190, 4832, ..., 0, 0, 0],\n",
421 + " [ 101, 3775, 4688, ..., 0, 0, 0]]),\n",
422 + " array([[0, 0, 0, ..., 0, 0, 0],\n",
423 + " [0, 0, 0, ..., 0, 0, 0],\n",
424 + " [0, 0, 0, ..., 0, 0, 0],\n",
425 + " ...,\n",
426 + " [0, 0, 0, ..., 0, 0, 0],\n",
427 + " [0, 0, 0, ..., 0, 0, 0],\n",
428 + " [0, 0, 0, ..., 0, 0, 0]])]"
429 + ]
430 + },
431 + "metadata": {
432 + "tags": []
433 + },
434 + "execution_count": 18
435 + }
436 + ]
437 + },
438 + {
439 + "cell_type": "code",
440 + "metadata": {
441 + "id": "VyyTba9swZgM",
442 + "colab_type": "code",
443 + "colab": {}
444 + },
445 + "source": [
446 + "layer_num = 12\n",
447 + "model = load_trained_model_from_checkpoint(\n",
448 + " config_path,\n",
449 + " checkpoint_path,\n",
450 + " training=True,\n",
451 + " trainable=True,\n",
452 + " seq_len=SEQ_LEN,)"
453 + ],
454 + "execution_count": null,
455 + "outputs": []
456 + },
457 + {
458 + "cell_type": "code",
459 + "metadata": {
460 + "id": "yIIDeSlDTeGb",
461 + "colab_type": "code",
462 + "colab": {
463 + "base_uri": "https://localhost:8080/",
464 + "height": 1000
465 + },
466 + "outputId": "889ee5fd-7d04-4ab5-de31-795c3e993eb1"
467 + },
468 + "source": [
469 + "model.summary()"
470 + ],
471 + "execution_count": null,
472 + "outputs": [
473 + {
474 + "output_type": "stream",
475 + "text": [
476 + "Model: \"model_1\"\n",
477 + "__________________________________________________________________________________________________\n",
478 + "Layer (type) Output Shape Param # Connected to \n",
479 + "==================================================================================================\n",
480 + "Input-Token (InputLayer) (None, 256) 0 \n",
481 + "__________________________________________________________________________________________________\n",
482 + "Input-Segment (InputLayer) (None, 256) 0 \n",
483 + "__________________________________________________________________________________________________\n",
484 + "Embedding-Token (TokenEmbedding [(None, 256, 768), ( 22268928 Input-Token[0][0] \n",
485 + "__________________________________________________________________________________________________\n",
486 + "Embedding-Segment (Embedding) (None, 256, 768) 1536 Input-Segment[0][0] \n",
487 + "__________________________________________________________________________________________________\n",
488 + "Embedding-Token-Segment (Add) (None, 256, 768) 0 Embedding-Token[0][0] \n",
489 + " Embedding-Segment[0][0] \n",
490 + "__________________________________________________________________________________________________\n",
491 + "Embedding-Position (PositionEmb (None, 256, 768) 196608 Embedding-Token-Segment[0][0] \n",
492 + "__________________________________________________________________________________________________\n",
493 + "Embedding-Dropout (Dropout) (None, 256, 768) 0 Embedding-Position[0][0] \n",
494 + "__________________________________________________________________________________________________\n",
495 + "Embedding-Norm (LayerNormalizat (None, 256, 768) 1536 Embedding-Dropout[0][0] \n",
496 + "__________________________________________________________________________________________________\n",
497 + "Encoder-1-MultiHeadSelfAttentio (None, 256, 768) 2362368 Embedding-Norm[0][0] \n",
498 + "__________________________________________________________________________________________________\n",
499 + "Encoder-1-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-1-MultiHeadSelfAttention[\n",
500 + "__________________________________________________________________________________________________\n",
501 + "Encoder-1-MultiHeadSelfAttentio (None, 256, 768) 0 Embedding-Norm[0][0] \n",
502 + " Encoder-1-MultiHeadSelfAttention-\n",
503 + "__________________________________________________________________________________________________\n",
504 + "Encoder-1-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-1-MultiHeadSelfAttention-\n",
505 + "__________________________________________________________________________________________________\n",
506 + "Encoder-1-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-1-MultiHeadSelfAttention-\n",
507 + "__________________________________________________________________________________________________\n",
508 + "Encoder-1-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-1-FeedForward[0][0] \n",
509 + "__________________________________________________________________________________________________\n",
510 + "Encoder-1-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-1-MultiHeadSelfAttention-\n",
511 + " Encoder-1-FeedForward-Dropout[0][\n",
512 + "__________________________________________________________________________________________________\n",
513 + "Encoder-1-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-1-FeedForward-Add[0][0] \n",
514 + "__________________________________________________________________________________________________\n",
515 + "Encoder-2-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-1-FeedForward-Norm[0][0] \n",
516 + "__________________________________________________________________________________________________\n",
517 + "Encoder-2-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-2-MultiHeadSelfAttention[\n",
518 + "__________________________________________________________________________________________________\n",
519 + "Encoder-2-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-1-FeedForward-Norm[0][0] \n",
520 + " Encoder-2-MultiHeadSelfAttention-\n",
521 + "__________________________________________________________________________________________________\n",
522 + "Encoder-2-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-2-MultiHeadSelfAttention-\n",
523 + "__________________________________________________________________________________________________\n",
524 + "Encoder-2-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-2-MultiHeadSelfAttention-\n",
525 + "__________________________________________________________________________________________________\n",
526 + "Encoder-2-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-2-FeedForward[0][0] \n",
527 + "__________________________________________________________________________________________________\n",
528 + "Encoder-2-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-2-MultiHeadSelfAttention-\n",
529 + " Encoder-2-FeedForward-Dropout[0][\n",
530 + "__________________________________________________________________________________________________\n",
531 + "Encoder-2-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-2-FeedForward-Add[0][0] \n",
532 + "__________________________________________________________________________________________________\n",
533 + "Encoder-3-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-2-FeedForward-Norm[0][0] \n",
534 + "__________________________________________________________________________________________________\n",
535 + "Encoder-3-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-3-MultiHeadSelfAttention[\n",
536 + "__________________________________________________________________________________________________\n",
537 + "Encoder-3-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-2-FeedForward-Norm[0][0] \n",
538 + " Encoder-3-MultiHeadSelfAttention-\n",
539 + "__________________________________________________________________________________________________\n",
540 + "Encoder-3-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-3-MultiHeadSelfAttention-\n",
541 + "__________________________________________________________________________________________________\n",
542 + "Encoder-3-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-3-MultiHeadSelfAttention-\n",
543 + "__________________________________________________________________________________________________\n",
544 + "Encoder-3-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-3-FeedForward[0][0] \n",
545 + "__________________________________________________________________________________________________\n",
546 + "Encoder-3-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-3-MultiHeadSelfAttention-\n",
547 + " Encoder-3-FeedForward-Dropout[0][\n",
548 + "__________________________________________________________________________________________________\n",
549 + "Encoder-3-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-3-FeedForward-Add[0][0] \n",
550 + "__________________________________________________________________________________________________\n",
551 + "Encoder-4-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-3-FeedForward-Norm[0][0] \n",
552 + "__________________________________________________________________________________________________\n",
553 + "Encoder-4-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-4-MultiHeadSelfAttention[\n",
554 + "__________________________________________________________________________________________________\n",
555 + "Encoder-4-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-3-FeedForward-Norm[0][0] \n",
556 + " Encoder-4-MultiHeadSelfAttention-\n",
557 + "__________________________________________________________________________________________________\n",
558 + "Encoder-4-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-4-MultiHeadSelfAttention-\n",
559 + "__________________________________________________________________________________________________\n",
560 + "Encoder-4-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-4-MultiHeadSelfAttention-\n",
561 + "__________________________________________________________________________________________________\n",
562 + "Encoder-4-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-4-FeedForward[0][0] \n",
563 + "__________________________________________________________________________________________________\n",
564 + "Encoder-4-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-4-MultiHeadSelfAttention-\n",
565 + " Encoder-4-FeedForward-Dropout[0][\n",
566 + "__________________________________________________________________________________________________\n",
567 + "Encoder-4-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-4-FeedForward-Add[0][0] \n",
568 + "__________________________________________________________________________________________________\n",
569 + "Encoder-5-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-4-FeedForward-Norm[0][0] \n",
570 + "__________________________________________________________________________________________________\n",
571 + "Encoder-5-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-5-MultiHeadSelfAttention[\n",
572 + "__________________________________________________________________________________________________\n",
573 + "Encoder-5-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-4-FeedForward-Norm[0][0] \n",
574 + " Encoder-5-MultiHeadSelfAttention-\n",
575 + "__________________________________________________________________________________________________\n",
576 + "Encoder-5-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-5-MultiHeadSelfAttention-\n",
577 + "__________________________________________________________________________________________________\n",
578 + "Encoder-5-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-5-MultiHeadSelfAttention-\n",
579 + "__________________________________________________________________________________________________\n",
580 + "Encoder-5-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-5-FeedForward[0][0] \n",
581 + "__________________________________________________________________________________________________\n",
582 + "Encoder-5-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-5-MultiHeadSelfAttention-\n",
583 + " Encoder-5-FeedForward-Dropout[0][\n",
584 + "__________________________________________________________________________________________________\n",
585 + "Encoder-5-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-5-FeedForward-Add[0][0] \n",
586 + "__________________________________________________________________________________________________\n",
587 + "Encoder-6-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-5-FeedForward-Norm[0][0] \n",
588 + "__________________________________________________________________________________________________\n",
589 + "Encoder-6-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-6-MultiHeadSelfAttention[\n",
590 + "__________________________________________________________________________________________________\n",
591 + "Encoder-6-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-5-FeedForward-Norm[0][0] \n",
592 + " Encoder-6-MultiHeadSelfAttention-\n",
593 + "__________________________________________________________________________________________________\n",
594 + "Encoder-6-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-6-MultiHeadSelfAttention-\n",
595 + "__________________________________________________________________________________________________\n",
596 + "Encoder-6-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-6-MultiHeadSelfAttention-\n",
597 + "__________________________________________________________________________________________________\n",
598 + "Encoder-6-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-6-FeedForward[0][0] \n",
599 + "__________________________________________________________________________________________________\n",
600 + "Encoder-6-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-6-MultiHeadSelfAttention-\n",
601 + " Encoder-6-FeedForward-Dropout[0][\n",
602 + "__________________________________________________________________________________________________\n",
603 + "Encoder-6-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-6-FeedForward-Add[0][0] \n",
604 + "__________________________________________________________________________________________________\n",
605 + "Encoder-7-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-6-FeedForward-Norm[0][0] \n",
606 + "__________________________________________________________________________________________________\n",
607 + "Encoder-7-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-7-MultiHeadSelfAttention[\n",
608 + "__________________________________________________________________________________________________\n",
609 + "Encoder-7-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-6-FeedForward-Norm[0][0] \n",
610 + " Encoder-7-MultiHeadSelfAttention-\n",
611 + "__________________________________________________________________________________________________\n",
612 + "Encoder-7-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-7-MultiHeadSelfAttention-\n",
613 + "__________________________________________________________________________________________________\n",
614 + "Encoder-7-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-7-MultiHeadSelfAttention-\n",
615 + "__________________________________________________________________________________________________\n",
616 + "Encoder-7-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-7-FeedForward[0][0] \n",
617 + "__________________________________________________________________________________________________\n",
618 + "Encoder-7-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-7-MultiHeadSelfAttention-\n",
619 + " Encoder-7-FeedForward-Dropout[0][\n",
620 + "__________________________________________________________________________________________________\n",
621 + "Encoder-7-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-7-FeedForward-Add[0][0] \n",
622 + "__________________________________________________________________________________________________\n",
623 + "Encoder-8-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-7-FeedForward-Norm[0][0] \n",
624 + "__________________________________________________________________________________________________\n",
625 + "Encoder-8-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-8-MultiHeadSelfAttention[\n",
626 + "__________________________________________________________________________________________________\n",
627 + "Encoder-8-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-7-FeedForward-Norm[0][0] \n",
628 + " Encoder-8-MultiHeadSelfAttention-\n",
629 + "__________________________________________________________________________________________________\n",
630 + "Encoder-8-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-8-MultiHeadSelfAttention-\n",
631 + "__________________________________________________________________________________________________\n",
632 + "Encoder-8-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-8-MultiHeadSelfAttention-\n",
633 + "__________________________________________________________________________________________________\n",
634 + "Encoder-8-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-8-FeedForward[0][0] \n",
635 + "__________________________________________________________________________________________________\n",
636 + "Encoder-8-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-8-MultiHeadSelfAttention-\n",
637 + " Encoder-8-FeedForward-Dropout[0][\n",
638 + "__________________________________________________________________________________________________\n",
639 + "Encoder-8-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-8-FeedForward-Add[0][0] \n",
640 + "__________________________________________________________________________________________________\n",
641 + "Encoder-9-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-8-FeedForward-Norm[0][0] \n",
642 + "__________________________________________________________________________________________________\n",
643 + "Encoder-9-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-9-MultiHeadSelfAttention[\n",
644 + "__________________________________________________________________________________________________\n",
645 + "Encoder-9-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-8-FeedForward-Norm[0][0] \n",
646 + " Encoder-9-MultiHeadSelfAttention-\n",
647 + "__________________________________________________________________________________________________\n",
648 + "Encoder-9-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-9-MultiHeadSelfAttention-\n",
649 + "__________________________________________________________________________________________________\n",
650 + "Encoder-9-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-9-MultiHeadSelfAttention-\n",
651 + "__________________________________________________________________________________________________\n",
652 + "Encoder-9-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-9-FeedForward[0][0] \n",
653 + "__________________________________________________________________________________________________\n",
654 + "Encoder-9-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-9-MultiHeadSelfAttention-\n",
655 + " Encoder-9-FeedForward-Dropout[0][\n",
656 + "__________________________________________________________________________________________________\n",
657 + "Encoder-9-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-9-FeedForward-Add[0][0] \n",
658 + "__________________________________________________________________________________________________\n",
659 + "Encoder-10-MultiHeadSelfAttenti (None, 256, 768) 2362368 Encoder-9-FeedForward-Norm[0][0] \n",
660 + "__________________________________________________________________________________________________\n",
661 + "Encoder-10-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-10-MultiHeadSelfAttention\n",
662 + "__________________________________________________________________________________________________\n",
663 + "Encoder-10-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-9-FeedForward-Norm[0][0] \n",
664 + " Encoder-10-MultiHeadSelfAttention\n",
665 + "__________________________________________________________________________________________________\n",
666 + "Encoder-10-MultiHeadSelfAttenti (None, 256, 768) 1536 Encoder-10-MultiHeadSelfAttention\n",
667 + "__________________________________________________________________________________________________\n",
668 + "Encoder-10-FeedForward (FeedFor (None, 256, 768) 4722432 Encoder-10-MultiHeadSelfAttention\n",
669 + "__________________________________________________________________________________________________\n",
670 + "Encoder-10-FeedForward-Dropout (None, 256, 768) 0 Encoder-10-FeedForward[0][0] \n",
671 + "__________________________________________________________________________________________________\n",
672 + "Encoder-10-FeedForward-Add (Add (None, 256, 768) 0 Encoder-10-MultiHeadSelfAttention\n",
673 + " Encoder-10-FeedForward-Dropout[0]\n",
674 + "__________________________________________________________________________________________________\n",
675 + "Encoder-10-FeedForward-Norm (La (None, 256, 768) 1536 Encoder-10-FeedForward-Add[0][0] \n",
676 + "__________________________________________________________________________________________________\n",
677 + "Encoder-11-MultiHeadSelfAttenti (None, 256, 768) 2362368 Encoder-10-FeedForward-Norm[0][0]\n",
678 + "__________________________________________________________________________________________________\n",
679 + "Encoder-11-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-11-MultiHeadSelfAttention\n",
680 + "__________________________________________________________________________________________________\n",
681 + "Encoder-11-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-10-FeedForward-Norm[0][0]\n",
682 + " Encoder-11-MultiHeadSelfAttention\n",
683 + "__________________________________________________________________________________________________\n",
684 + "Encoder-11-MultiHeadSelfAttenti (None, 256, 768) 1536 Encoder-11-MultiHeadSelfAttention\n",
685 + "__________________________________________________________________________________________________\n",
686 + "Encoder-11-FeedForward (FeedFor (None, 256, 768) 4722432 Encoder-11-MultiHeadSelfAttention\n",
687 + "__________________________________________________________________________________________________\n",
688 + "Encoder-11-FeedForward-Dropout (None, 256, 768) 0 Encoder-11-FeedForward[0][0] \n",
689 + "__________________________________________________________________________________________________\n",
690 + "Encoder-11-FeedForward-Add (Add (None, 256, 768) 0 Encoder-11-MultiHeadSelfAttention\n",
691 + " Encoder-11-FeedForward-Dropout[0]\n",
692 + "__________________________________________________________________________________________________\n",
693 + "Encoder-11-FeedForward-Norm (La (None, 256, 768) 1536 Encoder-11-FeedForward-Add[0][0] \n",
694 + "__________________________________________________________________________________________________\n",
695 + "Encoder-12-MultiHeadSelfAttenti (None, 256, 768) 2362368 Encoder-11-FeedForward-Norm[0][0]\n",
696 + "__________________________________________________________________________________________________\n",
697 + "Encoder-12-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-12-MultiHeadSelfAttention\n",
698 + "__________________________________________________________________________________________________\n",
699 + "Encoder-12-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-11-FeedForward-Norm[0][0]\n",
700 + " Encoder-12-MultiHeadSelfAttention\n",
701 + "__________________________________________________________________________________________________\n",
702 + "Encoder-12-MultiHeadSelfAttenti (None, 256, 768) 1536 Encoder-12-MultiHeadSelfAttention\n",
703 + "__________________________________________________________________________________________________\n",
704 + "Encoder-12-FeedForward (FeedFor (None, 256, 768) 4722432 Encoder-12-MultiHeadSelfAttention\n",
705 + "__________________________________________________________________________________________________\n",
706 + "Encoder-12-FeedForward-Dropout (None, 256, 768) 0 Encoder-12-FeedForward[0][0] \n",
707 + "__________________________________________________________________________________________________\n",
708 + "Encoder-12-FeedForward-Add (Add (None, 256, 768) 0 Encoder-12-MultiHeadSelfAttention\n",
709 + " Encoder-12-FeedForward-Dropout[0]\n",
710 + "__________________________________________________________________________________________________\n",
711 + "Encoder-12-FeedForward-Norm (La (None, 256, 768) 1536 Encoder-12-FeedForward-Add[0][0] \n",
712 + "__________________________________________________________________________________________________\n",
713 + "MLM-Dense (Dense) (None, 256, 768) 590592 Encoder-12-FeedForward-Norm[0][0]\n",
714 + "__________________________________________________________________________________________________\n",
715 + "MLM-Norm (LayerNormalization) (None, 256, 768) 1536 MLM-Dense[0][0] \n",
716 + "__________________________________________________________________________________________________\n",
717 + "Extract (Extract) (None, 768) 0 Encoder-12-FeedForward-Norm[0][0]\n",
718 + "__________________________________________________________________________________________________\n",
719 + "MLM-Sim (EmbeddingSimilarity) (None, 256, 28996) 28996 MLM-Norm[0][0] \n",
720 + " Embedding-Token[0][1] \n",
721 + "__________________________________________________________________________________________________\n",
722 + "Input-Masked (InputLayer) (None, 256) 0 \n",
723 + "__________________________________________________________________________________________________\n",
724 + "NSP-Dense (Dense) (None, 768) 590592 Extract[0][0] \n",
725 + "__________________________________________________________________________________________________\n",
726 + "MLM (Masked) (None, 256, 28996) 0 MLM-Sim[0][0] \n",
727 + " Input-Masked[0][0] \n",
728 + "__________________________________________________________________________________________________\n",
729 + "NSP (Dense) (None, 2) 1538 NSP-Dense[0][0] \n",
730 + "==================================================================================================\n",
731 + "Total params: 108,736,326\n",
732 + "Trainable params: 108,736,326\n",
733 + "Non-trainable params: 0\n",
734 + "__________________________________________________________________________________________________\n"
735 + ],
736 + "name": "stdout"
737 + }
738 + ]
739 + },
740 + {
741 + "cell_type": "code",
742 + "metadata": {
743 + "id": "7jO_vzY6w_qa",
744 + "colab_type": "code",
745 + "colab": {}
746 + },
747 + "source": [
748 + "from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
749 + "def recall(y_true, y_pred):\n",
750 + " true_positives = K.sum(K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))\n",
751 + " possible_positives = K.sum(K.round(K.clip(y_true[:, 0], 0, 1)))\n",
752 + " recall = true_positives / (possible_positives + K.epsilon())\n",
753 + " return recall\n",
754 + "\n",
755 + "\n",
756 + "def precision(y_true, y_pred):\n",
757 + " true_positives = K.sum(K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))\n",
758 + " predicted_positives = K.sum(K.round(K.clip(y_pred[:, 0], 0, 1)))\n",
759 + " precision = true_positives / (predicted_positives + K.epsilon())\n",
760 + " return precision\n",
761 + "\n",
762 + "\n",
763 + "def fbeta_score(y_true, y_pred):\n",
764 + " if K.sum(K.round(K.clip(y_true, 0, 1))) == 0:\n",
765 + " return 0\n",
766 + "\n",
767 + " p = precision(y_true, y_pred)\n",
768 + " r = recall(y_true, y_pred)\n",
769 + " bb = 1 ** 2\n",
770 + " fbeta_score = (1 + bb) * (p * r) / (bb * p + r + K.epsilon())\n",
771 + " return fbeta_score\n",
772 + "\n",
773 + "def get_bert_finetuning_model(model):\n",
774 + " inputs = model.inputs[:2]\n",
775 + " dense = model.layers[-3].output\n",
776 + "\n",
777 + " outputs = keras.layers.Dense(1, activation='sigmoid',kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.02),\n",
778 + " name = 'output')(dense)\n",
779 + "\n",
780 + " bert_model = keras.models.Model(inputs, outputs)\n",
781 + " bert_model.compile(\n",
782 + " optimizer=RAdam(learning_rate=0.00001, weight_decay=0.0025),\n",
783 + " loss='binary_crossentropy',\n",
784 + " metrics=['accuracy', recall, precision, fbeta_score])\n",
785 + " \n",
786 + " return bert_model\n",
787 + " \n",
788 + "model_name = path + \"event_news_label_bert.h5\"\n",
789 + "checkpointer = ModelCheckpoint(filepath=model_name,\n",
790 + " monitor='val_fbeta_score', mode=\"max\",\n",
791 + " verbose=2, save_best_only=True)\n",
792 + "earlystopper = EarlyStopping(monitor='val_loss', patience=20, verbose=2, mode = \"min\")"
793 + ],
794 + "execution_count": null,
795 + "outputs": []
796 + },
797 + {
798 + "cell_type": "code",
799 + "metadata": {
800 + "id": "66Rd4Xl-TzcS",
801 + "colab_type": "code",
802 + "colab": {
803 + "base_uri": "https://localhost:8080/",
804 + "height": 1000
805 + },
806 + "outputId": "da0ee542-0ee0-44ed-d900-64a7174fe21a"
807 + },
808 + "source": [
809 + "sess = K.get_session()\n",
810 + "uninitialized_variables = set([i.decode('ascii') for i in sess.run(tf.report_uninitialized_variables())])\n",
811 + "init = tf.variables_initializer([v for v in tf.global_variables() if v.name.split(':')[0] in uninitialized_variables])\n",
812 + "sess.run(init)\n",
813 + "\n",
814 + "bert_model = get_bert_finetuning_model(model)\n",
815 + "bert_model.summary()"
816 + ],
817 + "execution_count": null,
818 + "outputs": [
819 + {
820 + "output_type": "stream",
821 + "text": [
822 + "Model: \"model_6\"\n",
823 + "__________________________________________________________________________________________________\n",
824 + "Layer (type) Output Shape Param # Connected to \n",
825 + "==================================================================================================\n",
826 + "Input-Token (InputLayer) (None, 256) 0 \n",
827 + "__________________________________________________________________________________________________\n",
828 + "Input-Segment (InputLayer) (None, 256) 0 \n",
829 + "__________________________________________________________________________________________________\n",
830 + "Embedding-Token (TokenEmbedding [(None, 256, 768), ( 22268928 Input-Token[0][0] \n",
831 + "__________________________________________________________________________________________________\n",
832 + "Embedding-Segment (Embedding) (None, 256, 768) 1536 Input-Segment[0][0] \n",
833 + "__________________________________________________________________________________________________\n",
834 + "Embedding-Token-Segment (Add) (None, 256, 768) 0 Embedding-Token[0][0] \n",
835 + " Embedding-Segment[0][0] \n",
836 + "__________________________________________________________________________________________________\n",
837 + "Embedding-Position (PositionEmb (None, 256, 768) 196608 Embedding-Token-Segment[0][0] \n",
838 + "__________________________________________________________________________________________________\n",
839 + "Embedding-Dropout (Dropout) (None, 256, 768) 0 Embedding-Position[0][0] \n",
840 + "__________________________________________________________________________________________________\n",
841 + "Embedding-Norm (LayerNormalizat (None, 256, 768) 1536 Embedding-Dropout[0][0] \n",
842 + "__________________________________________________________________________________________________\n",
843 + "Encoder-1-MultiHeadSelfAttentio (None, 256, 768) 2362368 Embedding-Norm[0][0] \n",
844 + "__________________________________________________________________________________________________\n",
845 + "Encoder-1-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-1-MultiHeadSelfAttention[\n",
846 + "__________________________________________________________________________________________________\n",
847 + "Encoder-1-MultiHeadSelfAttentio (None, 256, 768) 0 Embedding-Norm[0][0] \n",
848 + " Encoder-1-MultiHeadSelfAttention-\n",
849 + "__________________________________________________________________________________________________\n",
850 + "Encoder-1-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-1-MultiHeadSelfAttention-\n",
851 + "__________________________________________________________________________________________________\n",
852 + "Encoder-1-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-1-MultiHeadSelfAttention-\n",
853 + "__________________________________________________________________________________________________\n",
854 + "Encoder-1-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-1-FeedForward[0][0] \n",
855 + "__________________________________________________________________________________________________\n",
856 + "Encoder-1-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-1-MultiHeadSelfAttention-\n",
857 + " Encoder-1-FeedForward-Dropout[0][\n",
858 + "__________________________________________________________________________________________________\n",
859 + "Encoder-1-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-1-FeedForward-Add[0][0] \n",
860 + "__________________________________________________________________________________________________\n",
861 + "Encoder-2-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-1-FeedForward-Norm[0][0] \n",
862 + "__________________________________________________________________________________________________\n",
863 + "Encoder-2-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-2-MultiHeadSelfAttention[\n",
864 + "__________________________________________________________________________________________________\n",
865 + "Encoder-2-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-1-FeedForward-Norm[0][0] \n",
866 + " Encoder-2-MultiHeadSelfAttention-\n",
867 + "__________________________________________________________________________________________________\n",
868 + "Encoder-2-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-2-MultiHeadSelfAttention-\n",
869 + "__________________________________________________________________________________________________\n",
870 + "Encoder-2-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-2-MultiHeadSelfAttention-\n",
871 + "__________________________________________________________________________________________________\n",
872 + "Encoder-2-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-2-FeedForward[0][0] \n",
873 + "__________________________________________________________________________________________________\n",
874 + "Encoder-2-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-2-MultiHeadSelfAttention-\n",
875 + " Encoder-2-FeedForward-Dropout[0][\n",
876 + "__________________________________________________________________________________________________\n",
877 + "Encoder-2-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-2-FeedForward-Add[0][0] \n",
878 + "__________________________________________________________________________________________________\n",
879 + "Encoder-3-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-2-FeedForward-Norm[0][0] \n",
880 + "__________________________________________________________________________________________________\n",
881 + "Encoder-3-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-3-MultiHeadSelfAttention[\n",
882 + "__________________________________________________________________________________________________\n",
883 + "Encoder-3-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-2-FeedForward-Norm[0][0] \n",
884 + " Encoder-3-MultiHeadSelfAttention-\n",
885 + "__________________________________________________________________________________________________\n",
886 + "Encoder-3-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-3-MultiHeadSelfAttention-\n",
887 + "__________________________________________________________________________________________________\n",
888 + "Encoder-3-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-3-MultiHeadSelfAttention-\n",
889 + "__________________________________________________________________________________________________\n",
890 + "Encoder-3-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-3-FeedForward[0][0] \n",
891 + "__________________________________________________________________________________________________\n",
892 + "Encoder-3-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-3-MultiHeadSelfAttention-\n",
893 + " Encoder-3-FeedForward-Dropout[0][\n",
894 + "__________________________________________________________________________________________________\n",
895 + "Encoder-3-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-3-FeedForward-Add[0][0] \n",
896 + "__________________________________________________________________________________________________\n",
897 + "Encoder-4-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-3-FeedForward-Norm[0][0] \n",
898 + "__________________________________________________________________________________________________\n",
899 + "Encoder-4-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-4-MultiHeadSelfAttention[\n",
900 + "__________________________________________________________________________________________________\n",
901 + "Encoder-4-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-3-FeedForward-Norm[0][0] \n",
902 + " Encoder-4-MultiHeadSelfAttention-\n",
903 + "__________________________________________________________________________________________________\n",
904 + "Encoder-4-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-4-MultiHeadSelfAttention-\n",
905 + "__________________________________________________________________________________________________\n",
906 + "Encoder-4-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-4-MultiHeadSelfAttention-\n",
907 + "__________________________________________________________________________________________________\n",
908 + "Encoder-4-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-4-FeedForward[0][0] \n",
909 + "__________________________________________________________________________________________________\n",
910 + "Encoder-4-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-4-MultiHeadSelfAttention-\n",
911 + " Encoder-4-FeedForward-Dropout[0][\n",
912 + "__________________________________________________________________________________________________\n",
913 + "Encoder-4-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-4-FeedForward-Add[0][0] \n",
914 + "__________________________________________________________________________________________________\n",
915 + "Encoder-5-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-4-FeedForward-Norm[0][0] \n",
916 + "__________________________________________________________________________________________________\n",
917 + "Encoder-5-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-5-MultiHeadSelfAttention[\n",
918 + "__________________________________________________________________________________________________\n",
919 + "Encoder-5-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-4-FeedForward-Norm[0][0] \n",
920 + " Encoder-5-MultiHeadSelfAttention-\n",
921 + "__________________________________________________________________________________________________\n",
922 + "Encoder-5-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-5-MultiHeadSelfAttention-\n",
923 + "__________________________________________________________________________________________________\n",
924 + "Encoder-5-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-5-MultiHeadSelfAttention-\n",
925 + "__________________________________________________________________________________________________\n",
926 + "Encoder-5-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-5-FeedForward[0][0] \n",
927 + "__________________________________________________________________________________________________\n",
928 + "Encoder-5-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-5-MultiHeadSelfAttention-\n",
929 + " Encoder-5-FeedForward-Dropout[0][\n",
930 + "__________________________________________________________________________________________________\n",
931 + "Encoder-5-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-5-FeedForward-Add[0][0] \n",
932 + "__________________________________________________________________________________________________\n",
933 + "Encoder-6-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-5-FeedForward-Norm[0][0] \n",
934 + "__________________________________________________________________________________________________\n",
935 + "Encoder-6-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-6-MultiHeadSelfAttention[\n",
936 + "__________________________________________________________________________________________________\n",
937 + "Encoder-6-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-5-FeedForward-Norm[0][0] \n",
938 + " Encoder-6-MultiHeadSelfAttention-\n",
939 + "__________________________________________________________________________________________________\n",
940 + "Encoder-6-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-6-MultiHeadSelfAttention-\n",
941 + "__________________________________________________________________________________________________\n",
942 + "Encoder-6-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-6-MultiHeadSelfAttention-\n",
943 + "__________________________________________________________________________________________________\n",
944 + "Encoder-6-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-6-FeedForward[0][0] \n",
945 + "__________________________________________________________________________________________________\n",
946 + "Encoder-6-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-6-MultiHeadSelfAttention-\n",
947 + " Encoder-6-FeedForward-Dropout[0][\n",
948 + "__________________________________________________________________________________________________\n",
949 + "Encoder-6-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-6-FeedForward-Add[0][0] \n",
950 + "__________________________________________________________________________________________________\n",
951 + "Encoder-7-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-6-FeedForward-Norm[0][0] \n",
952 + "__________________________________________________________________________________________________\n",
953 + "Encoder-7-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-7-MultiHeadSelfAttention[\n",
954 + "__________________________________________________________________________________________________\n",
955 + "Encoder-7-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-6-FeedForward-Norm[0][0] \n",
956 + " Encoder-7-MultiHeadSelfAttention-\n",
957 + "__________________________________________________________________________________________________\n",
958 + "Encoder-7-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-7-MultiHeadSelfAttention-\n",
959 + "__________________________________________________________________________________________________\n",
960 + "Encoder-7-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-7-MultiHeadSelfAttention-\n",
961 + "__________________________________________________________________________________________________\n",
962 + "Encoder-7-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-7-FeedForward[0][0] \n",
963 + "__________________________________________________________________________________________________\n",
964 + "Encoder-7-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-7-MultiHeadSelfAttention-\n",
965 + " Encoder-7-FeedForward-Dropout[0][\n",
966 + "__________________________________________________________________________________________________\n",
967 + "Encoder-7-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-7-FeedForward-Add[0][0] \n",
968 + "__________________________________________________________________________________________________\n",
969 + "Encoder-8-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-7-FeedForward-Norm[0][0] \n",
970 + "__________________________________________________________________________________________________\n",
971 + "Encoder-8-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-8-MultiHeadSelfAttention[\n",
972 + "__________________________________________________________________________________________________\n",
973 + "Encoder-8-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-7-FeedForward-Norm[0][0] \n",
974 + " Encoder-8-MultiHeadSelfAttention-\n",
975 + "__________________________________________________________________________________________________\n",
976 + "Encoder-8-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-8-MultiHeadSelfAttention-\n",
977 + "__________________________________________________________________________________________________\n",
978 + "Encoder-8-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-8-MultiHeadSelfAttention-\n",
979 + "__________________________________________________________________________________________________\n",
980 + "Encoder-8-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-8-FeedForward[0][0] \n",
981 + "__________________________________________________________________________________________________\n",
982 + "Encoder-8-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-8-MultiHeadSelfAttention-\n",
983 + " Encoder-8-FeedForward-Dropout[0][\n",
984 + "__________________________________________________________________________________________________\n",
985 + "Encoder-8-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-8-FeedForward-Add[0][0] \n",
986 + "__________________________________________________________________________________________________\n",
987 + "Encoder-9-MultiHeadSelfAttentio (None, 256, 768) 2362368 Encoder-8-FeedForward-Norm[0][0] \n",
988 + "__________________________________________________________________________________________________\n",
989 + "Encoder-9-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-9-MultiHeadSelfAttention[\n",
990 + "__________________________________________________________________________________________________\n",
991 + "Encoder-9-MultiHeadSelfAttentio (None, 256, 768) 0 Encoder-8-FeedForward-Norm[0][0] \n",
992 + " Encoder-9-MultiHeadSelfAttention-\n",
993 + "__________________________________________________________________________________________________\n",
994 + "Encoder-9-MultiHeadSelfAttentio (None, 256, 768) 1536 Encoder-9-MultiHeadSelfAttention-\n",
995 + "__________________________________________________________________________________________________\n",
996 + "Encoder-9-FeedForward (FeedForw (None, 256, 768) 4722432 Encoder-9-MultiHeadSelfAttention-\n",
997 + "__________________________________________________________________________________________________\n",
998 + "Encoder-9-FeedForward-Dropout ( (None, 256, 768) 0 Encoder-9-FeedForward[0][0] \n",
999 + "__________________________________________________________________________________________________\n",
1000 + "Encoder-9-FeedForward-Add (Add) (None, 256, 768) 0 Encoder-9-MultiHeadSelfAttention-\n",
1001 + " Encoder-9-FeedForward-Dropout[0][\n",
1002 + "__________________________________________________________________________________________________\n",
1003 + "Encoder-9-FeedForward-Norm (Lay (None, 256, 768) 1536 Encoder-9-FeedForward-Add[0][0] \n",
1004 + "__________________________________________________________________________________________________\n",
1005 + "Encoder-10-MultiHeadSelfAttenti (None, 256, 768) 2362368 Encoder-9-FeedForward-Norm[0][0] \n",
1006 + "__________________________________________________________________________________________________\n",
1007 + "Encoder-10-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-10-MultiHeadSelfAttention\n",
1008 + "__________________________________________________________________________________________________\n",
1009 + "Encoder-10-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-9-FeedForward-Norm[0][0] \n",
1010 + " Encoder-10-MultiHeadSelfAttention\n",
1011 + "__________________________________________________________________________________________________\n",
1012 + "Encoder-10-MultiHeadSelfAttenti (None, 256, 768) 1536 Encoder-10-MultiHeadSelfAttention\n",
1013 + "__________________________________________________________________________________________________\n",
1014 + "Encoder-10-FeedForward (FeedFor (None, 256, 768) 4722432 Encoder-10-MultiHeadSelfAttention\n",
1015 + "__________________________________________________________________________________________________\n",
1016 + "Encoder-10-FeedForward-Dropout (None, 256, 768) 0 Encoder-10-FeedForward[0][0] \n",
1017 + "__________________________________________________________________________________________________\n",
1018 + "Encoder-10-FeedForward-Add (Add (None, 256, 768) 0 Encoder-10-MultiHeadSelfAttention\n",
1019 + " Encoder-10-FeedForward-Dropout[0]\n",
1020 + "__________________________________________________________________________________________________\n",
1021 + "Encoder-10-FeedForward-Norm (La (None, 256, 768) 1536 Encoder-10-FeedForward-Add[0][0] \n",
1022 + "__________________________________________________________________________________________________\n",
1023 + "Encoder-11-MultiHeadSelfAttenti (None, 256, 768) 2362368 Encoder-10-FeedForward-Norm[0][0]\n",
1024 + "__________________________________________________________________________________________________\n",
1025 + "Encoder-11-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-11-MultiHeadSelfAttention\n",
1026 + "__________________________________________________________________________________________________\n",
1027 + "Encoder-11-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-10-FeedForward-Norm[0][0]\n",
1028 + " Encoder-11-MultiHeadSelfAttention\n",
1029 + "__________________________________________________________________________________________________\n",
1030 + "Encoder-11-MultiHeadSelfAttenti (None, 256, 768) 1536 Encoder-11-MultiHeadSelfAttention\n",
1031 + "__________________________________________________________________________________________________\n",
1032 + "Encoder-11-FeedForward (FeedFor (None, 256, 768) 4722432 Encoder-11-MultiHeadSelfAttention\n",
1033 + "__________________________________________________________________________________________________\n",
1034 + "Encoder-11-FeedForward-Dropout (None, 256, 768) 0 Encoder-11-FeedForward[0][0] \n",
1035 + "__________________________________________________________________________________________________\n",
1036 + "Encoder-11-FeedForward-Add (Add (None, 256, 768) 0 Encoder-11-MultiHeadSelfAttention\n",
1037 + " Encoder-11-FeedForward-Dropout[0]\n",
1038 + "__________________________________________________________________________________________________\n",
1039 + "Encoder-11-FeedForward-Norm (La (None, 256, 768) 1536 Encoder-11-FeedForward-Add[0][0] \n",
1040 + "__________________________________________________________________________________________________\n",
1041 + "Encoder-12-MultiHeadSelfAttenti (None, 256, 768) 2362368 Encoder-11-FeedForward-Norm[0][0]\n",
1042 + "__________________________________________________________________________________________________\n",
1043 + "Encoder-12-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-12-MultiHeadSelfAttention\n",
1044 + "__________________________________________________________________________________________________\n",
1045 + "Encoder-12-MultiHeadSelfAttenti (None, 256, 768) 0 Encoder-11-FeedForward-Norm[0][0]\n",
1046 + " Encoder-12-MultiHeadSelfAttention\n",
1047 + "__________________________________________________________________________________________________\n",
1048 + "Encoder-12-MultiHeadSelfAttenti (None, 256, 768) 1536 Encoder-12-MultiHeadSelfAttention\n",
1049 + "__________________________________________________________________________________________________\n",
1050 + "Encoder-12-FeedForward (FeedFor (None, 256, 768) 4722432 Encoder-12-MultiHeadSelfAttention\n",
1051 + "__________________________________________________________________________________________________\n",
1052 + "Encoder-12-FeedForward-Dropout (None, 256, 768) 0 Encoder-12-FeedForward[0][0] \n",
1053 + "__________________________________________________________________________________________________\n",
1054 + "Encoder-12-FeedForward-Add (Add (None, 256, 768) 0 Encoder-12-MultiHeadSelfAttention\n",
1055 + " Encoder-12-FeedForward-Dropout[0]\n",
1056 + "__________________________________________________________________________________________________\n",
1057 + "Encoder-12-FeedForward-Norm (La (None, 256, 768) 1536 Encoder-12-FeedForward-Add[0][0] \n",
1058 + "__________________________________________________________________________________________________\n",
1059 + "Extract (Extract) (None, 768) 0 Encoder-12-FeedForward-Norm[0][0]\n",
1060 + "__________________________________________________________________________________________________\n",
1061 + "NSP-Dense (Dense) (None, 768) 590592 Extract[0][0] \n",
1062 + "__________________________________________________________________________________________________\n",
1063 + "output (Dense) (None, 1) 769 NSP-Dense[0][0] \n",
1064 + "==================================================================================================\n",
1065 + "Total params: 108,114,433\n",
1066 + "Trainable params: 108,114,433\n",
1067 + "Non-trainable params: 0\n",
1068 + "__________________________________________________________________________________________________\n"
1069 + ],
1070 + "name": "stdout"
1071 + }
1072 + ]
1073 + },
1074 + {
1075 + "cell_type": "code",
1076 + "metadata": {
1077 + "id": "VFVmzqXvxV2I",
1078 + "colab_type": "code",
1079 + "colab": {
1080 + "base_uri": "https://localhost:8080/",
1081 + "height": 955
1082 + },
1083 + "outputId": "0dee0c30-ecbd-4d06-e741-a8042b468f62"
1084 + },
1085 + "source": [
1086 + "sess = K.get_session()\n",
1087 + "uninitialized_variables = set([i.decode('ascii') for i in sess.run(tf.report_uninitialized_variables())])\n",
1088 + "init = tf.variables_initializer([v for v in tf.global_variables() if v.name.split(':')[0] in uninitialized_variables])\n",
1089 + "sess.run(init)\n",
1090 + "\n",
1091 + "bert_model = get_bert_finetuning_model(model)\n",
1092 + "bert_model.load_weights(\"gdrive/My Drive/body_bert_256.h5\")\n",
1093 + "history = bert_model.fit(train_x, train_y, epochs=25, batch_size=16, verbose = 1, validation_data=(test_x, test_y))\n",
1094 + "bert_model.save_weights(\"gdrive/My Drive/body_bert_256_epoch50.h5\")"
1095 + ],
1096 + "execution_count": null,
1097 + "outputs": [
1098 + {
1099 + "output_type": "stream",
1100 + "text": [
1101 + "Train on 20246 samples, validate on 3805 samples\n",
1102 + "Epoch 1/25\n",
1103 + "20246/20246 [==============================] - 1330s 66ms/step - loss: 0.6926 - accuracy: 0.5180 - recall: 0.8852 - precision: 0.5221 - fbeta_score: 0.6343 - val_loss: 0.6932 - val_accuracy: 0.5030 - val_recall: 0.3662 - val_precision: 0.5922 - val_fbeta_score: 0.4242\n",
1104 + "Epoch 2/25\n",
1105 + "20246/20246 [==============================] - 1319s 65ms/step - loss: 0.6915 - accuracy: 0.5221 - recall: 0.8501 - precision: 0.5207 - fbeta_score: 0.6184 - val_loss: 0.6887 - val_accuracy: 0.5640 - val_recall: 0.7281 - val_precision: 0.5785 - val_fbeta_score: 0.6145\n",
1106 + "Epoch 3/25\n",
1107 + "20246/20246 [==============================] - 1318s 65ms/step - loss: 0.6789 - accuracy: 0.5738 - recall: 0.7190 - precision: 0.5819 - fbeta_score: 0.6126 - val_loss: 0.7050 - val_accuracy: 0.5075 - val_recall: 0.4514 - val_precision: 0.5760 - val_fbeta_score: 0.4712\n",
1108 + "Epoch 4/25\n",
1109 + "20246/20246 [==============================] - 1320s 65ms/step - loss: 0.5551 - accuracy: 0.7174 - recall: 0.7525 - precision: 0.7280 - fbeta_score: 0.7196 - val_loss: 0.8139 - val_accuracy: 0.4933 - val_recall: 0.3668 - val_precision: 0.5788 - val_fbeta_score: 0.4179\n",
1110 + "Epoch 5/25\n",
1111 + "20246/20246 [==============================] - 1319s 65ms/step - loss: 0.3120 - accuracy: 0.8694 - recall: 0.8784 - precision: 0.8759 - fbeta_score: 0.8684 - val_loss: 1.1761 - val_accuracy: 0.4991 - val_recall: 0.3568 - val_precision: 0.5796 - val_fbeta_score: 0.4116\n",
1112 + "Epoch 6/25\n",
1113 + "20246/20246 [==============================] - 1319s 65ms/step - loss: 0.1799 - accuracy: 0.9314 - recall: 0.9350 - precision: 0.9361 - fbeta_score: 0.9308 - val_loss: 1.3997 - val_accuracy: 0.5193 - val_recall: 0.4819 - val_precision: 0.5775 - val_fbeta_score: 0.4954\n",
1114 + "Epoch 7/25\n",
1115 + "20246/20246 [==============================] - 1321s 65ms/step - loss: 0.1268 - accuracy: 0.9533 - recall: 0.9570 - precision: 0.9547 - fbeta_score: 0.9528 - val_loss: 1.5101 - val_accuracy: 0.5114 - val_recall: 0.4414 - val_precision: 0.5792 - val_fbeta_score: 0.4693\n",
1116 + "Epoch 8/25\n",
1117 + "20246/20246 [==============================] - 1320s 65ms/step - loss: 0.1018 - accuracy: 0.9645 - recall: 0.9649 - precision: 0.9671 - fbeta_score: 0.9635 - val_loss: 1.7262 - val_accuracy: 0.5017 - val_recall: 0.3862 - val_precision: 0.5778 - val_fbeta_score: 0.4340\n",
1118 + "Epoch 9/25\n",
1119 + "20246/20246 [==============================] - 1318s 65ms/step - loss: 0.0911 - accuracy: 0.9676 - recall: 0.9683 - precision: 0.9701 - fbeta_score: 0.9671 - val_loss: 1.8218 - val_accuracy: 0.4886 - val_recall: 0.3260 - val_precision: 0.5849 - val_fbeta_score: 0.3844\n",
1120 + "Epoch 10/25\n",
1121 + "20246/20246 [==============================] - 1321s 65ms/step - loss: 0.0785 - accuracy: 0.9713 - recall: 0.9724 - precision: 0.9732 - fbeta_score: 0.9710 - val_loss: 1.8267 - val_accuracy: 0.5070 - val_recall: 0.4098 - val_precision: 0.5780 - val_fbeta_score: 0.4490\n",
1122 + "Epoch 11/25\n",
1123 + "20246/20246 [==============================] - 1323s 65ms/step - loss: 0.0693 - accuracy: 0.9737 - recall: 0.9732 - precision: 0.9767 - fbeta_score: 0.9731 - val_loss: 1.8569 - val_accuracy: 0.5188 - val_recall: 0.4916 - val_precision: 0.5792 - val_fbeta_score: 0.5005\n",
1124 + "Epoch 12/25\n",
1125 + "20246/20246 [==============================] - 1318s 65ms/step - loss: 0.0618 - accuracy: 0.9773 - recall: 0.9777 - precision: 0.9788 - fbeta_score: 0.9768 - val_loss: 1.7918 - val_accuracy: 0.5067 - val_recall: 0.4202 - val_precision: 0.5774 - val_fbeta_score: 0.4556\n",
1126 + "Epoch 13/25\n",
1127 + "20246/20246 [==============================] - 1321s 65ms/step - loss: 0.0593 - accuracy: 0.9776 - recall: 0.9781 - precision: 0.9794 - fbeta_score: 0.9772 - val_loss: 1.8888 - val_accuracy: 0.5091 - val_recall: 0.4414 - val_precision: 0.5775 - val_fbeta_score: 0.4697\n",
1128 + "Epoch 14/25\n",
1129 + "20246/20246 [==============================] - 1321s 65ms/step - loss: 0.0534 - accuracy: 0.9793 - recall: 0.9790 - precision: 0.9816 - fbeta_score: 0.9791 - val_loss: 2.2426 - val_accuracy: 0.4886 - val_recall: 0.3233 - val_precision: 0.5795 - val_fbeta_score: 0.3868\n",
1130 + "Epoch 15/25\n",
1131 + "20246/20246 [==============================] - 1322s 65ms/step - loss: 0.0471 - accuracy: 0.9804 - recall: 0.9798 - precision: 0.9821 - fbeta_score: 0.9796 - val_loss: 2.1971 - val_accuracy: 0.5057 - val_recall: 0.4143 - val_precision: 0.5818 - val_fbeta_score: 0.4528\n",
1132 + "Epoch 16/25\n",
1133 + "20246/20246 [==============================] - 1319s 65ms/step - loss: 0.0495 - accuracy: 0.9793 - recall: 0.9796 - precision: 0.9814 - fbeta_score: 0.9792 - val_loss: 2.2741 - val_accuracy: 0.5059 - val_recall: 0.4057 - val_precision: 0.5816 - val_fbeta_score: 0.4502\n",
1134 + "Epoch 17/25\n",
1135 + "20246/20246 [==============================] - 1319s 65ms/step - loss: 0.0483 - accuracy: 0.9799 - recall: 0.9806 - precision: 0.9815 - fbeta_score: 0.9797 - val_loss: 2.0932 - val_accuracy: 0.5104 - val_recall: 0.4322 - val_precision: 0.5771 - val_fbeta_score: 0.4645\n",
1136 + "Epoch 18/25\n",
1137 + "20246/20246 [==============================] - 1320s 65ms/step - loss: 0.0394 - accuracy: 0.9831 - recall: 0.9824 - precision: 0.9859 - fbeta_score: 0.9830 - val_loss: 2.1859 - val_accuracy: 0.5051 - val_recall: 0.3847 - val_precision: 0.5814 - val_fbeta_score: 0.4328\n",
1138 + "Epoch 19/25\n",
1139 + "20246/20246 [==============================] - 1322s 65ms/step - loss: 0.0462 - accuracy: 0.9812 - recall: 0.9804 - precision: 0.9839 - fbeta_score: 0.9809 - val_loss: 2.2411 - val_accuracy: 0.5072 - val_recall: 0.4112 - val_precision: 0.5771 - val_fbeta_score: 0.4479\n",
1140 + "Epoch 20/25\n",
1141 + "20246/20246 [==============================] - 1321s 65ms/step - loss: 0.0353 - accuracy: 0.9841 - recall: 0.9852 - precision: 0.9846 - fbeta_score: 0.9838 - val_loss: 2.2884 - val_accuracy: 0.5151 - val_recall: 0.4217 - val_precision: 0.5828 - val_fbeta_score: 0.4573\n",
1142 + "Epoch 21/25\n",
1143 + "20246/20246 [==============================] - 1318s 65ms/step - loss: 0.0408 - accuracy: 0.9824 - recall: 0.9818 - precision: 0.9843 - fbeta_score: 0.9818 - val_loss: 2.5317 - val_accuracy: 0.4857 - val_recall: 0.2767 - val_precision: 0.5815 - val_fbeta_score: 0.3478\n",
1144 + "Epoch 22/25\n",
1145 + "20246/20246 [==============================] - 1319s 65ms/step - loss: 0.0372 - accuracy: 0.9828 - recall: 0.9815 - precision: 0.9859 - fbeta_score: 0.9826 - val_loss: 2.2843 - val_accuracy: 0.5043 - val_recall: 0.3758 - val_precision: 0.5849 - val_fbeta_score: 0.4276\n",
1146 + "Epoch 23/25\n",
1147 + "20246/20246 [==============================] - 1316s 65ms/step - loss: 0.0348 - accuracy: 0.9856 - recall: 0.9868 - precision: 0.9863 - fbeta_score: 0.9856 - val_loss: 2.2134 - val_accuracy: 0.5277 - val_recall: 0.4676 - val_precision: 0.5825 - val_fbeta_score: 0.4887\n",
1148 + "Epoch 24/25\n",
1149 + "20246/20246 [==============================] - 1317s 65ms/step - loss: 0.0351 - accuracy: 0.9847 - recall: 0.9840 - precision: 0.9869 - fbeta_score: 0.9845 - val_loss: 2.1987 - val_accuracy: 0.5148 - val_recall: 0.4272 - val_precision: 0.5806 - val_fbeta_score: 0.4617\n",
1150 + "Epoch 25/25\n",
1151 + "20246/20246 [==============================] - 1319s 65ms/step - loss: 0.0330 - accuracy: 0.9855 - recall: 0.9871 - precision: 0.9851 - fbeta_score: 0.9852 - val_loss: 2.2073 - val_accuracy: 0.5125 - val_recall: 0.3833 - val_precision: 0.5833 - val_fbeta_score: 0.4322\n"
1152 + ],
1153 + "name": "stdout"
1154 + }
1155 + ]
1156 + },
1157 + {
1158 + "cell_type": "code",
1159 + "metadata": {
1160 + "id": "jBpYE9eVxfXv",
1161 + "colab_type": "code",
1162 + "colab": {}
1163 + },
1164 + "source": [
1165 + "test = pd.read_csv('gdrive/My Drive/capstone 2/event_embedding/Thesis_data/combined_data2015.csv', encoding='utf-8') "
1166 + ],
1167 + "execution_count": null,
1168 + "outputs": []
1169 + },
1170 + {
1171 + "cell_type": "code",
1172 + "metadata": {
1173 + "id": "NQu0eoaWxfsv",
1174 + "colab_type": "code",
1175 + "colab": {}
1176 + },
1177 + "source": [
1178 + "def predict_convert_data(data_df):\n",
1179 + " global tokenizer\n",
1180 + " indices = []\n",
1181 + " for i in tqdm(range(len(data_df))):\n",
1182 + " ids, segments = tokenizer.encode(data_df[DATA_COLUMN][i], max_len=SEQ_LEN)\n",
1183 + " indices.append(ids)\n",
1184 + " \n",
1185 + " items = indices\n",
1186 + " \n",
1187 + " \n",
1188 + " indices = np.array(indices)\n",
1189 + " return [indices, np.zeros_like(indices)]\n",
1190 + "\n",
1191 + "def predict_load_data(x): #Pandas Dataframe을 인풋으로 받는다\n",
1192 + " data_df = x\n",
1193 + " data_df[DATA_COLUMN] = data_df[DATA_COLUMN].astype(str)\n",
1194 + " data_x = predict_convert_data(data_df)\n",
1195 + "\n",
1196 + " return data_x"
1197 + ],
1198 + "execution_count": null,
1199 + "outputs": []
1200 + },
1201 + {
1202 + "cell_type": "code",
1203 + "metadata": {
1204 + "id": "DBY60yKJxnKL",
1205 + "colab_type": "code",
1206 + "colab": {
1207 + "base_uri": "https://localhost:8080/",
1208 + "height": 34
1209 + },
1210 + "outputId": "91537660-419c-4197-e583-f852b99e1c86"
1211 + },
1212 + "source": [
1213 + "test_set = predict_load_data(test)"
1214 + ],
1215 + "execution_count": null,
1216 + "outputs": [
1217 + {
1218 + "output_type": "stream",
1219 + "text": [
1220 + "100%|██████████| 3692/3692 [00:01<00:00, 2444.62it/s]\n"
1221 + ],
1222 + "name": "stderr"
1223 + }
1224 + ]
1225 + },
1226 + {
1227 + "cell_type": "code",
1228 + "metadata": {
1229 + "id": "jf9yeGiVbFxO",
1230 + "colab_type": "code",
1231 + "colab": {
1232 + "base_uri": "https://localhost:8080/",
1233 + "height": 170
1234 + },
1235 + "outputId": "26775179-5501-4dfa-d099-57129d8b0bbc"
1236 + },
1237 + "source": [
1238 + "bert_model = get_bert_finetuning_model(model)\n",
1239 + "bert_model.load_weights(\"gdrive/My Drive/body_bert_256.h5\")\n",
1240 + "preds = bert_model.predict(test_set)\n",
1241 + "from sklearn.metrics import classification_report\n",
1242 + "y_true = test['index']\n",
1243 + "# F1 Score 확인\n",
1244 + "print(classification_report(y_true, np.round(preds,0)))"
1245 + ],
1246 + "execution_count": null,
1247 + "outputs": [
1248 + {
1249 + "output_type": "stream",
1250 + "text": [
1251 + " precision recall f1-score support\n",
1252 + "\n",
1253 + " 0 0.50 0.52 0.51 1867\n",
1254 + " 1 0.49 0.48 0.49 1825\n",
1255 + "\n",
1256 + " accuracy 0.50 3692\n",
1257 + " macro avg 0.50 0.50 0.50 3692\n",
1258 + "weighted avg 0.50 0.50 0.50 3692\n",
1259 + "\n"
1260 + ],
1261 + "name": "stdout"
1262 + }
1263 + ]
1264 + }
1265 + ]
1266 +}
\ No newline at end of file
1 +{
2 + "nbformat": 4,
3 + "nbformat_minor": 0,
4 + "metadata": {
5 + "colab": {
6 + "name": "bert news label.ipynb",
7 + "provenance": []
8 + },
9 + "kernelspec": {
10 + "name": "python3",
11 + "display_name": "Python 3"
12 + },
13 + "accelerator": "GPU"
14 + },
15 + "cells": [
16 + {
17 + "cell_type": "code",
18 + "metadata": {
19 + "id": "58B51bnMtDVX",
20 + "colab_type": "code",
21 + "colab": {
22 + "base_uri": "https://localhost:8080/",
23 + "height": 122
24 + },
25 + "outputId": "6e85676a-2b15-4885-b467-3358de1e7189"
26 + },
27 + "source": [
28 + "from google.colab import auth\n",
29 + "auth.authenticate_user()\n",
30 + "\n",
31 + "from google.colab import drive\n",
32 + "drive.mount('/content/gdrive')"
33 + ],
34 + "execution_count": null,
35 + "outputs": [
36 + {
37 + "output_type": "stream",
38 + "text": [
39 + "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n",
40 + "\n",
41 + "Enter your authorization code:\n",
42 + "··········\n",
43 + "Mounted at /content/gdrive\n"
44 + ],
45 + "name": "stdout"
46 + }
47 + ]
48 + },
49 + {
50 + "cell_type": "code",
51 + "metadata": {
52 + "id": "2GWn_WDkvp3g",
53 + "colab_type": "code",
54 + "colab": {}
55 + },
56 + "source": [
57 + "import pandas as pd\n",
58 + "combined_data = pd.read_csv('gdrive/My Drive/capstone 2/event_embedding/Thesis_data/combined_data3.csv', encoding='utf-8') \n",
59 + "combined_data\n",
60 + "\n",
61 + "\n",
62 + "path = \"gdrive/My Drive/capstone 2/\""
63 + ],
64 + "execution_count": null,
65 + "outputs": []
66 + },
67 + {
68 + "cell_type": "code",
69 + "metadata": {
70 + "id": "ovci8fVpZUmN",
71 + "colab_type": "code",
72 + "colab": {
73 + "base_uri": "https://localhost:8080/",
74 + "height": 419
75 + },
76 + "outputId": "55dddc67-b92a-4cc6-a9b8-152e594441ce"
77 + },
78 + "source": [
79 + "combined_data"
80 + ],
81 + "execution_count": null,
82 + "outputs": [
83 + {
84 + "output_type": "execute_result",
85 + "data": {
86 + "text/html": [
87 + "<div>\n",
88 + "<style scoped>\n",
89 + " .dataframe tbody tr th:only-of-type {\n",
90 + " vertical-align: middle;\n",
91 + " }\n",
92 + "\n",
93 + " .dataframe tbody tr th {\n",
94 + " vertical-align: top;\n",
95 + " }\n",
96 + "\n",
97 + " .dataframe thead th {\n",
98 + " text-align: right;\n",
99 + " }\n",
100 + "</style>\n",
101 + "<table border=\"1\" class=\"dataframe\">\n",
102 + " <thead>\n",
103 + " <tr style=\"text-align: right;\">\n",
104 + " <th></th>\n",
105 + " <th>time</th>\n",
106 + " <th>headline</th>\n",
107 + " <th>body</th>\n",
108 + " <th>Price</th>\n",
109 + " <th>Open</th>\n",
110 + " <th>High</th>\n",
111 + " <th>Low</th>\n",
112 + " <th>Vol</th>\n",
113 + " <th>Change</th>\n",
114 + " <th>index</th>\n",
115 + " </tr>\n",
116 + " </thead>\n",
117 + " <tbody>\n",
118 + " <tr>\n",
119 + " <th>0</th>\n",
120 + " <td>20050107</td>\n",
121 + " <td>Stocks End Lower</td>\n",
122 + " <td>Monday. Among some of the other highlights, c...</td>\n",
123 + " <td>4.93</td>\n",
124 + " <td>4.99</td>\n",
125 + " <td>5.05</td>\n",
126 + " <td>4.85</td>\n",
127 + " <td>434.26M</td>\n",
128 + " <td>-0.40%</td>\n",
129 + " <td>0</td>\n",
130 + " </tr>\n",
131 + " <tr>\n",
132 + " <th>1</th>\n",
133 + " <td>20050107</td>\n",
134 + " <td>Vital Signs for the Week of Jan. 10</td>\n",
135 + " <td>Palo Alto, Calif. EARNINGS REP...</td>\n",
136 + " <td>4.93</td>\n",
137 + " <td>4.99</td>\n",
138 + " <td>5.05</td>\n",
139 + " <td>4.85</td>\n",
140 + " <td>434.26M</td>\n",
141 + " <td>-0.40%</td>\n",
142 + " <td>0</td>\n",
143 + " </tr>\n",
144 + " <tr>\n",
145 + " <th>2</th>\n",
146 + " <td>20050110</td>\n",
147 + " <td>Tightwad IT Buyers Loosen Up</td>\n",
148 + " <td>plain-vanilla desktops, according to NPD Grou...</td>\n",
149 + " <td>4.61</td>\n",
150 + " <td>4.88</td>\n",
151 + " <td>4.94</td>\n",
152 + " <td>4.58</td>\n",
153 + " <td>654.04M</td>\n",
154 + " <td>-6.49%</td>\n",
155 + " <td>0</td>\n",
156 + " </tr>\n",
157 + " <tr>\n",
158 + " <th>3</th>\n",
159 + " <td>20050110</td>\n",
160 + " <td>Stocks Finish Slightly Higher</td>\n",
161 + " <td>regular session. Looking ahead this wee...</td>\n",
162 + " <td>4.61</td>\n",
163 + " <td>4.88</td>\n",
164 + " <td>4.94</td>\n",
165 + " <td>4.58</td>\n",
166 + " <td>654.04M</td>\n",
167 + " <td>-6.49%</td>\n",
168 + " <td>0</td>\n",
169 + " </tr>\n",
170 + " <tr>\n",
171 + " <th>4</th>\n",
172 + " <td>20050110</td>\n",
173 + " <td>Commentary: The New Driver In Chipland</td>\n",
174 + " <td>easy to see the consumer influence. Digital c...</td>\n",
175 + " <td>4.61</td>\n",
176 + " <td>4.88</td>\n",
177 + " <td>4.94</td>\n",
178 + " <td>4.58</td>\n",
179 + " <td>654.04M</td>\n",
180 + " <td>-6.49%</td>\n",
181 + " <td>0</td>\n",
182 + " </tr>\n",
183 + " <tr>\n",
184 + " <th>...</th>\n",
185 + " <td>...</td>\n",
186 + " <td>...</td>\n",
187 + " <td>...</td>\n",
188 + " <td>...</td>\n",
189 + " <td>...</td>\n",
190 + " <td>...</td>\n",
191 + " <td>...</td>\n",
192 + " <td>...</td>\n",
193 + " <td>...</td>\n",
194 + " <td>...</td>\n",
195 + " </tr>\n",
196 + " <tr>\n",
197 + " <th>24046</th>\n",
198 + " <td>20150108</td>\n",
199 + " <td>Israel's Water Ninja</td>\n",
200 + " <td>influenced by his grandfather, who built Tel ...</td>\n",
201 + " <td>112.01</td>\n",
202 + " <td>112.67</td>\n",
203 + " <td>113.25</td>\n",
204 + " <td>110.21</td>\n",
205 + " <td>53.70M</td>\n",
206 + " <td>0.11%</td>\n",
207 + " <td>1</td>\n",
208 + " </tr>\n",
209 + " <tr>\n",
210 + " <th>24047</th>\n",
211 + " <td>20150108</td>\n",
212 + " <td>What Drivers Want: Design Lessons From Ford's ...</td>\n",
213 + " <td>faster, simpler, and easier to use. Will the ...</td>\n",
214 + " <td>112.01</td>\n",
215 + " <td>112.67</td>\n",
216 + " <td>113.25</td>\n",
217 + " <td>110.21</td>\n",
218 + " <td>53.70M</td>\n",
219 + " <td>0.11%</td>\n",
220 + " <td>1</td>\n",
221 + " </tr>\n",
222 + " <tr>\n",
223 + " <th>24048</th>\n",
224 + " <td>20150108</td>\n",
225 + " <td>AT&amp;T May Face FCC Fine Over Mobile Data Slowdo...</td>\n",
226 + " <td>halting the practice and millions of dollars ...</td>\n",
227 + " <td>112.01</td>\n",
228 + " <td>112.67</td>\n",
229 + " <td>113.25</td>\n",
230 + " <td>110.21</td>\n",
231 + " <td>53.70M</td>\n",
232 + " <td>0.11%</td>\n",
233 + " <td>1</td>\n",
234 + " </tr>\n",
235 + " <tr>\n",
236 + " <th>24049</th>\n",
237 + " <td>20150108</td>\n",
238 + " <td>Is Samsung Feeling the Squeeze From Apple?</td>\n",
239 + " <td>UBS Managing Director Steve Milunovich \\ndisc...</td>\n",
240 + " <td>112.01</td>\n",
241 + " <td>112.67</td>\n",
242 + " <td>113.25</td>\n",
243 + " <td>110.21</td>\n",
244 + " <td>53.70M</td>\n",
245 + " <td>0.11%</td>\n",
246 + " <td>1</td>\n",
247 + " </tr>\n",
248 + " <tr>\n",
249 + " <th>24050</th>\n",
250 + " <td>20150108</td>\n",
251 + " <td>Company News: Auto Industry, U.S. Steel, Veriz...</td>\n",
252 + " <td>billion sale to Apple last year. The complain...</td>\n",
253 + " <td>112.01</td>\n",
254 + " <td>112.67</td>\n",
255 + " <td>113.25</td>\n",
256 + " <td>110.21</td>\n",
257 + " <td>53.70M</td>\n",
258 + " <td>0.11%</td>\n",
259 + " <td>1</td>\n",
260 + " </tr>\n",
261 + " </tbody>\n",
262 + "</table>\n",
263 + "<p>24051 rows × 10 columns</p>\n",
264 + "</div>"
265 + ],
266 + "text/plain": [
267 + " time ... index\n",
268 + "0 20050107 ... 0\n",
269 + "1 20050107 ... 0\n",
270 + "2 20050110 ... 0\n",
271 + "3 20050110 ... 0\n",
272 + "4 20050110 ... 0\n",
273 + "... ... ... ...\n",
274 + "24046 20150108 ... 1\n",
275 + "24047 20150108 ... 1\n",
276 + "24048 20150108 ... 1\n",
277 + "24049 20150108 ... 1\n",
278 + "24050 20150108 ... 1\n",
279 + "\n",
280 + "[24051 rows x 10 columns]"
281 + ]
282 + },
283 + "metadata": {
284 + "tags": []
285 + },
286 + "execution_count": 3
287 + }
288 + ]
289 + },
290 + {
291 + "cell_type": "code",
292 + "metadata": {
293 + "id": "XBgA_6YRv3KB",
294 + "colab_type": "code",
295 + "colab": {
296 + "base_uri": "https://localhost:8080/",
297 + "height": 1000
298 + },
299 + "outputId": "73a28fca-497e-4b21-a3f1-1e4de356f3e6"
300 + },
301 + "source": [
302 + "%tensorflow_version 1.x\n",
303 + "import tensorflow as tf\n",
304 + "\n",
305 + "import pandas as pd\n",
306 + "import numpy as np \n",
307 + "import re\n",
308 + "import pickle\n",
309 + "\n",
310 + "import keras as keras\n",
311 + "from keras.models import load_model\n",
312 + "from keras import backend as K\n",
313 + "from keras import Input, Model\n",
314 + "from keras import optimizers\n",
315 + "\n",
316 + "import codecs\n",
317 + "from tqdm import tqdm\n",
318 + "import shutil\n",
319 + "import warnings\n",
320 + "import tensorflow as tf\n",
321 + "import os\n",
322 + "warnings.filterwarnings(action='ignore')\n",
323 + "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' \n",
324 + "tf.logging.set_verbosity(tf.logging.ERROR)\n",
325 + "\n",
326 + "!pip install keras-bert\n",
327 + "!pip install keras-radam"
328 + ],
329 + "execution_count": null,
330 + "outputs": [
331 + {
332 + "output_type": "stream",
333 + "text": [
334 + "TensorFlow 1.x selected.\n"
335 + ],
336 + "name": "stdout"
337 + },
338 + {
339 + "output_type": "stream",
340 + "text": [
341 + "Using TensorFlow backend.\n"
342 + ],
343 + "name": "stderr"
344 + },
345 + {
346 + "output_type": "stream",
347 + "text": [
348 + "Collecting keras-bert\n",
349 + " Downloading https://files.pythonhosted.org/packages/2c/0f/cdc886c1018943ea62d3209bc964413d5aa9d0eb7e493abd8545be679294/keras-bert-0.81.0.tar.gz\n",
350 + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from keras-bert) (1.18.4)\n",
351 + "Requirement already satisfied: Keras in /usr/local/lib/python3.6/dist-packages (from keras-bert) (2.3.1)\n",
352 + "Collecting keras-transformer>=0.30.0\n",
353 + " Downloading https://files.pythonhosted.org/packages/22/b9/9040ec948ef895e71df6bee505a1f7e1c99ffedb409cb6eb329f04ece6e0/keras-transformer-0.33.0.tar.gz\n",
354 + "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (2.10.0)\n",
355 + "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (1.1.2)\n",
356 + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (1.12.0)\n",
357 + "Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (1.4.1)\n",
358 + "Requirement already satisfied: keras-applications>=1.0.6 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (1.0.8)\n",
359 + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from Keras->keras-bert) (3.13)\n",
360 + "Collecting keras-pos-embd>=0.10.0\n",
361 + " Downloading https://files.pythonhosted.org/packages/09/70/b63ed8fc660da2bb6ae29b9895401c628da5740c048c190b5d7107cadd02/keras-pos-embd-0.11.0.tar.gz\n",
362 + "Collecting keras-multi-head>=0.22.0\n",
363 + " Downloading https://files.pythonhosted.org/packages/a5/f0/a9a7528b8fefacaa9c5db736036fd8c061d754830a29c34129f6847bd338/keras-multi-head-0.24.0.tar.gz\n",
364 + "Collecting keras-layer-normalization>=0.12.0\n",
365 + " Downloading https://files.pythonhosted.org/packages/a4/0e/d1078df0494bac9ce1a67954e5380b6e7569668f0f3b50a9531c62c1fc4a/keras-layer-normalization-0.14.0.tar.gz\n",
366 + "Collecting keras-position-wise-feed-forward>=0.5.0\n",
367 + " Downloading https://files.pythonhosted.org/packages/e3/59/f0faa1037c033059e7e9e7758e6c23b4d1c0772cd48de14c4b6fd4033ad5/keras-position-wise-feed-forward-0.6.0.tar.gz\n",
368 + "Collecting keras-embed-sim>=0.7.0\n",
369 + " Downloading https://files.pythonhosted.org/packages/bc/20/735fd53f6896e2af63af47e212601c1b8a7a80d00b6126c388c9d1233892/keras-embed-sim-0.7.0.tar.gz\n",
370 + "Collecting keras-self-attention==0.41.0\n",
371 + " Downloading https://files.pythonhosted.org/packages/1b/1c/01599219bef7266fa43b3316e4f55bcb487734d3bafdc60ffd564f3cfe29/keras-self-attention-0.41.0.tar.gz\n",
372 + "Building wheels for collected packages: keras-bert, keras-transformer, keras-pos-embd, keras-multi-head, keras-layer-normalization, keras-position-wise-feed-forward, keras-embed-sim, keras-self-attention\n",
373 + " Building wheel for keras-bert (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
374 + " Created wheel for keras-bert: filename=keras_bert-0.81.0-cp36-none-any.whl size=37913 sha256=f6e87897fa56346f3a9bd0607c976c0fb72e1d4f5d5798159416838347b34b2f\n",
375 + " Stored in directory: /root/.cache/pip/wheels/bd/27/da/ffc2d573aa48b87440ec4f98bc7c992e3a2d899edb2d22ef9e\n",
376 + " Building wheel for keras-transformer (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
377 + " Created wheel for keras-transformer: filename=keras_transformer-0.33.0-cp36-none-any.whl size=13260 sha256=4cf6dcab922b6caf627c1ba6adc5dbe6e8e2e4d7f59247b710d043b3bc5f8da2\n",
378 + " Stored in directory: /root/.cache/pip/wheels/26/98/13/a28402939e1d48edd8704e6b02f223795af4a706815f4bf6d8\n",
379 + " Building wheel for keras-pos-embd (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
380 + " Created wheel for keras-pos-embd: filename=keras_pos_embd-0.11.0-cp36-none-any.whl size=7554 sha256=8d7fac58ed8196ae123121c05fc80e7cdbcd03425613de81b7512c0a270a4ba2\n",
381 + " Stored in directory: /root/.cache/pip/wheels/5b/a1/a0/ce6b1d49ba1a9a76f592e70cf297b05c96bc9f418146761032\n",
382 + " Building wheel for keras-multi-head (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
383 + " Created wheel for keras-multi-head: filename=keras_multi_head-0.24.0-cp36-none-any.whl size=15511 sha256=965f1fd64d0293581290a3590617435dce809574fa0029af5b70f2a827244133\n",
384 + " Stored in directory: /root/.cache/pip/wheels/b6/84/01/dbcb50629030c8647a19dd0b7134574fad56c531bdb243bd20\n",
385 + " Building wheel for keras-layer-normalization (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
386 + " Created wheel for keras-layer-normalization: filename=keras_layer_normalization-0.14.0-cp36-none-any.whl size=5268 sha256=c9f4b2d27ebb8746e641efeaa10ccd6d26ccecf07851d6faebe0ffb4863deaa1\n",
387 + " Stored in directory: /root/.cache/pip/wheels/54/80/22/a638a7d406fd155e507aa33d703e3fa2612b9eb7bb4f4fe667\n",
388 + " Building wheel for keras-position-wise-feed-forward (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
389 + " Created wheel for keras-position-wise-feed-forward: filename=keras_position_wise_feed_forward-0.6.0-cp36-none-any.whl size=5623 sha256=d502009afa989aa58bd189344430c7c5518e9465a0a1c6e4ef21d77a162d9c97\n",
390 + " Stored in directory: /root/.cache/pip/wheels/39/e2/e2/3514fef126a00574b13bc0b9e23891800158df3a3c19c96e3b\n",
391 + " Building wheel for keras-embed-sim (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
392 + " Created wheel for keras-embed-sim: filename=keras_embed_sim-0.7.0-cp36-none-any.whl size=4676 sha256=c7445fbf736a11babf19d02ddb3d76f098a00706c800f3080ebc9a55745ca146\n",
393 + " Stored in directory: /root/.cache/pip/wheels/d1/bc/b1/b0c45cee4ca2e6c86586b0218ffafe7f0703c6d07fdf049866\n",
394 + " Building wheel for keras-self-attention (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
395 + " Created wheel for keras-self-attention: filename=keras_self_attention-0.41.0-cp36-none-any.whl size=17288 sha256=bdeda9b286ae3be34885c5183effca526d866cba7dd00c740f02eb340e1fab42\n",
396 + " Stored in directory: /root/.cache/pip/wheels/cc/dc/17/84258b27a04cd38ac91998abe148203720ca696186635db694\n",
397 + "Successfully built keras-bert keras-transformer keras-pos-embd keras-multi-head keras-layer-normalization keras-position-wise-feed-forward keras-embed-sim keras-self-attention\n",
398 + "Installing collected packages: keras-pos-embd, keras-self-attention, keras-multi-head, keras-layer-normalization, keras-position-wise-feed-forward, keras-embed-sim, keras-transformer, keras-bert\n",
399 + "Successfully installed keras-bert-0.81.0 keras-embed-sim-0.7.0 keras-layer-normalization-0.14.0 keras-multi-head-0.24.0 keras-pos-embd-0.11.0 keras-position-wise-feed-forward-0.6.0 keras-self-attention-0.41.0 keras-transformer-0.33.0\n",
400 + "Collecting keras-radam\n",
401 + " Downloading https://files.pythonhosted.org/packages/46/8d/b83ccaa94253fbc920b21981f038393041d92236bb541751b98a66a2ac1d/keras-radam-0.15.0.tar.gz\n",
402 + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from keras-radam) (1.18.4)\n",
403 + "Requirement already satisfied: Keras in /usr/local/lib/python3.6/dist-packages (from keras-radam) (2.3.1)\n",
404 + "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (2.10.0)\n",
405 + "Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (1.4.1)\n",
406 + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (3.13)\n",
407 + "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (1.12.0)\n",
408 + "Requirement already satisfied: keras-applications>=1.0.6 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (1.0.8)\n",
409 + "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from Keras->keras-radam) (1.1.2)\n",
410 + "Building wheels for collected packages: keras-radam\n",
411 + " Building wheel for keras-radam (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
412 + " Created wheel for keras-radam: filename=keras_radam-0.15.0-cp36-none-any.whl size=14685 sha256=60abbb595b856dbbf59934ad85b8754fc6d57e41d84bce2fee5b922a3717fc8a\n",
413 + " Stored in directory: /root/.cache/pip/wheels/79/a0/c0/670b0a118e8f078539fafec7bd02eba0af921f745660c7f83f\n",
414 + "Successfully built keras-radam\n",
415 + "Installing collected packages: keras-radam\n",
416 + "Successfully installed keras-radam-0.15.0\n"
417 + ],
418 + "name": "stdout"
419 + }
420 + ]
421 + },
422 + {
423 + "cell_type": "code",
424 + "metadata": {
425 + "id": "V7_zjhL5wGeB",
426 + "colab_type": "code",
427 + "colab": {}
428 + },
429 + "source": [
430 + "from keras_bert import load_trained_model_from_checkpoint, load_vocabulary\n",
431 + "from keras_bert import Tokenizer\n",
432 + "from keras_bert import AdamWarmup, calc_train_steps\n",
433 + "\n",
434 + "from keras_radam import RAdam"
435 + ],
436 + "execution_count": null,
437 + "outputs": []
438 + },
439 + {
440 + "cell_type": "code",
441 + "metadata": {
442 + "id": "RE5pjPZjwG3q",
443 + "colab_type": "code",
444 + "colab": {
445 + "base_uri": "https://localhost:8080/",
446 + "height": 102
447 + },
448 + "outputId": "2b293bd2-7d77-4a03-a8fe-af5896058933"
449 + },
450 + "source": [
451 + "os.listdir(path+'/bert')"
452 + ],
453 + "execution_count": null,
454 + "outputs": [
455 + {
456 + "output_type": "execute_result",
457 + "data": {
458 + "text/plain": [
459 + "['bert_config.json',\n",
460 + " 'vocab.txt',\n",
461 + " 'bert_model.ckpt.index',\n",
462 + " 'bert_model.ckpt.data-00000-of-00001',\n",
463 + " 'bert_model.ckpt.meta']"
464 + ]
465 + },
466 + "metadata": {
467 + "tags": []
468 + },
469 + "execution_count": 6
470 + }
471 + ]
472 + },
473 + {
474 + "cell_type": "code",
475 + "metadata": {
476 + "id": "yWqOLyGWwIMf",
477 + "colab_type": "code",
478 + "colab": {}
479 + },
480 + "source": [
481 + "SEQ_LEN = 256\n",
482 + "BATCH_SIZE = 8\n",
483 + "EPOCHS=2\n",
484 + "LR=1e-5\n",
485 + "\n",
486 + "pretrained_path = path+\"/bert\"\n",
487 + "config_path = os.path.join(pretrained_path, 'bert_config.json')\n",
488 + "checkpoint_path = os.path.join(pretrained_path, 'bert_model.ckpt')\n",
489 + "vocab_path = os.path.join(pretrained_path, 'vocab.txt')\n",
490 + "\n",
491 + "DATA_COLUMN = \"body\"\n",
492 + "LABEL_COLUMN = \"index\""
493 + ],
494 + "execution_count": null,
495 + "outputs": []
496 + },
497 + {
498 + "cell_type": "code",
499 + "metadata": {
500 + "id": "G4E3vhF5wKmg",
501 + "colab_type": "code",
502 + "colab": {}
503 + },
504 + "source": [
505 + "token_dict = {}\n",
506 + "with codecs.open(vocab_path, 'r', 'utf8') as reader:\n",
507 + " for line in reader:\n",
508 + " token = line.strip()\n",
509 + " if \"_\" in token:\n",
510 + " token = token.replace(\"_\",\"\")\n",
511 + " token = \"##\" + token\n",
512 + " token_dict[token] = len(token_dict)"
513 + ],
514 + "execution_count": null,
515 + "outputs": []
516 + },
517 + {
518 + "cell_type": "code",
519 + "metadata": {
520 + "id": "c5a7hPzfwRcr",
521 + "colab_type": "code",
522 + "colab": {}
523 + },
524 + "source": [
525 + "tokenizer = Tokenizer(token_dict)"
526 + ],
527 + "execution_count": null,
528 + "outputs": []
529 + },
530 + {
531 + "cell_type": "code",
532 + "metadata": {
533 + "id": "vehabKa5wTKG",
534 + "colab_type": "code",
535 + "colab": {}
536 + },
537 + "source": [
538 + "def convert_data(data_df):\n",
539 + " global tokenizer\n",
540 + " indices, targets = [], []\n",
541 + " for i in tqdm(range(len(data_df))):\n",
542 + " ids, segments = tokenizer.encode((data_df.iloc[i])[DATA_COLUMN], max_len=SEQ_LEN)\n",
543 + " indices.append(ids)\n",
544 + " targets.append((data_df.iloc[i])[LABEL_COLUMN])\n",
545 + " items = list(zip(indices, targets))\n",
546 + " \n",
547 + " indices, targets = zip(*items)\n",
548 + " indices = np.array(indices)\n",
549 + " return [indices, np.zeros_like(indices)], np.array(targets)\n",
550 + "\n",
551 + "def load_data(pandas_dataframe):\n",
552 + " data_df = pandas_dataframe\n",
553 + " # data_df[\"actor\"] = data_df[\"actor\"].astype(str)\n",
554 + " # data_df[\"action\"] = data_df[\"action\"].astype(str)\n",
555 + " # data_df[\"object\"] = data_df[\"object\"].astype(str)\n",
556 + " data_x, data_y = convert_data(data_df)\n",
557 + "\n",
558 + " return data_x, data_y"
559 + ],
560 + "execution_count": null,
561 + "outputs": []
562 + },
563 + {
564 + "cell_type": "code",
565 + "metadata": {
566 + "id": "V8xrXJlywXG-",
567 + "colab_type": "code",
568 + "colab": {
569 + "base_uri": "https://localhost:8080/",
570 + "height": 51
571 + },
572 + "outputId": "1c560b33-635a-4eca-df3c-eae387590031"
573 + },
574 + "source": [
575 + "from sklearn.model_selection import train_test_split\n",
576 + "train,val = train_test_split(combined_data,test_size = 0.2)\n",
577 + "\n",
578 + "train_x, train_y = load_data(train)\n",
579 + "test_x, test_y = load_data(val)"
580 + ],
581 + "execution_count": null,
582 + "outputs": [
583 + {
584 + "output_type": "stream",
585 + "text": [
586 + "100%|██████████| 19240/19240 [00:14<00:00, 1307.17it/s]\n",
587 + "100%|██████████| 4811/4811 [00:03<00:00, 1265.52it/s]\n"
588 + ],
589 + "name": "stderr"
590 + }
591 + ]
592 + },
593 + {
594 + "cell_type": "code",
595 + "metadata": {
596 + "id": "VyyTba9swZgM",
597 + "colab_type": "code",
598 + "colab": {}
599 + },
600 + "source": [
601 + "layer_num = 12\n",
602 + "model = load_trained_model_from_checkpoint(\n",
603 + " config_path,\n",
604 + " checkpoint_path,\n",
605 + " training=True,\n",
606 + " trainable=True,\n",
607 + " seq_len=SEQ_LEN,)"
608 + ],
609 + "execution_count": null,
610 + "outputs": []
611 + },
612 + {
613 + "cell_type": "code",
614 + "metadata": {
615 + "id": "7jO_vzY6w_qa",
616 + "colab_type": "code",
617 + "colab": {}
618 + },
619 + "source": [
620 + "from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
621 + "def recall(y_true, y_pred):\n",
622 + " \"\"\"Recall metric.\n",
623 + "\n",
624 + " Only computes a batch-wise average of recall.\n",
625 + "\n",
626 + " Computes the recall, a metric for multi-label classification of\n",
627 + " how many relevant items are selected.\n",
628 + " \"\"\"\n",
629 + " true_positives = K.sum(K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))\n",
630 + " possible_positives = K.sum(K.round(K.clip(y_true[:, 0], 0, 1)))\n",
631 + " recall = true_positives / (possible_positives + K.epsilon())\n",
632 + " return recall\n",
633 + "\n",
634 + "\n",
635 + "def precision(y_true, y_pred):\n",
636 + " \"\"\"Precision metric.\n",
637 + "\n",
638 + " Only computes a batch-wise average of precision.\n",
639 + "\n",
640 + " Computes the precision, a metric for multi-label classification of\n",
641 + " how many selected items are relevant.\n",
642 + " \"\"\"\n",
643 + " true_positives = K.sum(K.round(K.clip(y_true[:, 0] * y_pred[:, 0], 0, 1)))\n",
644 + " predicted_positives = K.sum(K.round(K.clip(y_pred[:, 0], 0, 1)))\n",
645 + " precision = true_positives / (predicted_positives + K.epsilon())\n",
646 + " return precision\n",
647 + "\n",
648 + "\n",
649 + "def fbeta_score(y_true, y_pred):\n",
650 + "\n",
651 + " # If there are no true positives, fix the F score at 0 like sklearn.\n",
652 + " if K.sum(K.round(K.clip(y_true, 0, 1))) == 0:\n",
653 + " return 0\n",
654 + "\n",
655 + " p = precision(y_true, y_pred)\n",
656 + " r = recall(y_true, y_pred)\n",
657 + " bb = 1 ** 2\n",
658 + " fbeta_score = (1 + bb) * (p * r) / (bb * p + r + K.epsilon())\n",
659 + " return fbeta_score\n",
660 + "\n",
661 + "def get_bert_finetuning_model(model):\n",
662 + " inputs = model.inputs[:2]\n",
663 + " dense = model.layers[-3].output\n",
664 + "\n",
665 + " outputs = keras.layers.Dense(1, activation='sigmoid',kernel_initializer=keras.initializers.TruncatedNormal(stddev=0.02),\n",
666 + " name = 'real_output')(dense)\n",
667 + "\n",
668 + "\n",
669 + "\n",
670 + " bert_model = keras.models.Model(inputs, outputs)\n",
671 + " bert_model.compile(\n",
672 + " optimizer=RAdam(learning_rate=0.00001, weight_decay=0.0025),\n",
673 + " loss='binary_crossentropy',\n",
674 + " metrics=['accuracy', recall, precision, fbeta_score])\n",
675 + " \n",
676 + " return bert_model\n",
677 + " \n",
678 + "model_name = path + \"event_news_label_bert.h5\"\n",
679 + "checkpointer = ModelCheckpoint(filepath=model_name,\n",
680 + " monitor='val_fbeta_score', mode=\"max\",\n",
681 + " verbose=2, save_best_only=True)\n",
682 + "earlystopper = EarlyStopping(monitor='val_loss', patience=20, verbose=2, mode = \"min\")"
683 + ],
684 + "execution_count": null,
685 + "outputs": []
686 + },
687 + {
688 + "cell_type": "code",
689 + "metadata": {
690 + "id": "XQDRjG2vbKKs",
691 + "colab_type": "code",
692 + "colab": {
693 + "base_uri": "https://localhost:8080/",
694 + "height": 938
695 + },
696 + "outputId": "7fbefaa0-2ad0-4c1d-d486-27379af24381"
697 + },
698 + "source": [
699 + "with K.tensorflow_backend.tf.device('/gpu:0'):\n",
700 + " sess = K.get_session()\n",
701 + " uninitialized_variables = set([i.decode('ascii') for i in sess.run(tf.report_uninitialized_variables())])\n",
702 + " init = tf.variables_initializer([v for v in tf.global_variables() if v.name.split(':')[0] in uninitialized_variables])\n",
703 + " sess.run(init)\n",
704 + "\n",
705 + " bert_model = get_bert_finetuning_model(model)\n",
706 + " history = bert_model.fit(train_x, train_y, epochs=30, batch_size=16, verbose = 1, validation_data=(test_x, test_y))\n",
707 + " bert_model.save_weights(\"gdrive/My Drive/body_bert_256_epoch30.h5\")"
708 + ],
709 + "execution_count": null,
710 + "outputs": [
711 + {
712 + "output_type": "stream",
713 + "text": [
714 + "Train on 19240 samples, validate on 4811 samples\n",
715 + "Epoch 1/30\n",
716 + "19240/19240 [==============================] - 1236s 64ms/step - loss: 0.6922 - accuracy: 0.5271 - recall: 0.9021 - precision: 0.5280 - fbeta_score: 0.6416 - val_loss: 0.6910 - val_accuracy: 0.5340 - val_recall: 1.0000 - val_precision: 0.5341 - val_fbeta_score: 0.6876\n",
717 + "Epoch 2/30\n",
718 + "19240/19240 [==============================] - 1228s 64ms/step - loss: 0.6914 - accuracy: 0.5291 - recall: 0.8927 - precision: 0.5204 - fbeta_score: 0.6347 - val_loss: 0.6919 - val_accuracy: 0.5340 - val_recall: 1.0000 - val_precision: 0.5341 - val_fbeta_score: 0.6876\n",
719 + "Epoch 3/30\n",
720 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.6861 - accuracy: 0.5491 - recall: 0.7746 - precision: 0.5634 - fbeta_score: 0.6203 - val_loss: 0.6902 - val_accuracy: 0.5309 - val_recall: 0.7255 - val_precision: 0.5468 - val_fbeta_score: 0.6113\n",
721 + "Epoch 4/30\n",
722 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.6125 - accuracy: 0.6657 - recall: 0.7281 - precision: 0.6842 - fbeta_score: 0.6798 - val_loss: 0.7663 - val_accuracy: 0.5259 - val_recall: 0.4899 - val_precision: 0.5644 - val_fbeta_score: 0.5093\n",
723 + "Epoch 5/30\n",
724 + "19240/19240 [==============================] - 1229s 64ms/step - loss: 0.3738 - accuracy: 0.8379 - recall: 0.8502 - precision: 0.8488 - fbeta_score: 0.8387 - val_loss: 1.0253 - val_accuracy: 0.5329 - val_recall: 0.6017 - val_precision: 0.5592 - val_fbeta_score: 0.5647\n",
725 + "Epoch 6/30\n",
726 + "19240/19240 [==============================] - 1229s 64ms/step - loss: 0.1909 - accuracy: 0.9276 - recall: 0.9332 - precision: 0.9313 - fbeta_score: 0.9271 - val_loss: 1.3036 - val_accuracy: 0.5319 - val_recall: 0.5900 - val_precision: 0.5597 - val_fbeta_score: 0.5601\n",
727 + "Epoch 7/30\n",
728 + "19240/19240 [==============================] - 1229s 64ms/step - loss: 0.1249 - accuracy: 0.9540 - recall: 0.9576 - precision: 0.9573 - fbeta_score: 0.9544 - val_loss: 1.6319 - val_accuracy: 0.5404 - val_recall: 0.6667 - val_precision: 0.5567 - val_fbeta_score: 0.5950\n",
729 + "Epoch 8/30\n",
730 + "19240/19240 [==============================] - 1229s 64ms/step - loss: 0.0950 - accuracy: 0.9663 - recall: 0.9678 - precision: 0.9675 - fbeta_score: 0.9655 - val_loss: 1.7987 - val_accuracy: 0.5383 - val_recall: 0.5949 - val_precision: 0.5670 - val_fbeta_score: 0.5654\n",
731 + "Epoch 9/30\n",
732 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.0802 - accuracy: 0.9715 - recall: 0.9726 - precision: 0.9745 - fbeta_score: 0.9717 - val_loss: 1.8214 - val_accuracy: 0.5311 - val_recall: 0.5689 - val_precision: 0.5639 - val_fbeta_score: 0.5503\n",
733 + "Epoch 10/30\n",
734 + "19240/19240 [==============================] - 1229s 64ms/step - loss: 0.0726 - accuracy: 0.9730 - recall: 0.9738 - precision: 0.9757 - fbeta_score: 0.9730 - val_loss: 1.9001 - val_accuracy: 0.5417 - val_recall: 0.6549 - val_precision: 0.5639 - val_fbeta_score: 0.5913\n",
735 + "Epoch 11/30\n",
736 + "19240/19240 [==============================] - 1229s 64ms/step - loss: 0.0618 - accuracy: 0.9768 - recall: 0.9769 - precision: 0.9794 - fbeta_score: 0.9767 - val_loss: 1.9707 - val_accuracy: 0.5350 - val_recall: 0.6545 - val_precision: 0.5576 - val_fbeta_score: 0.5870\n",
737 + "Epoch 12/30\n",
738 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.0607 - accuracy: 0.9779 - recall: 0.9785 - precision: 0.9805 - fbeta_score: 0.9780 - val_loss: 1.9424 - val_accuracy: 0.5371 - val_recall: 0.5922 - val_precision: 0.5664 - val_fbeta_score: 0.5638\n",
739 + "Epoch 13/30\n",
740 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.0521 - accuracy: 0.9796 - recall: 0.9808 - precision: 0.9814 - fbeta_score: 0.9798 - val_loss: 2.2737 - val_accuracy: 0.5383 - val_recall: 0.6275 - val_precision: 0.5605 - val_fbeta_score: 0.5782\n",
741 + "Epoch 14/30\n",
742 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.0514 - accuracy: 0.9797 - recall: 0.9803 - precision: 0.9818 - fbeta_score: 0.9797 - val_loss: 1.9318 - val_accuracy: 0.5309 - val_recall: 0.5317 - val_precision: 0.5681 - val_fbeta_score: 0.5332\n",
743 + "Epoch 15/30\n",
744 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.0449 - accuracy: 0.9813 - recall: 0.9797 - precision: 0.9844 - fbeta_score: 0.9808 - val_loss: 2.3235 - val_accuracy: 0.5277 - val_recall: 0.4475 - val_precision: 0.5793 - val_fbeta_score: 0.4868\n",
745 + "Epoch 16/30\n",
746 + "19240/19240 [==============================] - 1231s 64ms/step - loss: 0.0445 - accuracy: 0.9824 - recall: 0.9824 - precision: 0.9850 - fbeta_score: 0.9827 - val_loss: 2.1759 - val_accuracy: 0.5340 - val_recall: 0.4795 - val_precision: 0.5824 - val_fbeta_score: 0.5076\n",
747 + "Epoch 17/30\n",
748 + "19240/19240 [==============================] - 1231s 64ms/step - loss: 0.0412 - accuracy: 0.9827 - recall: 0.9822 - precision: 0.9854 - fbeta_score: 0.9827 - val_loss: 2.1135 - val_accuracy: 0.5390 - val_recall: 0.6302 - val_precision: 0.5630 - val_fbeta_score: 0.5813\n",
749 + "Epoch 18/30\n",
750 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.0418 - accuracy: 0.9828 - recall: 0.9826 - precision: 0.9852 - fbeta_score: 0.9828 - val_loss: 2.2571 - val_accuracy: 0.5394 - val_recall: 0.6241 - val_precision: 0.5648 - val_fbeta_score: 0.5785\n",
751 + "Epoch 19/30\n",
752 + "19240/19240 [==============================] - 1229s 64ms/step - loss: 0.0375 - accuracy: 0.9839 - recall: 0.9837 - precision: 0.9863 - fbeta_score: 0.9839 - val_loss: 2.4486 - val_accuracy: 0.5427 - val_recall: 0.6864 - val_precision: 0.5607 - val_fbeta_score: 0.6030\n",
753 + "Epoch 20/30\n",
754 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.0390 - accuracy: 0.9837 - recall: 0.9828 - precision: 0.9865 - fbeta_score: 0.9836 - val_loss: 2.3747 - val_accuracy: 0.5321 - val_recall: 0.5468 - val_precision: 0.5661 - val_fbeta_score: 0.5405\n",
755 + "Epoch 21/30\n",
756 + "19240/19240 [==============================] - 1231s 64ms/step - loss: 0.0347 - accuracy: 0.9852 - recall: 0.9846 - precision: 0.9878 - fbeta_score: 0.9854 - val_loss: 2.3107 - val_accuracy: 0.5375 - val_recall: 0.5940 - val_precision: 0.5656 - val_fbeta_score: 0.5647\n",
757 + "Epoch 22/30\n",
758 + "19240/19240 [==============================] - 1231s 64ms/step - loss: 0.0356 - accuracy: 0.9854 - recall: 0.9844 - precision: 0.9877 - fbeta_score: 0.9850 - val_loss: 2.4489 - val_accuracy: 0.5371 - val_recall: 0.6188 - val_precision: 0.5599 - val_fbeta_score: 0.5741\n",
759 + "Epoch 23/30\n",
760 + "19240/19240 [==============================] - 1230s 64ms/step - loss: 0.0368 - accuracy: 0.9837 - recall: 0.9825 - precision: 0.9863 - fbeta_score: 0.9832 - val_loss: 2.1525 - val_accuracy: 0.5271 - val_recall: 0.4709 - val_precision: 0.5715 - val_fbeta_score: 0.4996\n",
761 + "Epoch 24/30\n",
762 + "19240/19240 [==============================] - 1231s 64ms/step - loss: 0.0341 - accuracy: 0.9845 - recall: 0.9841 - precision: 0.9870 - fbeta_score: 0.9846 - val_loss: 2.1537 - val_accuracy: 0.5271 - val_recall: 0.5332 - val_precision: 0.5623 - val_fbeta_score: 0.5319\n",
763 + "Epoch 25/30\n",
764 + "19240/19240 [==============================] - 1231s 64ms/step - loss: 0.0313 - accuracy: 0.9857 - recall: 0.9853 - precision: 0.9879 - fbeta_score: 0.9856 - val_loss: 2.4771 - val_accuracy: 0.5309 - val_recall: 0.6418 - val_precision: 0.5529 - val_fbeta_score: 0.5808\n",
765 + "Epoch 26/30\n",
766 + "15408/19240 [=======================>......] - ETA: 3:48 - loss: 0.0320 - accuracy: 0.9859 - recall: 0.9857 - precision: 0.9883 - fbeta_score: 0.9861"
767 + ],
768 + "name": "stdout"
769 + }
770 + ]
771 + },
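The log above is a textbook overfitting curve: training accuracy climbs past 0.98 while validation accuracy stays near 0.53 and val_loss rises from about 0.69 to over 2.4. A minimal sketch of a Keras early-stopping callback that would halt such a run (the actual `fit()` call lives in an earlier cell; the `monitor` and `patience` values here are illustrative):

```python
from keras.callbacks import EarlyStopping

# Stop once val_loss has not improved for 2 epochs and roll back to the
# best weights seen so far, instead of running all 30 epochs.
early_stop = EarlyStopping(monitor="val_loss", patience=2,
                           restore_best_weights=True)

# bert_model.fit(train_x, train_y, validation_data=(val_x, val_y),
#                epochs=30, callbacks=[early_stop])
```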
772 + {
773 + "cell_type": "code",
774 + "metadata": {
775 + "id": "jBpYE9eVxfXv",
776 + "colab_type": "code",
777 + "colab": {}
778 + },
779 + "source": [
780 + "test = pd.read_csv('gdrive/My Drive/capstone 2/event_embedding/Thesis_data/combined_data2015.csv', encoding='utf-8') "
781 + ],
782 + "execution_count": null,
783 + "outputs": []
784 + },
785 + {
786 + "cell_type": "code",
787 + "metadata": {
788 + "id": "NQu0eoaWxfsv",
789 + "colab_type": "code",
790 + "colab": {}
791 + },
792 + "source": [
793 + "def predict_convert_data(data_df):\n",
794 + " global tokenizer\n",
795 + " indices = []\n",
796 + " for i in tqdm(range(len(data_df))):\n",
797 + " ids, segments = tokenizer.encode(data_df[DATA_COLUMN][i], max_len=SEQ_LEN)\n",
798 + " indices.append(ids)\n",
799 + " \n",
800 + " items = indices\n",
801 + " \n",
802 + " \n",
803 + " indices = np.array(indices)\n",
804 + " return [indices, np.zeros_like(indices)]\n",
805 + "\n",
806 + "def predict_load_data(x): #Pandas Dataframe을 인풋으로 받는다\n",
807 + " data_df = x\n",
808 + " data_df[DATA_COLUMN] = data_df[DATA_COLUMN].astype(str)\n",
809 + " data_x = predict_convert_data(data_df)\n",
810 + "\n",
811 + " return data_x"
812 + ],
813 + "execution_count": null,
814 + "outputs": []
815 + },
816 + {
817 + "cell_type": "code",
818 + "metadata": {
819 + "id": "DBY60yKJxnKL",
820 + "colab_type": "code",
821 + "colab": {
822 + "base_uri": "https://localhost:8080/",
823 + "height": 34
824 + },
825 + "outputId": "87137a7f-a38e-4fe4-b29b-cfd867cedd80"
826 + },
827 + "source": [
828 + "test_set = predict_load_data(test)"
829 + ],
830 + "execution_count": null,
831 + "outputs": [
832 + {
833 + "output_type": "stream",
834 + "text": [
835 + "100%|██████████| 3692/3692 [00:01<00:00, 2567.73it/s]\n"
836 + ],
837 + "name": "stderr"
838 + }
839 + ]
840 + },
841 + {
842 + "cell_type": "markdown",
843 + "metadata": {
844 + "id": "yuZyrVFCo6_9",
845 + "colab_type": "text"
846 + },
847 + "source": [
848 + "# Body 128"
849 + ]
850 + },
851 + {
852 + "cell_type": "code",
853 + "metadata": {
854 + "id": "jf9yeGiVbFxO",
855 + "colab_type": "code",
856 + "colab": {
857 + "base_uri": "https://localhost:8080/",
858 + "height": 170
859 + },
860 + "outputId": "67adb4d9-670c-41e5-f0ac-d20e1c6caae2"
861 + },
862 + "source": [
863 + "bert_model = get_bert_finetuning_model(model)\n",
864 + "bert_model.load_weights(\"gdrive/My Drive/body_bert.h5\")\n",
865 + "preds = bert_model.predict(test_set)\n",
866 + "from sklearn.metrics import classification_report\n",
867 + "y_true = test['index']\n",
868 + "# F1 Score 확인\n",
869 + "print(classification_report(y_true, np.round(preds,0)))"
870 + ],
871 + "execution_count": null,
872 + "outputs": [
873 + {
874 + "output_type": "stream",
875 + "text": [
876 + " precision recall f1-score support\n",
877 + "\n",
878 + " 0 0.51 0.24 0.33 1867\n",
879 + " 1 0.50 0.76 0.60 1825\n",
880 + "\n",
881 + " accuracy 0.50 3692\n",
882 + " macro avg 0.51 0.50 0.47 3692\n",
883 + "weighted avg 0.51 0.50 0.46 3692\n",
884 + "\n"
885 + ],
886 + "name": "stdout"
887 + }
888 + ]
889 + },
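For reference, `np.round(preds, 0)` above is simply a 0.5 cutoff on the model's sigmoid outputs. A small sketch that makes the threshold explicit (`report_at_threshold` is a hypothetical helper; any other cutoff could be substituted to trade precision against recall):

```python
import numpy as np
from sklearn.metrics import classification_report

def report_at_threshold(y_true, preds, threshold=0.5):
    # preds: (N, 1) sigmoid outputs from bert_model.predict(test_set)
    y_pred = (preds >= threshold).astype(int)
    return classification_report(y_true, y_pred)
```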
890 + {
891 + "cell_type": "markdown",
892 + "metadata": {
893 + "id": "CNChuUzCbY3t",
894 + "colab_type": "text"
895 + },
896 + "source": [
897 + "# Body 256 epoch 3"
898 + ]
899 + },
900 + {
901 + "cell_type": "code",
902 + "metadata": {
903 + "id": "y3l9jap3xpFB",
904 + "colab_type": "code",
905 + "colab": {
906 + "base_uri": "https://localhost:8080/",
907 + "height": 170
908 + },
909 + "outputId": "5c1f17cb-0f0c-4899-b1bf-e4db6dfceb3a"
910 + },
911 + "source": [
912 + "bert_model = get_bert_finetuning_model(model)\n",
913 + "bert_model.load_weights(path+\"body_bert_512.h5\")\n",
914 + "preds = bert_model.predict(test_set)\n",
915 + "from sklearn.metrics import classification_report\n",
916 + "y_true = test['index']\n",
917 + "# F1 Score 확인\n",
918 + "print(classification_report(y_true, np.round(preds,0)))"
919 + ],
920 + "execution_count": null,
921 + "outputs": [
922 + {
923 + "output_type": "stream",
924 + "text": [
925 + " precision recall f1-score support\n",
926 + "\n",
927 + " 0 0.48 0.22 0.30 1867\n",
928 + " 1 0.49 0.76 0.59 1825\n",
929 + "\n",
930 + " accuracy 0.49 3692\n",
931 + " macro avg 0.48 0.49 0.45 3692\n",
932 + "weighted avg 0.48 0.49 0.45 3692\n",
933 + "\n"
934 + ],
935 + "name": "stdout"
936 + }
937 + ]
938 + }
939 + ]
940 +}
...\ No newline at end of file ...\ No newline at end of file
This diff could not be displayed because it is too large.
1 +{
2 + "nbformat": 4,
3 + "nbformat_minor": 0,
4 + "metadata": {
5 + "colab": {
6 + "name": "bert word embedding.ipynb",
7 + "provenance": []
8 + },
9 + "kernelspec": {
10 + "name": "python3",
11 + "display_name": "Python 3"
12 + },
13 + "widgets": {
14 + "application/vnd.jupyter.widget-state+json": {
15 + "0488e2a159f94f1e8fd2d95cfa1f0c00": {
16 + "model_module": "@jupyter-widgets/controls",
17 + "model_name": "HBoxModel",
18 + "state": {
19 + "_view_name": "HBoxView",
20 + "_dom_classes": [],
21 + "_model_name": "HBoxModel",
22 + "_view_module": "@jupyter-widgets/controls",
23 + "_model_module_version": "1.5.0",
24 + "_view_count": null,
25 + "_view_module_version": "1.5.0",
26 + "box_style": "",
27 + "layout": "IPY_MODEL_f6b7f67b13a94abe81c8f311f5d9584e",
28 + "_model_module": "@jupyter-widgets/controls",
29 + "children": [
30 + "IPY_MODEL_182e7f63a7a747be9806d768c59ac8ed",
31 + "IPY_MODEL_89484e917aaf4be7b9c1fd73542101ec"
32 + ]
33 + }
34 + },
35 + "f6b7f67b13a94abe81c8f311f5d9584e": {
36 + "model_module": "@jupyter-widgets/base",
37 + "model_name": "LayoutModel",
38 + "state": {
39 + "_view_name": "LayoutView",
40 + "grid_template_rows": null,
41 + "right": null,
42 + "justify_content": null,
43 + "_view_module": "@jupyter-widgets/base",
44 + "overflow": null,
45 + "_model_module_version": "1.2.0",
46 + "_view_count": null,
47 + "flex_flow": null,
48 + "width": null,
49 + "min_width": null,
50 + "border": null,
51 + "align_items": null,
52 + "bottom": null,
53 + "_model_module": "@jupyter-widgets/base",
54 + "top": null,
55 + "grid_column": null,
56 + "overflow_y": null,
57 + "overflow_x": null,
58 + "grid_auto_flow": null,
59 + "grid_area": null,
60 + "grid_template_columns": null,
61 + "flex": null,
62 + "_model_name": "LayoutModel",
63 + "justify_items": null,
64 + "grid_row": null,
65 + "max_height": null,
66 + "align_content": null,
67 + "visibility": null,
68 + "align_self": null,
69 + "height": null,
70 + "min_height": null,
71 + "padding": null,
72 + "grid_auto_rows": null,
73 + "grid_gap": null,
74 + "max_width": null,
75 + "order": null,
76 + "_view_module_version": "1.2.0",
77 + "grid_template_areas": null,
78 + "object_position": null,
79 + "object_fit": null,
80 + "grid_auto_columns": null,
81 + "margin": null,
82 + "display": null,
83 + "left": null
84 + }
85 + },
86 + "182e7f63a7a747be9806d768c59ac8ed": {
87 + "model_module": "@jupyter-widgets/controls",
88 + "model_name": "FloatProgressModel",
89 + "state": {
90 + "_view_name": "ProgressView",
91 + "style": "IPY_MODEL_e3002daa07f44aa296d26fc14e9e5c10",
92 + "_dom_classes": [],
93 + "description": "Downloading: 100%",
94 + "_model_name": "FloatProgressModel",
95 + "bar_style": "success",
96 + "max": 213450,
97 + "_view_module": "@jupyter-widgets/controls",
98 + "_model_module_version": "1.5.0",
99 + "value": 213450,
100 + "_view_count": null,
101 + "_view_module_version": "1.5.0",
102 + "orientation": "horizontal",
103 + "min": 0,
104 + "description_tooltip": null,
105 + "_model_module": "@jupyter-widgets/controls",
106 + "layout": "IPY_MODEL_03e4968cf76248429f98b73ef104941b"
107 + }
108 + },
109 + "89484e917aaf4be7b9c1fd73542101ec": {
110 + "model_module": "@jupyter-widgets/controls",
111 + "model_name": "HTMLModel",
112 + "state": {
113 + "_view_name": "HTMLView",
114 + "style": "IPY_MODEL_33057b5773f04ab8a43d33eed74453bb",
115 + "_dom_classes": [],
116 + "description": "",
117 + "_model_name": "HTMLModel",
118 + "placeholder": "​",
119 + "_view_module": "@jupyter-widgets/controls",
120 + "_model_module_version": "1.5.0",
121 + "value": " 213k/213k [00:00&lt;00:00, 615kB/s]",
122 + "_view_count": null,
123 + "_view_module_version": "1.5.0",
124 + "description_tooltip": null,
125 + "_model_module": "@jupyter-widgets/controls",
126 + "layout": "IPY_MODEL_865728d013634aeaa7705c7350d86541"
127 + }
128 + },
129 + "e3002daa07f44aa296d26fc14e9e5c10": {
130 + "model_module": "@jupyter-widgets/controls",
131 + "model_name": "ProgressStyleModel",
132 + "state": {
133 + "_view_name": "StyleView",
134 + "_model_name": "ProgressStyleModel",
135 + "description_width": "initial",
136 + "_view_module": "@jupyter-widgets/base",
137 + "_model_module_version": "1.5.0",
138 + "_view_count": null,
139 + "_view_module_version": "1.2.0",
140 + "bar_color": null,
141 + "_model_module": "@jupyter-widgets/controls"
142 + }
143 + },
144 + "03e4968cf76248429f98b73ef104941b": {
145 + "model_module": "@jupyter-widgets/base",
146 + "model_name": "LayoutModel",
147 + "state": {
148 + "_view_name": "LayoutView",
149 + "grid_template_rows": null,
150 + "right": null,
151 + "justify_content": null,
152 + "_view_module": "@jupyter-widgets/base",
153 + "overflow": null,
154 + "_model_module_version": "1.2.0",
155 + "_view_count": null,
156 + "flex_flow": null,
157 + "width": null,
158 + "min_width": null,
159 + "border": null,
160 + "align_items": null,
161 + "bottom": null,
162 + "_model_module": "@jupyter-widgets/base",
163 + "top": null,
164 + "grid_column": null,
165 + "overflow_y": null,
166 + "overflow_x": null,
167 + "grid_auto_flow": null,
168 + "grid_area": null,
169 + "grid_template_columns": null,
170 + "flex": null,
171 + "_model_name": "LayoutModel",
172 + "justify_items": null,
173 + "grid_row": null,
174 + "max_height": null,
175 + "align_content": null,
176 + "visibility": null,
177 + "align_self": null,
178 + "height": null,
179 + "min_height": null,
180 + "padding": null,
181 + "grid_auto_rows": null,
182 + "grid_gap": null,
183 + "max_width": null,
184 + "order": null,
185 + "_view_module_version": "1.2.0",
186 + "grid_template_areas": null,
187 + "object_position": null,
188 + "object_fit": null,
189 + "grid_auto_columns": null,
190 + "margin": null,
191 + "display": null,
192 + "left": null
193 + }
194 + },
195 + "33057b5773f04ab8a43d33eed74453bb": {
196 + "model_module": "@jupyter-widgets/controls",
197 + "model_name": "DescriptionStyleModel",
198 + "state": {
199 + "_view_name": "StyleView",
200 + "_model_name": "DescriptionStyleModel",
201 + "description_width": "",
202 + "_view_module": "@jupyter-widgets/base",
203 + "_model_module_version": "1.5.0",
204 + "_view_count": null,
205 + "_view_module_version": "1.2.0",
206 + "_model_module": "@jupyter-widgets/controls"
207 + }
208 + },
209 + "865728d013634aeaa7705c7350d86541": {
210 + "model_module": "@jupyter-widgets/base",
211 + "model_name": "LayoutModel",
212 + "state": {
213 + "_view_name": "LayoutView",
214 + "grid_template_rows": null,
215 + "right": null,
216 + "justify_content": null,
217 + "_view_module": "@jupyter-widgets/base",
218 + "overflow": null,
219 + "_model_module_version": "1.2.0",
220 + "_view_count": null,
221 + "flex_flow": null,
222 + "width": null,
223 + "min_width": null,
224 + "border": null,
225 + "align_items": null,
226 + "bottom": null,
227 + "_model_module": "@jupyter-widgets/base",
228 + "top": null,
229 + "grid_column": null,
230 + "overflow_y": null,
231 + "overflow_x": null,
232 + "grid_auto_flow": null,
233 + "grid_area": null,
234 + "grid_template_columns": null,
235 + "flex": null,
236 + "_model_name": "LayoutModel",
237 + "justify_items": null,
238 + "grid_row": null,
239 + "max_height": null,
240 + "align_content": null,
241 + "visibility": null,
242 + "align_self": null,
243 + "height": null,
244 + "min_height": null,
245 + "padding": null,
246 + "grid_auto_rows": null,
247 + "grid_gap": null,
248 + "max_width": null,
249 + "order": null,
250 + "_view_module_version": "1.2.0",
251 + "grid_template_areas": null,
252 + "object_position": null,
253 + "object_fit": null,
254 + "grid_auto_columns": null,
255 + "margin": null,
256 + "display": null,
257 + "left": null
258 + }
259 + },
260 + "b3cf8354fb91443db5657239b1631db1": {
261 + "model_module": "@jupyter-widgets/controls",
262 + "model_name": "HBoxModel",
263 + "state": {
264 + "_view_name": "HBoxView",
265 + "_dom_classes": [],
266 + "_model_name": "HBoxModel",
267 + "_view_module": "@jupyter-widgets/controls",
268 + "_model_module_version": "1.5.0",
269 + "_view_count": null,
270 + "_view_module_version": "1.5.0",
271 + "box_style": "",
272 + "layout": "IPY_MODEL_dca67a11598049b5b6a2e87b1d1d9724",
273 + "_model_module": "@jupyter-widgets/controls",
274 + "children": [
275 + "IPY_MODEL_ad4a891b74304e5cafc91dcac6f1aa71",
276 + "IPY_MODEL_3ecdbfd3ce6c4e64ae31985197903358"
277 + ]
278 + }
279 + },
280 + "dca67a11598049b5b6a2e87b1d1d9724": {
281 + "model_module": "@jupyter-widgets/base",
282 + "model_name": "LayoutModel",
283 + "state": {
284 + "_view_name": "LayoutView",
285 + "grid_template_rows": null,
286 + "right": null,
287 + "justify_content": null,
288 + "_view_module": "@jupyter-widgets/base",
289 + "overflow": null,
290 + "_model_module_version": "1.2.0",
291 + "_view_count": null,
292 + "flex_flow": null,
293 + "width": null,
294 + "min_width": null,
295 + "border": null,
296 + "align_items": null,
297 + "bottom": null,
298 + "_model_module": "@jupyter-widgets/base",
299 + "top": null,
300 + "grid_column": null,
301 + "overflow_y": null,
302 + "overflow_x": null,
303 + "grid_auto_flow": null,
304 + "grid_area": null,
305 + "grid_template_columns": null,
306 + "flex": null,
307 + "_model_name": "LayoutModel",
308 + "justify_items": null,
309 + "grid_row": null,
310 + "max_height": null,
311 + "align_content": null,
312 + "visibility": null,
313 + "align_self": null,
314 + "height": null,
315 + "min_height": null,
316 + "padding": null,
317 + "grid_auto_rows": null,
318 + "grid_gap": null,
319 + "max_width": null,
320 + "order": null,
321 + "_view_module_version": "1.2.0",
322 + "grid_template_areas": null,
323 + "object_position": null,
324 + "object_fit": null,
325 + "grid_auto_columns": null,
326 + "margin": null,
327 + "display": null,
328 + "left": null
329 + }
330 + },
331 + "ad4a891b74304e5cafc91dcac6f1aa71": {
332 + "model_module": "@jupyter-widgets/controls",
333 + "model_name": "FloatProgressModel",
334 + "state": {
335 + "_view_name": "ProgressView",
336 + "style": "IPY_MODEL_cec8ee3dd75a468d985fb9d2c17cd7f7",
337 + "_dom_classes": [],
338 + "description": "Downloading: 100%",
339 + "_model_name": "FloatProgressModel",
340 + "bar_style": "success",
341 + "max": 433,
342 + "_view_module": "@jupyter-widgets/controls",
343 + "_model_module_version": "1.5.0",
344 + "value": 433,
345 + "_view_count": null,
346 + "_view_module_version": "1.5.0",
347 + "orientation": "horizontal",
348 + "min": 0,
349 + "description_tooltip": null,
350 + "_model_module": "@jupyter-widgets/controls",
351 + "layout": "IPY_MODEL_58100c551b1d4dd683e9bfb2c4059022"
352 + }
353 + },
354 + "3ecdbfd3ce6c4e64ae31985197903358": {
355 + "model_module": "@jupyter-widgets/controls",
356 + "model_name": "HTMLModel",
357 + "state": {
358 + "_view_name": "HTMLView",
359 + "style": "IPY_MODEL_1c55789eede0464f85386b4e41c46c06",
360 + "_dom_classes": [],
361 + "description": "",
362 + "_model_name": "HTMLModel",
363 + "placeholder": "​",
364 + "_view_module": "@jupyter-widgets/controls",
365 + "_model_module_version": "1.5.0",
366 + "value": " 433/433 [00:12&lt;00:00, 35.2B/s]",
367 + "_view_count": null,
368 + "_view_module_version": "1.5.0",
369 + "description_tooltip": null,
370 + "_model_module": "@jupyter-widgets/controls",
371 + "layout": "IPY_MODEL_b5def479898f453fb51cd221ff78b1e4"
372 + }
373 + },
374 + "cec8ee3dd75a468d985fb9d2c17cd7f7": {
375 + "model_module": "@jupyter-widgets/controls",
376 + "model_name": "ProgressStyleModel",
377 + "state": {
378 + "_view_name": "StyleView",
379 + "_model_name": "ProgressStyleModel",
380 + "description_width": "initial",
381 + "_view_module": "@jupyter-widgets/base",
382 + "_model_module_version": "1.5.0",
383 + "_view_count": null,
384 + "_view_module_version": "1.2.0",
385 + "bar_color": null,
386 + "_model_module": "@jupyter-widgets/controls"
387 + }
388 + },
389 + "58100c551b1d4dd683e9bfb2c4059022": {
390 + "model_module": "@jupyter-widgets/base",
391 + "model_name": "LayoutModel",
392 + "state": {
393 + "_view_name": "LayoutView",
394 + "grid_template_rows": null,
395 + "right": null,
396 + "justify_content": null,
397 + "_view_module": "@jupyter-widgets/base",
398 + "overflow": null,
399 + "_model_module_version": "1.2.0",
400 + "_view_count": null,
401 + "flex_flow": null,
402 + "width": null,
403 + "min_width": null,
404 + "border": null,
405 + "align_items": null,
406 + "bottom": null,
407 + "_model_module": "@jupyter-widgets/base",
408 + "top": null,
409 + "grid_column": null,
410 + "overflow_y": null,
411 + "overflow_x": null,
412 + "grid_auto_flow": null,
413 + "grid_area": null,
414 + "grid_template_columns": null,
415 + "flex": null,
416 + "_model_name": "LayoutModel",
417 + "justify_items": null,
418 + "grid_row": null,
419 + "max_height": null,
420 + "align_content": null,
421 + "visibility": null,
422 + "align_self": null,
423 + "height": null,
424 + "min_height": null,
425 + "padding": null,
426 + "grid_auto_rows": null,
427 + "grid_gap": null,
428 + "max_width": null,
429 + "order": null,
430 + "_view_module_version": "1.2.0",
431 + "grid_template_areas": null,
432 + "object_position": null,
433 + "object_fit": null,
434 + "grid_auto_columns": null,
435 + "margin": null,
436 + "display": null,
437 + "left": null
438 + }
439 + },
440 + "1c55789eede0464f85386b4e41c46c06": {
441 + "model_module": "@jupyter-widgets/controls",
442 + "model_name": "DescriptionStyleModel",
443 + "state": {
444 + "_view_name": "StyleView",
445 + "_model_name": "DescriptionStyleModel",
446 + "description_width": "",
447 + "_view_module": "@jupyter-widgets/base",
448 + "_model_module_version": "1.5.0",
449 + "_view_count": null,
450 + "_view_module_version": "1.2.0",
451 + "_model_module": "@jupyter-widgets/controls"
452 + }
453 + },
454 + "b5def479898f453fb51cd221ff78b1e4": {
455 + "model_module": "@jupyter-widgets/base",
456 + "model_name": "LayoutModel",
457 + "state": {
458 + "_view_name": "LayoutView",
459 + "grid_template_rows": null,
460 + "right": null,
461 + "justify_content": null,
462 + "_view_module": "@jupyter-widgets/base",
463 + "overflow": null,
464 + "_model_module_version": "1.2.0",
465 + "_view_count": null,
466 + "flex_flow": null,
467 + "width": null,
468 + "min_width": null,
469 + "border": null,
470 + "align_items": null,
471 + "bottom": null,
472 + "_model_module": "@jupyter-widgets/base",
473 + "top": null,
474 + "grid_column": null,
475 + "overflow_y": null,
476 + "overflow_x": null,
477 + "grid_auto_flow": null,
478 + "grid_area": null,
479 + "grid_template_columns": null,
480 + "flex": null,
481 + "_model_name": "LayoutModel",
482 + "justify_items": null,
483 + "grid_row": null,
484 + "max_height": null,
485 + "align_content": null,
486 + "visibility": null,
487 + "align_self": null,
488 + "height": null,
489 + "min_height": null,
490 + "padding": null,
491 + "grid_auto_rows": null,
492 + "grid_gap": null,
493 + "max_width": null,
494 + "order": null,
495 + "_view_module_version": "1.2.0",
496 + "grid_template_areas": null,
497 + "object_position": null,
498 + "object_fit": null,
499 + "grid_auto_columns": null,
500 + "margin": null,
501 + "display": null,
502 + "left": null
503 + }
504 + },
505 + "7e00f631bf7c4557bcaeea97f26b3bb8": {
506 + "model_module": "@jupyter-widgets/controls",
507 + "model_name": "HBoxModel",
508 + "state": {
509 + "_view_name": "HBoxView",
510 + "_dom_classes": [],
511 + "_model_name": "HBoxModel",
512 + "_view_module": "@jupyter-widgets/controls",
513 + "_model_module_version": "1.5.0",
514 + "_view_count": null,
515 + "_view_module_version": "1.5.0",
516 + "box_style": "",
517 + "layout": "IPY_MODEL_ca6631cb27f941fa92a7ff08cfb5fdde",
518 + "_model_module": "@jupyter-widgets/controls",
519 + "children": [
520 + "IPY_MODEL_d97d7a015ffa428696024cef965e789d",
521 + "IPY_MODEL_716a044bb3ad4f2081d064d690d40fd9"
522 + ]
523 + }
524 + },
525 + "ca6631cb27f941fa92a7ff08cfb5fdde": {
526 + "model_module": "@jupyter-widgets/base",
527 + "model_name": "LayoutModel",
528 + "state": {
529 + "_view_name": "LayoutView",
530 + "grid_template_rows": null,
531 + "right": null,
532 + "justify_content": null,
533 + "_view_module": "@jupyter-widgets/base",
534 + "overflow": null,
535 + "_model_module_version": "1.2.0",
536 + "_view_count": null,
537 + "flex_flow": null,
538 + "width": null,
539 + "min_width": null,
540 + "border": null,
541 + "align_items": null,
542 + "bottom": null,
543 + "_model_module": "@jupyter-widgets/base",
544 + "top": null,
545 + "grid_column": null,
546 + "overflow_y": null,
547 + "overflow_x": null,
548 + "grid_auto_flow": null,
549 + "grid_area": null,
550 + "grid_template_columns": null,
551 + "flex": null,
552 + "_model_name": "LayoutModel",
553 + "justify_items": null,
554 + "grid_row": null,
555 + "max_height": null,
556 + "align_content": null,
557 + "visibility": null,
558 + "align_self": null,
559 + "height": null,
560 + "min_height": null,
561 + "padding": null,
562 + "grid_auto_rows": null,
563 + "grid_gap": null,
564 + "max_width": null,
565 + "order": null,
566 + "_view_module_version": "1.2.0",
567 + "grid_template_areas": null,
568 + "object_position": null,
569 + "object_fit": null,
570 + "grid_auto_columns": null,
571 + "margin": null,
572 + "display": null,
573 + "left": null
574 + }
575 + },
576 + "d97d7a015ffa428696024cef965e789d": {
577 + "model_module": "@jupyter-widgets/controls",
578 + "model_name": "FloatProgressModel",
579 + "state": {
580 + "_view_name": "ProgressView",
581 + "style": "IPY_MODEL_14c4dc6f2e3b48aa924966e737ed73ff",
582 + "_dom_classes": [],
583 + "description": "Downloading: 100%",
584 + "_model_name": "FloatProgressModel",
585 + "bar_style": "success",
586 + "max": 435779157,
587 + "_view_module": "@jupyter-widgets/controls",
588 + "_model_module_version": "1.5.0",
589 + "value": 435779157,
590 + "_view_count": null,
591 + "_view_module_version": "1.5.0",
592 + "orientation": "horizontal",
593 + "min": 0,
594 + "description_tooltip": null,
595 + "_model_module": "@jupyter-widgets/controls",
596 + "layout": "IPY_MODEL_2df44811e03f4474ab053a62de70c160"
597 + }
598 + },
599 + "716a044bb3ad4f2081d064d690d40fd9": {
600 + "model_module": "@jupyter-widgets/controls",
601 + "model_name": "HTMLModel",
602 + "state": {
603 + "_view_name": "HTMLView",
604 + "style": "IPY_MODEL_dcc537362666468c8994a0eca019d05c",
605 + "_dom_classes": [],
606 + "description": "",
607 + "_model_name": "HTMLModel",
608 + "placeholder": "​",
609 + "_view_module": "@jupyter-widgets/controls",
610 + "_model_module_version": "1.5.0",
611 + "value": " 436M/436M [00:12&lt;00:00, 36.1MB/s]",
612 + "_view_count": null,
613 + "_view_module_version": "1.5.0",
614 + "description_tooltip": null,
615 + "_model_module": "@jupyter-widgets/controls",
616 + "layout": "IPY_MODEL_2eb87bd2ec0a4382b77b8562d0ac8dc8"
617 + }
618 + },
619 + "14c4dc6f2e3b48aa924966e737ed73ff": {
620 + "model_module": "@jupyter-widgets/controls",
621 + "model_name": "ProgressStyleModel",
622 + "state": {
623 + "_view_name": "StyleView",
624 + "_model_name": "ProgressStyleModel",
625 + "description_width": "initial",
626 + "_view_module": "@jupyter-widgets/base",
627 + "_model_module_version": "1.5.0",
628 + "_view_count": null,
629 + "_view_module_version": "1.2.0",
630 + "bar_color": null,
631 + "_model_module": "@jupyter-widgets/controls"
632 + }
633 + },
634 + "2df44811e03f4474ab053a62de70c160": {
635 + "model_module": "@jupyter-widgets/base",
636 + "model_name": "LayoutModel",
637 + "state": {
638 + "_view_name": "LayoutView",
639 + "grid_template_rows": null,
640 + "right": null,
641 + "justify_content": null,
642 + "_view_module": "@jupyter-widgets/base",
643 + "overflow": null,
644 + "_model_module_version": "1.2.0",
645 + "_view_count": null,
646 + "flex_flow": null,
647 + "width": null,
648 + "min_width": null,
649 + "border": null,
650 + "align_items": null,
651 + "bottom": null,
652 + "_model_module": "@jupyter-widgets/base",
653 + "top": null,
654 + "grid_column": null,
655 + "overflow_y": null,
656 + "overflow_x": null,
657 + "grid_auto_flow": null,
658 + "grid_area": null,
659 + "grid_template_columns": null,
660 + "flex": null,
661 + "_model_name": "LayoutModel",
662 + "justify_items": null,
663 + "grid_row": null,
664 + "max_height": null,
665 + "align_content": null,
666 + "visibility": null,
667 + "align_self": null,
668 + "height": null,
669 + "min_height": null,
670 + "padding": null,
671 + "grid_auto_rows": null,
672 + "grid_gap": null,
673 + "max_width": null,
674 + "order": null,
675 + "_view_module_version": "1.2.0",
676 + "grid_template_areas": null,
677 + "object_position": null,
678 + "object_fit": null,
679 + "grid_auto_columns": null,
680 + "margin": null,
681 + "display": null,
682 + "left": null
683 + }
684 + },
685 + "dcc537362666468c8994a0eca019d05c": {
686 + "model_module": "@jupyter-widgets/controls",
687 + "model_name": "DescriptionStyleModel",
688 + "state": {
689 + "_view_name": "StyleView",
690 + "_model_name": "DescriptionStyleModel",
691 + "description_width": "",
692 + "_view_module": "@jupyter-widgets/base",
693 + "_model_module_version": "1.5.0",
694 + "_view_count": null,
695 + "_view_module_version": "1.2.0",
696 + "_model_module": "@jupyter-widgets/controls"
697 + }
698 + },
699 + "2eb87bd2ec0a4382b77b8562d0ac8dc8": {
700 + "model_module": "@jupyter-widgets/base",
701 + "model_name": "LayoutModel",
702 + "state": {
703 + "_view_name": "LayoutView",
704 + "grid_template_rows": null,
705 + "right": null,
706 + "justify_content": null,
707 + "_view_module": "@jupyter-widgets/base",
708 + "overflow": null,
709 + "_model_module_version": "1.2.0",
710 + "_view_count": null,
711 + "flex_flow": null,
712 + "width": null,
713 + "min_width": null,
714 + "border": null,
715 + "align_items": null,
716 + "bottom": null,
717 + "_model_module": "@jupyter-widgets/base",
718 + "top": null,
719 + "grid_column": null,
720 + "overflow_y": null,
721 + "overflow_x": null,
722 + "grid_auto_flow": null,
723 + "grid_area": null,
724 + "grid_template_columns": null,
725 + "flex": null,
726 + "_model_name": "LayoutModel",
727 + "justify_items": null,
728 + "grid_row": null,
729 + "max_height": null,
730 + "align_content": null,
731 + "visibility": null,
732 + "align_self": null,
733 + "height": null,
734 + "min_height": null,
735 + "padding": null,
736 + "grid_auto_rows": null,
737 + "grid_gap": null,
738 + "max_width": null,
739 + "order": null,
740 + "_view_module_version": "1.2.0",
741 + "grid_template_areas": null,
742 + "object_position": null,
743 + "object_fit": null,
744 + "grid_auto_columns": null,
745 + "margin": null,
746 + "display": null,
747 + "left": null
748 + }
749 + }
750 + }
751 + }
752 + },
753 + "cells": [
754 + {
755 + "cell_type": "code",
756 + "metadata": {
757 + "id": "N3qUV5UzKg0E",
758 + "colab_type": "code",
759 + "colab": {
760 + "base_uri": "https://localhost:8080/",
761 + "height": 122
762 + },
763 + "outputId": "80f26292-1a7c-4f58-de91-810f46f754fc"
764 + },
765 + "source": [
766 + "from google.colab import auth\n",
767 + "auth.authenticate_user()\n",
768 + "\n",
769 + "from google.colab import drive\n",
770 + "drive.mount('/content/gdrive')"
771 + ],
772 + "execution_count": null,
773 + "outputs": [
774 + {
775 + "output_type": "stream",
776 + "text": [
777 + "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n",
778 + "\n",
779 + "Enter your authorization code:\n",
780 + "··········\n",
781 + "Mounted at /content/gdrive\n"
782 + ],
783 + "name": "stdout"
784 + }
785 + ]
786 + },
787 + {
788 + "cell_type": "code",
789 + "metadata": {
790 + "id": "VWsgaghyKio5",
791 + "colab_type": "code",
792 + "colab": {
793 + "base_uri": "https://localhost:8080/",
794 + "height": 51
795 + },
796 + "outputId": "cce2685d-a26e-4924-d908-2e135107e7eb"
797 + },
798 + "source": [
799 + "import pandas as pd\n",
800 + "combined_data = pd.read_csv('gdrive/My Drive/capstone 2/event_embedding/Thesis_data/keyword_data2.csv', encoding='utf-8') \n",
801 + "test_data = pd.read_csv('gdrive/My Drive/capstone 2/event_embedding/Thesis_data/keyword_data2015.csv', encoding='utf-8') \n",
802 + "\n",
803 + "combined_data\n",
804 + "\n",
805 + "print(len(combined_data))\n",
806 + "print(len(test_data))\n",
807 + "path = \"gdrive/My Drive/capstone 2/\""
808 + ],
809 + "execution_count": null,
810 + "outputs": [
811 + {
812 + "output_type": "stream",
813 + "text": [
814 + "2427\n",
815 + "253\n"
816 + ],
817 + "name": "stdout"
818 + }
819 + ]
820 + },
821 + {
822 + "cell_type": "code",
823 + "metadata": {
824 + "id": "s89npKfoxupw",
825 + "colab_type": "code",
826 + "colab": {
827 + "base_uri": "https://localhost:8080/",
828 + "height": 419
829 + },
830 + "outputId": "555fd7a1-e95f-4fb7-8de4-c2f9c0fa5546"
831 + },
832 + "source": [
833 + "combined_data"
834 + ],
835 + "execution_count": null,
836 + "outputs": [
837 + {
838 + "output_type": "execute_result",
839 + "data": {
840 + "text/html": [
841 + "<div>\n",
842 + "<style scoped>\n",
843 + " .dataframe tbody tr th:only-of-type {\n",
844 + " vertical-align: middle;\n",
845 + " }\n",
846 + "\n",
847 + " .dataframe tbody tr th {\n",
848 + " vertical-align: top;\n",
849 + " }\n",
850 + "\n",
851 + " .dataframe thead th {\n",
852 + " text-align: right;\n",
853 + " }\n",
854 + "</style>\n",
855 + "<table border=\"1\" class=\"dataframe\">\n",
856 + " <thead>\n",
857 + " <tr style=\"text-align: right;\">\n",
858 + " <th></th>\n",
859 + " <th>date</th>\n",
860 + " <th>title</th>\n",
861 + " <th>price</th>\n",
862 + " <th>open</th>\n",
863 + " <th>high</th>\n",
864 + " <th>low</th>\n",
865 + " <th>volume</th>\n",
866 + " <th>change</th>\n",
867 + " <th>index</th>\n",
868 + " </tr>\n",
869 + " </thead>\n",
870 + " <tbody>\n",
871 + " <tr>\n",
872 + " <th>0</th>\n",
873 + " <td>20050107</td>\n",
874 + " <td>Stocks End Lower Vital Signs for the Week of J...</td>\n",
875 + " <td>4.93</td>\n",
876 + " <td>4.99</td>\n",
877 + " <td>5.05</td>\n",
878 + " <td>4.85</td>\n",
879 + " <td>434.26M</td>\n",
880 + " <td>-0.40%</td>\n",
881 + " <td>0</td>\n",
882 + " </tr>\n",
883 + " <tr>\n",
884 + " <th>1</th>\n",
885 + " <td>20050110</td>\n",
886 + " <td>Tightwad IT Buyers Loosen Up Stocks Finish Sli...</td>\n",
887 + " <td>4.61</td>\n",
888 + " <td>4.88</td>\n",
889 + " <td>4.94</td>\n",
890 + " <td>4.58</td>\n",
891 + " <td>654.04M</td>\n",
892 + " <td>-6.49%</td>\n",
893 + " <td>0</td>\n",
894 + " </tr>\n",
895 + " <tr>\n",
896 + " <th>2</th>\n",
897 + " <td>20050111</td>\n",
898 + " <td>Stocks Finish Lower Tech Stocks' Date with Rea...</td>\n",
899 + " <td>4.68</td>\n",
900 + " <td>4.67</td>\n",
901 + " <td>4.71</td>\n",
902 + " <td>4.52</td>\n",
903 + " <td>507.50M</td>\n",
904 + " <td>1.52%</td>\n",
905 + " <td>1</td>\n",
906 + " </tr>\n",
907 + " <tr>\n",
908 + " <th>3</th>\n",
909 + " <td>20050112</td>\n",
910 + " <td>Apple Beats the Street The 90% Solution to IP ...</td>\n",
911 + " <td>4.99</td>\n",
912 + " <td>5.26</td>\n",
913 + " <td>5.32</td>\n",
914 + " <td>4.98</td>\n",
915 + " <td>792.41M</td>\n",
916 + " <td>6.62%</td>\n",
917 + " <td>1</td>\n",
918 + " </tr>\n",
919 + " <tr>\n",
920 + " <th>4</th>\n",
921 + " <td>20050113</td>\n",
922 + " <td>Sun Micro Misses Revenue Estimates Prudential ...</td>\n",
923 + " <td>5.01</td>\n",
924 + " <td>5.01</td>\n",
925 + " <td>5.12</td>\n",
926 + " <td>4.94</td>\n",
927 + " <td>442.85M</td>\n",
928 + " <td>0.40%</td>\n",
929 + " <td>1</td>\n",
930 + " </tr>\n",
931 + " <tr>\n",
932 + " <th>...</th>\n",
933 + " <td>...</td>\n",
934 + " <td>...</td>\n",
935 + " <td>...</td>\n",
936 + " <td>...</td>\n",
937 + " <td>...</td>\n",
938 + " <td>...</td>\n",
939 + " <td>...</td>\n",
940 + " <td>...</td>\n",
941 + " <td>...</td>\n",
942 + " </tr>\n",
943 + " <tr>\n",
944 + " <th>2422</th>\n",
945 + " <td>20150102</td>\n",
946 + " <td>‘Van Gogh or Van Goo’ Matters Little to Billio...</td>\n",
947 + " <td>106.25</td>\n",
948 + " <td>108.29</td>\n",
949 + " <td>108.65</td>\n",
950 + " <td>105.41</td>\n",
951 + " <td>64.29M</td>\n",
952 + " <td>-2.82%</td>\n",
953 + " <td>0</td>\n",
954 + " </tr>\n",
955 + " <tr>\n",
956 + " <th>2423</th>\n",
957 + " <td>20150105</td>\n",
958 + " <td>Berkshire Soars as Buffett Shifts Focus to Tak...</td>\n",
959 + " <td>106.26</td>\n",
960 + " <td>106.54</td>\n",
961 + " <td>107.43</td>\n",
962 + " <td>104.63</td>\n",
963 + " <td>65.80M</td>\n",
964 + " <td>0.01%</td>\n",
965 + " <td>1</td>\n",
966 + " </tr>\n",
967 + " <tr>\n",
968 + " <th>2424</th>\n",
969 + " <td>20150106</td>\n",
970 + " <td>HTC Posts First Sales Growth in 3 Years on New...</td>\n",
971 + " <td>107.75</td>\n",
972 + " <td>107.20</td>\n",
973 + " <td>108.20</td>\n",
974 + " <td>106.69</td>\n",
975 + " <td>40.11M</td>\n",
976 + " <td>1.40%</td>\n",
977 + " <td>1</td>\n",
978 + " </tr>\n",
979 + " <tr>\n",
980 + " <th>2425</th>\n",
981 + " <td>20150107</td>\n",
982 + " <td>Intel CEO Krzanich Shows off Wearable Chipset,...</td>\n",
983 + " <td>111.89</td>\n",
984 + " <td>109.23</td>\n",
985 + " <td>112.15</td>\n",
986 + " <td>108.70</td>\n",
987 + " <td>59.36M</td>\n",
988 + " <td>3.84%</td>\n",
989 + " <td>1</td>\n",
990 + " </tr>\n",
991 + " <tr>\n",
992 + " <th>2426</th>\n",
993 + " <td>20150108</td>\n",
994 + " <td>Xiaomi Buying Spree Gives Apple, Samsung Reaso...</td>\n",
995 + " <td>112.01</td>\n",
996 + " <td>112.67</td>\n",
997 + " <td>113.25</td>\n",
998 + " <td>110.21</td>\n",
999 + " <td>53.70M</td>\n",
1000 + " <td>0.11%</td>\n",
1001 + " <td>1</td>\n",
1002 + " </tr>\n",
1003 + " </tbody>\n",
1004 + "</table>\n",
1005 + "<p>2427 rows × 9 columns</p>\n",
1006 + "</div>"
1007 + ],
1008 + "text/plain": [
1009 + " date ... index\n",
1010 + "0 20050107 ... 0\n",
1011 + "1 20050110 ... 0\n",
1012 + "2 20050111 ... 1\n",
1013 + "3 20050112 ... 1\n",
1014 + "4 20050113 ... 1\n",
1015 + "... ... ... ...\n",
1016 + "2422 20150102 ... 0\n",
1017 + "2423 20150105 ... 1\n",
1018 + "2424 20150106 ... 1\n",
1019 + "2425 20150107 ... 1\n",
1020 + "2426 20150108 ... 1\n",
1021 + "\n",
1022 + "[2427 rows x 9 columns]"
1023 + ]
1024 + },
1025 + "metadata": {
1026 + "tags": []
1027 + },
1028 + "execution_count": 3
1029 + }
1030 + ]
1031 + },
1032 + {
1033 + "cell_type": "code",
1034 + "metadata": {
1035 + "id": "2OsBf-PiSF_1",
1036 + "colab_type": "code",
1037 + "colab": {
1038 + "base_uri": "https://localhost:8080/",
1039 + "height": 265
1040 + },
1041 + "outputId": "7cd8ac0b-b226-48d6-dd30-bbb0268f3349"
1042 + },
1043 + "source": [
1044 + "lenlist = []\n",
1045 + "\n",
1046 + "for _,item in enumerate(combined_data[\"title\"]):\n",
1047 + " lenlist.append(len(item))\n",
1048 + "\n",
1049 + "import matplotlib.pyplot as plt\n",
1050 + "n, bins, patches = plt.hist(lenlist, bins=10)\n",
1051 + "plt.show()"
1052 + ],
1053 + "execution_count": null,
1054 + "outputs": [
1055 + {
1056 + "output_type": "display_data",
1057 + "data": {
1058 + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAD4CAYAAAAAczaOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAARy0lEQVR4nO3df4ylV13H8ffHXVoQlN22k6bubtxFN5pKEJq11mCIYbW0hbg1qaTEyIpNNmpRFA1sJbH+CEnxV5UEa1ZaWJQAtWK6kSqubQ3xjxamUEp/UDqWYnfTdgf6Q5EAVr/+cc/AZZjZ7dw7c+d2z/uV3NzznOfc+3zn7Mxnnj33uXdSVUiS+vAd612AJGlyDH1J6oihL0kdMfQlqSOGviR1ZON6F3A8Z5xxRm3fvn29y5CkZ5U77rjji1U1s9S+qQ797du3Mzs7u95lSNKzSpIvLLfP5R1J6oihL0kdMfQlqSOGviR1xNCXpI4Y+pLUEUNfkjpi6EtSRwx9SerIVL8jd1zb939kXY770FWvXpfjStKJeKYvSR05YegnuS7JsSR3D/X9UZLPJrkryd8n2TS074okc0nuT/Kqof4LWt9ckv2r/6VIkk7kmZzpvxe4YFHfYeDFVfUS4HPAFQBJzgYuBX6oPeYvkmxIsgF4F3AhcDbwujZWkjRBJwz9qvoY8Piivn+uqqfb5m3A1tbeA3ywqr5WVZ8H5oBz222uqh6sqq8DH2xjJUkTtBpr+r8I/GNrbwEeHtp3pPUt1/9tkuxLMptkdn5+fhXKkyQtGCv0k7wNeBp4/+qUA1V1oKp2VdWumZkl/waAJGlEI1+ymeQXgNcAu6uqWvdRYNvQsK2tj+P0S5ImZKQz/SQXAG8BfrqqvjK06xBwaZJTk+wAdgIfBz4B7EyyI8kpDF7sPTRe6ZKklTrhmX6SDwA/AZyR5AhwJYOrdU4FDicBuK2qfqmq7klyPXAvg2Wfy6vqf9vzvBH4KLABuK6q7lmDr0eSdBwnDP2qet0S3dceZ/zbgbcv0X8TcNOKqpMkrSrfkStJHTH0Jakjhr4kdcTQl6SOGPqS1BFDX5I6YuhLUkcMfUnqiKEvSR0x9CWpI4a+JHXE0Jekjhj6ktQRQ1+SOmLoS1JHDH1J6oihL0kdMfQlqSOGviR1xNCXpI4Y+pLUEUNfkjpi6EtSRwx9SeqIoS9JHTlh6Ce5LsmxJHcP9Z2W5HCSB9r95tafJO9MMpfkriTnDD1mbxv/QJK9a/PlSJKO55mc6b8XuGBR337g5qraCdzctgEuBHa22z7gGhj8kgCuBH4UOBe4cuEXhSRpck4Y+lX1MeDxRd17gIOtfRC4eKj/fTVwG7ApyVnAq4DDVfV4VT0BHObbf5FIktbYqGv6Z1bVI639KHBma28BHh4ad6T1Ldf/bZLsSzKbZHZ+fn7E8iRJSxn7hdyqKqBWoZaF5ztQVbuqatfMzMxqPa0kidFD/7G2bEO7P9b6jwLbhsZtbX3L9UuSJmjU0D8ELFyBsxe4caj/9e0qnvOAp9oy0EeB85Nsbi/gnt/6JEkTtPFEA5J8APgJ4IwkRxhchXMVcH2Sy4AvAK9tw28CLgLmgK8AbwCoqseT/AHwiTbu96tq8YvDkqQ1dsLQr6rXLbNr9xJjC7h8mee5DrhuRdVJklaV78iVpI4Y+pLUEUNfkjpi6EtSRwx9SeqIoS9JHTH0Jakjhr4kdcTQl6SOGPqS1BFDX5I6YuhLUkcMfUnqiKEvSR0x9CWpI4a+JHXE0Jekjhj6ktQRQ1+SOmLoS1JHDH1J6oihL0kdMfQlqSOGviR1ZKzQT/IbSe5JcneSDyR5bpIdSW5PMpfkQ0lOaWNPbdtzbf/21fgCJEnP3Mihn2QL8GvArqp6MbABuBR4B3B1VX0/8ARwWXvIZcATrf/qNk6SNEHjLu9sBJ6XZCPwncAjwCuBG9r+g8DFrb2nbdP2706SMY8vSVqBkUO/qo4Cfwz8B4Owfwq4A3iyqp5uw44AW1p7C/Bwe+zTbfzpox5fkrRy4yzvbGZw9r4D+B7g+cAF4xaUZF+S2SSz8/Pz4z6dJGnIOMs7Pwl8vqrmq+p/gA8DLwc2teUegK3A0dY+CmwDaPtfCHxp8ZNW1YGq2lVVu2ZmZsYoT5K02Dih/x/AeUm+s63N7wbuBW4FLmlj9gI3tvahtk3bf0tV1RjHlySt0Dhr+rczeEH2k8Bn2nMdAN4KvDnJHIM1+2vbQ64FTm/9bwb2j1G3JGkEG088ZHlVdSVw5aLuB4Fzlxj7VeBnxzmeJGk8viNXkjpi6EtSRwx9SeqIoS9JHTH0Jakjhr4kdcTQl6SOGPqS1BFDX5I6YuhLUkcMfUnqiKEvSR0x9CWpI4a+JHXE0Jekjhj6ktQRQ1+SOmLoS1JHDH1J6oihL0kdMfQlqSOGviR1xNCXpI4Y+pLUEUNfkjoyVugn2ZTkhiSfTXJfkh9LclqSw0keaPeb29gkeWeSuSR3JTlndb4ESdIzNe6Z/p8D/1RVPwj8MHAfsB+4uap2Aje3bYALgZ3ttg+4ZsxjS5JWaOTQT/JC4BXAtQBV9fWqehLYAxxsww4CF7f2HuB9NXAbsCnJWSNXLklasXHO9HcA88B7knwqybuTPB84s6oeaWMeBc5s7S3Aw0OPP9L6vkWSfUlmk8zOz8+PUZ4kabFxQn8jcA5wTVW9DPhvvrmUA0BVFVAredKqOlBVu6pq18zMzBjlSZIWGyf0jwBHqur2tn0Dg18Cjy0s27T7Y23/UWDb0OO3tj5J0oSMHPpV9SjwcJIfaF27gXuBQ8De1rcXuLG1DwGvb1fxnAc8NbQMJEmagI1jPv5XgfcnOQV4EHgDg18k1ye5DPgC8No29ibgImAO+EobK0maoLFCv6ruBHYtsWv3EmMLuHyc40mSxuM7ciWpI4a+JHXE0Jekjhj6ktQRQ1+SOjLuJZtawvb9H1m3Yz901avX7diSpp9n+pLUEUNfkjpi6EtSRwx9SeqIoS9JHTH0Jakjhr4kdcTQl6SOGPqS1BFDX5I6YuhLUkcMfUnqiKEvSR0x9CWpI4a+JHXE0JekjvhHVE4y6/UHXPzjLdKzg2f6ktQRQ1+SOjJ26CfZkORTSf6hbe9IcnuSuSQfSnJK6z+1bc+1/dvHPbYkaWVW40z/TcB9Q9vvAK6uqu8HngAua/2XAU+0/qvbOEnSBI0V+km2Aq8G3t22A7wSuKENOQhc3Np72jZt/+42XpI0IeOe6f8Z8Bbg/9r26cCTVfV02z4CbGntLcDDAG3/U238t0iyL8lsktn5+fkxy5MkDRs59JO8BjhWVXesYj1U1YGq2lVVu2ZmZlbzqSWpe+Ncp/9y4KeTXAQ8F/hu4M+BTUk2trP5rcDRNv4osA04kmQj8ELgS2McX5K0QiOf6VfVFVW1taq2A5cCt1TVzwG3Ape0YXuBG1v7UNum7b+lqmrU40uSVm4trtN/K/DmJHMM1uyvbf3XAqe3/jcD+9fg2JKk41iVj2Goqn
8F/rW1HwTOXWLMV4GfXY3jSZJG4ztyJakjhr4kdcTQl6SOGPqS1BFDX5I6YuhLUkcMfUnqiKEvSR0x9CWpI4a+JHXE0Jekjhj6ktQRQ1+SOmLoS1JHDH1J6oihL0kdMfQlqSOGviR1xNCXpI4Y+pLUEUNfkjpi6EtSRwx9SeqIoS9JHRk59JNsS3JrknuT3JPkTa3/tCSHkzzQ7je3/iR5Z5K5JHclOWe1vghJ0jMzzpn+08BvVtXZwHnA5UnOBvYDN1fVTuDmtg1wIbCz3fYB14xxbEnSCEYO/ap6pKo+2dr/BdwHbAH2AAfbsIPAxa29B3hfDdwGbEpy1siVS5JWbFXW9JNsB14G3A6cWVWPtF2PAme29hbg4aGHHWl9i59rX5LZJLPz8/OrUZ4kqRk79JO8APg74Ner6j+H91VVAbWS56uqA1W1q6p2zczMjFueJGnIWKGf5DkMAv/9VfXh1v3YwrJNuz/W+o8C24YevrX1SZImZJyrdwJcC9xXVX86tOsQsLe19wI3DvW/vl3Fcx7w1NAykCRpAjaO8diXAz8PfCbJna3vt4GrgOuTXAZ8AXht23cTcBEwB3wFeMMYx5YkjWDk0K+qfwOyzO7dS4wv4PJRjydJGp/vyJWkjhj6ktSRcdb0pW/Yvv8j63Lch6569bocV3q28kxfkjpi6EtSRwx9SeqIoS9JHTH0Jakjhr4kdcTQl6SOGPqS1BFDX5I6YuhLUkcMfUnqiKEvSR0x9CWpI4a+JHXE0Jekjhj6ktQR/4iKntXW64+3gH/ARc9OnulLUkcMfUnqiKEvSR0x9CWpI4a+JHVk4qGf5IIk9yeZS7J/0seXpJ5NNPSTbADeBVwInA28LsnZk6xBkno26ev0zwXmqupBgCQfBPYA9064Dmls6/kegfXS43sT1uvfea3metKhvwV4eGj7CPCjwwOS7AP2tc0vJ7l/xGOdAXxxxMdOwrTXB9Nf47TXB9Nf44rqyzvWsJLlnVRz+EyNOdffu9yOqXtHblUdAA6M+zxJZqtq1yqUtCamvT6Y/hqnvT6Y/hqnvT6Y/hqnvb7FJv1C7lFg29D21tYnSZqASYf+J4CdSXYkOQW4FDg04RokqVsTXd6pqqeTvBH4KLABuK6q7lmjw429RLTGpr0+mP4ap70+mP4ap70+mP4ap72+b5GqWu8aJEkT4jtyJakjhr4kdeSkC/1p+piHJA8l+UySO5PMtr7TkhxO8kC739z6k+Sdre67kpyzBvVcl+RYkruH+lZcT5K9bfwDSfZOoMbfTXK0zeOdSS4a2ndFq/H+JK8a6l+T74Mk25LcmuTeJPckeVPrn5p5PE6NUzGPSZ6b5ONJPt3q+73WvyPJ7e1YH2oXe5Dk1LY91/ZvP1Hda1jje5N8fmgOX9r61+XnZSRVddLcGLw4/O/Ai4BTgE8DZ69jPQ8BZyzq+0Ngf2vvB97R2hcB/wgEOA+4fQ3qeQVwDnD3qPUApwEPtvvNrb15jWv8XeC3lhh7dvs3PhXY0f7tN6zl9wFwFnBOa38X8LlWx9TM43FqnIp5bHPxgtZ+DnB7m5vrgUtb/18Cv9zavwL8ZWtfCnzoeHWv0hwuV+N7gUuWGL8uPy+j3E62M/1vfMxDVX0dWPiYh2myBzjY2geBi4f631cDtwGbkpy1mgeuqo8Bj49Zz6uAw1X1eFU9ARwGLljjGpezB/hgVX2tqj4PzDH4Hliz74OqeqSqPtna/wXcx+Cd5lMzj8epcTkTncc2F19um89ptwJeCdzQ+hfP4cLc3gDsTpLj1D2249S4nHX5eRnFyRb6S33Mw/G+2ddaAf+c5I4MPl4C4MyqeqS1HwXObO31qn2l9axXnW9s/22+bmHpZL1rbMsML2NwFjiV87ioRpiSeUyyIcmdwDEGQfjvwJNV9fQSx/pGHW3/U8Dpa1nfUjVW1cIcvr3N4dVJTl1c46Japi2TTrrQnzY/XlXnMPhU0cuTvGJ4Zw3+/zc118xOWz1DrgG+D3gp8AjwJ+tbDiR5AfB3wK9X1X8O75uWeVyixqmZx6r636p6KYN35Z8L/OB61bKcxTUmeTFwBYNaf4TBks1b17HEkZxsoT9VH/NQVUfb/THg7xl8cz+2sGzT7o+14etV+0rrmXidVfVY+wH8P+Cv+OZ/4delxiTPYRCm76+qD7fuqZrHpWqctnlsNT0J3Ar8GIMlkYU3jA4f6xt1tP0vBL40ifoW1XhBWzqrqvoa8B6mYA5X6mQL/an5mIckz0/yXQtt4Hzg7lbPwiv4e4EbW/sQ8Pp2FcB5wFNDywVraaX1fBQ4P8nmtjxwfutbM4te2/gZBvO4UOOl7eqOHcBO4OOs4fdBW0u+Frivqv50aNfUzONyNU7LPCaZSbKptZ8H/BSD1x1uBS5pwxbP4cLcXgLc0v43tVzdY1umxs8O/WIPg9cchudwKn5eTmiSrxpP4sbgVfTPMVgjfNs61vEiBlcWfBq4Z6EWBmuRNwMPAP8CnFbfvFrgXa3uzwC71qCmDzD4b/3/MFhbvGyUeoBfZPCi2RzwhgnU+NethrsY/HCdNTT+ba3G+4EL1/r7APhxBks3dwF3tttF0zSPx6lxKuYReAnwqVbH3cDvDP3MfLzNx98Cp7b+57btubb/RSeqew1rvKXN4d3A3/DNK3zW5edllJsfwyBJHTnZlnckScdh6EtSRwx9SeqIoS9JHTH0Jakjhr4kdcTQl6SO/D/JXcFTIBn7OQAAAABJRU5ErkJggg==\n",
1059 + "text/plain": [
1060 + "<Figure size 432x288 with 1 Axes>"
1061 + ]
1062 + },
1063 + "metadata": {
1064 + "tags": [],
1065 + "needs_background": "light"
1066 + }
1067 + }
1068 + ]
1069 + },
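The histogram motivates the choice of `max_seq` in the cells below. A quick numeric check of the same distribution (note that `len(item)` above counts characters, not WordPiece tokens, so this is only a rough proxy for token length):

```python
import numpy as np

lengths = combined_data["title"].str.len()
# How long the concatenated headlines get, and roughly what fraction
# of the corpus a given max_seq would cover before truncation.
print(lengths.describe())
print("95th percentile:", np.percentile(lengths, 95))
```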
1070 + {
1071 + "cell_type": "code",
1072 + "metadata": {
1073 + "id": "NMPfAqICKf2a",
1074 + "colab_type": "code",
1075 + "colab": {
1076 + "base_uri": "https://localhost:8080/",
1077 + "height": 581
1078 + },
1079 + "outputId": "31fe53cd-6d2d-4a3a-9bd0-ec17159758f3"
1080 + },
1081 + "source": [
1082 + "!pip install transformers\n",
1083 + "import logging\n",
1084 + "import time\n",
1085 + "from platform import python_version\n",
1086 + "import matplotlib\n",
1087 + "import matplotlib.pyplot as plt\n",
1088 + "import numpy as np\n",
1089 + "import pandas as pd\n",
1090 + "import sklearn\n",
1091 + "import torch\n",
1092 + "import torch.nn as nn\n",
1093 + "import torch.nn.functional as F\n",
1094 + "import transformers\n",
1095 + "from sklearn.metrics import roc_auc_score\n",
1096 + "from torch.autograd import Variable"
1097 + ],
1098 + "execution_count": null,
1099 + "outputs": [
1100 + {
1101 + "output_type": "stream",
1102 + "text": [
1103 + "Collecting transformers\n",
1104 + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/12/b5/ac41e3e95205ebf53439e4dd087c58e9fd371fd8e3724f2b9b4cdb8282e5/transformers-2.10.0-py3-none-any.whl (660kB)\n",
1105 + "\u001b[K |████████████████████████████████| 665kB 9.2MB/s \n",
1106 + "\u001b[?25hCollecting sacremoses\n",
1107 + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/7d/34/09d19aff26edcc8eb2a01bed8e98f13a1537005d31e95233fd48216eed10/sacremoses-0.0.43.tar.gz (883kB)\n",
1108 + "\u001b[K |████████████████████████████████| 890kB 48.9MB/s \n",
1109 + "\u001b[?25hRequirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.6/dist-packages (from transformers) (2019.12.20)\n",
1110 + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from transformers) (1.18.4)\n",
1111 + "Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.6/dist-packages (from transformers) (4.41.1)\n",
1112 + "Collecting sentencepiece\n",
1113 + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/d4/a4/d0a884c4300004a78cca907a6ff9a5e9fe4f090f5d95ab341c53d28cbc58/sentencepiece-0.1.91-cp36-cp36m-manylinux1_x86_64.whl (1.1MB)\n",
1114 + "\u001b[K |████████████████████████████████| 1.1MB 44.4MB/s \n",
1115 + "\u001b[?25hRequirement already satisfied: filelock in /usr/local/lib/python3.6/dist-packages (from transformers) (3.0.12)\n",
1116 + "Requirement already satisfied: requests in /usr/local/lib/python3.6/dist-packages (from transformers) (2.23.0)\n",
1117 + "Collecting tokenizers==0.7.0\n",
1118 + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/14/e5/a26eb4716523808bb0a799fcfdceb6ebf77a18169d9591b2f46a9adb87d9/tokenizers-0.7.0-cp36-cp36m-manylinux1_x86_64.whl (3.8MB)\n",
1119 + "\u001b[K |████████████████████████████████| 3.8MB 42.2MB/s \n",
1120 + "\u001b[?25hRequirement already satisfied: dataclasses; python_version < \"3.7\" in /usr/local/lib/python3.6/dist-packages (from transformers) (0.7)\n",
1121 + "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from sacremoses->transformers) (1.12.0)\n",
1122 + "Requirement already satisfied: click in /usr/local/lib/python3.6/dist-packages (from sacremoses->transformers) (7.1.2)\n",
1123 + "Requirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from sacremoses->transformers) (0.15.1)\n",
1124 + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (2020.4.5.1)\n",
1125 + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (1.24.3)\n",
1126 + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (2.9)\n",
1127 + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (3.0.4)\n",
1128 + "Building wheels for collected packages: sacremoses\n",
1129 + " Building wheel for sacremoses (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
1130 + " Created wheel for sacremoses: filename=sacremoses-0.0.43-cp36-none-any.whl size=893260 sha256=f4121a107ea6b7b88d33fdc2a7bec9734d6c23cc9083be9036ffe9c3b955acc8\n",
1131 + " Stored in directory: /root/.cache/pip/wheels/29/3c/fd/7ce5c3f0666dab31a50123635e6fb5e19ceb42ce38d4e58f45\n",
1132 + "Successfully built sacremoses\n",
1133 + "Installing collected packages: sacremoses, sentencepiece, tokenizers, transformers\n",
1134 + "Successfully installed sacremoses-0.0.43 sentencepiece-0.1.91 tokenizers-0.7.0 transformers-2.10.0\n"
1135 + ],
1136 + "name": "stdout"
1137 + }
1138 + ]
1139 + },
1140 + {
1141 + "cell_type": "code",
1142 + "metadata": {
1143 + "id": "X2qIJL4fLB4n",
1144 + "colab_type": "code",
1145 + "colab": {
1146 + "base_uri": "https://localhost:8080/",
1147 + "height": 164,
1148 + "referenced_widgets": [
1149 + "0488e2a159f94f1e8fd2d95cfa1f0c00",
1150 + "f6b7f67b13a94abe81c8f311f5d9584e",
1151 + "182e7f63a7a747be9806d768c59ac8ed",
1152 + "89484e917aaf4be7b9c1fd73542101ec",
1153 + "e3002daa07f44aa296d26fc14e9e5c10",
1154 + "03e4968cf76248429f98b73ef104941b",
1155 + "33057b5773f04ab8a43d33eed74453bb",
1156 + "865728d013634aeaa7705c7350d86541",
1157 + "b3cf8354fb91443db5657239b1631db1",
1158 + "dca67a11598049b5b6a2e87b1d1d9724",
1159 + "ad4a891b74304e5cafc91dcac6f1aa71",
1160 + "3ecdbfd3ce6c4e64ae31985197903358",
1161 + "cec8ee3dd75a468d985fb9d2c17cd7f7",
1162 + "58100c551b1d4dd683e9bfb2c4059022",
1163 + "1c55789eede0464f85386b4e41c46c06",
1164 + "b5def479898f453fb51cd221ff78b1e4",
1165 + "7e00f631bf7c4557bcaeea97f26b3bb8",
1166 + "ca6631cb27f941fa92a7ff08cfb5fdde",
1167 + "d97d7a015ffa428696024cef965e789d",
1168 + "716a044bb3ad4f2081d064d690d40fd9",
1169 + "14c4dc6f2e3b48aa924966e737ed73ff",
1170 + "2df44811e03f4474ab053a62de70c160",
1171 + "dcc537362666468c8994a0eca019d05c",
1172 + "2eb87bd2ec0a4382b77b8562d0ac8dc8"
1173 + ]
1174 + },
1175 + "outputId": "1d675857-ca63-467f-ce9a-2e9f3ba985f1"
1176 + },
1177 + "source": [
1178 + "model_class = transformers.BertModel\n",
1179 + "tokenizer_class = transformers.BertTokenizer\n",
1180 + "pretrained_weights='bert-base-cased'\n",
1181 + "# Load pretrained model/tokenizer\n",
1182 + "tokenizer = tokenizer_class.from_pretrained(pretrained_weights)\n",
1183 + "bert_model = model_class.from_pretrained(pretrained_weights)"
1184 + ],
1185 + "execution_count": null,
1186 + "outputs": [
1187 + {
1188 + "output_type": "display_data",
1189 + "data": {
1190 + "application/vnd.jupyter.widget-view+json": {
1191 + "model_id": "0488e2a159f94f1e8fd2d95cfa1f0c00",
1192 + "version_minor": 0,
1193 + "version_major": 2
1194 + },
1195 + "text/plain": [
1196 + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=213450.0, style=ProgressStyle(descripti…"
1197 + ]
1198 + },
1199 + "metadata": {
1200 + "tags": []
1201 + }
1202 + },
1203 + {
1204 + "output_type": "stream",
1205 + "text": [
1206 + "\n"
1207 + ],
1208 + "name": "stdout"
1209 + },
1210 + {
1211 + "output_type": "display_data",
1212 + "data": {
1213 + "application/vnd.jupyter.widget-view+json": {
1214 + "model_id": "b3cf8354fb91443db5657239b1631db1",
1215 + "version_minor": 0,
1216 + "version_major": 2
1217 + },
1218 + "text/plain": [
1219 + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=433.0, style=ProgressStyle(description_…"
1220 + ]
1221 + },
1222 + "metadata": {
1223 + "tags": []
1224 + }
1225 + },
1226 + {
1227 + "output_type": "stream",
1228 + "text": [
1229 + "\n"
1230 + ],
1231 + "name": "stdout"
1232 + },
1233 + {
1234 + "output_type": "display_data",
1235 + "data": {
1236 + "application/vnd.jupyter.widget-view+json": {
1237 + "model_id": "7e00f631bf7c4557bcaeea97f26b3bb8",
1238 + "version_minor": 0,
1239 + "version_major": 2
1240 + },
1241 + "text/plain": [
1242 + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=435779157.0, style=ProgressStyle(descri…"
1243 + ]
1244 + },
1245 + "metadata": {
1246 + "tags": []
1247 + }
1248 + },
1249 + {
1250 + "output_type": "stream",
1251 + "text": [
1252 + "\n"
1253 + ],
1254 + "name": "stdout"
1255 + }
1256 + ]
1257 + },
1258 + {
1259 + "cell_type": "code",
1260 + "metadata": {
1261 + "id": "49IOXdUhLQ7u",
1262 + "colab_type": "code",
1263 + "colab": {}
1264 + },
1265 + "source": [
1266 + "max_seq = 512\n",
1267 + "def tokenize_text(df, max_seq):\n",
1268 + " return [\n",
1269 + " tokenizer.encode(text, add_special_tokens=True)[:max_seq] for text in df.values\n",
1270 + " ]\n",
1271 + "\n",
1272 + "def pad_text(tokenized_text, max_seq):\n",
1273 + " return np.array([el + [0] * (max_seq - len(el)) for el in tokenized_text])\n",
1274 + "\n",
1275 + "def tokenize_and_pad_text(df, max_seq):\n",
1276 + " tokenized_text = tokenize_text(df, max_seq)\n",
1277 + " padded_text = pad_text(tokenized_text, max_seq)\n",
1278 + " return torch.tensor(padded_text)\n",
1279 + "\n",
1280 + "def targets_to_tensor(df, target_columns):\n",
1281 + " return torch.tensor(df[target_columns].values, dtype=torch.float32)"
1282 + ],
1283 + "execution_count": null,
1284 + "outputs": []
1285 + },
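A tiny usage sketch of the helpers above (it assumes the `tokenizer` loaded earlier): each text is encoded with `[CLS]`/`[SEP]` markers, truncated to `max_seq`, then right-padded with 0 (the `[PAD]` id) so the batch forms one rectangular tensor:

```python
import pandas as pd

demo = pd.Series(["Apple Beats the Street", "Stocks End Lower"])
batch = tokenize_and_pad_text(demo, max_seq=16)
print(batch.shape)  # torch.Size([2, 16])
```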
1286 + {
1287 + "cell_type": "code",
1288 + "metadata": {
1289 + "id": "OROlflCuyPWG",
1290 + "colab_type": "code",
1291 + "colab": {
1292 + "base_uri": "https://localhost:8080/",
1293 + "height": 34
1294 + },
1295 + "outputId": "23721d4d-e430-4de7-b3a4-e73c0e57072e"
1296 + },
1297 + "source": [
1298 + "train_indices = tokenize_and_pad_text(combined_data[0:1].copy()[\"title\"], max_seq)\n",
1299 + "with torch.no_grad():\n",
1300 + " x_val = bert_model(train_indices)[0]\n",
1301 + "x_val.size()"
1302 + ],
1303 + "execution_count": null,
1304 + "outputs": [
1305 + {
1306 + "output_type": "execute_result",
1307 + "data": {
1308 + "text/plain": [
1309 + "torch.Size([1, 512, 768])"
1310 + ]
1311 + },
1312 + "metadata": {
1313 + "tags": []
1314 + },
1315 + "execution_count": 13
1316 + }
1317 + ]
1318 + },
1319 + {
1320 + "cell_type": "code",
1321 + "metadata": {
1322 + "id": "pn2OsYWp1pPC",
1323 + "colab_type": "code",
1324 + "colab": {
1325 + "base_uri": "https://localhost:8080/",
1326 + "height": 901
1327 + },
1328 + "outputId": "3b932188-810b-432a-b215-63c8296eba1a"
1329 + },
1330 + "source": [
1331 + "train_indices"
1332 + ],
1333 + "execution_count": null,
1334 + "outputs": [
1335 + {
1336 + "output_type": "execute_result",
1337 + "data": {
1338 + "text/plain": [
1339 + "tensor([[ 101, 9924, 1116, 5135, 5738, 25118, 1233, 20979, 1116, 1111,\n",
1340 + " 1103, 6237, 1104, 4945, 119, 1275, 102, 0, 0, 0,\n",
1341 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1342 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1343 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1344 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1345 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1346 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1347 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1348 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1349 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1350 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1351 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1352 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1353 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1354 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1355 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1356 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1357 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1358 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1359 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1360 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1361 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1362 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1363 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1364 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1365 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1366 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1367 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1368 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1369 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1370 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1371 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1372 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1373 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1374 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1375 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1376 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1377 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1378 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1379 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1380 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1381 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1382 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1383 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1384 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1385 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1386 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1387 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1388 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1389 + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
1390 + " 0, 0]])"
1391 + ]
1392 + },
1393 + "metadata": {
1394 + "tags": []
1395 + },
1396 + "execution_count": 14
1397 + }
1398 + ]
1399 + },
1400 + {
1401 + "cell_type": "code",
1402 + "metadata": {
1403 + "id": "TeNZa7Pzxim3",
1404 + "colab_type": "code",
1405 + "colab": {}
1406 + },
1407 + "source": [
1408 + "# count = 1\n",
1409 + "# for i in range(0,1801,200):\n",
1410 + "# train_indices = tokenize_and_pad_text(combined_data[i:i+200].copy()[\"title\"], max_seq) #20246\n",
1411 + "# with torch.no_grad():\n",
1412 + "# x_train = bert_model(train_indices)[0]\n",
1413 + "# torch.save(x_train, 'gdrive/My Drive/tensor512/train_'+str(count)+'.pt')\n",
1414 + "# del train_indices\n",
1415 + "# count+=1\n",
1416 + "# print(count)\n",
1417 + "\n",
1418 + "\n",
1419 + "# val_indices = tokenize_and_pad_text(combined_data[2000:2200].copy()[\"title\"], max_seq)\n",
1420 + "# with torch.no_grad():\n",
1421 + "# x_val = bert_model(val_indices)[0]\n",
1422 + "# torch.save(x_val, 'gdrive/My Drive/tensor512/val_1.pt')\n",
1423 + "# del val_indices\n",
1424 + "# val_indices = tokenize_and_pad_text(combined_data[2200:2400].copy()[\"title\"], max_seq)\n",
1425 + "# with torch.no_grad():\n",
1426 + "# x_val = bert_model(val_indices)[0]\n",
1427 + "# torch.save(x_val, 'gdrive/My Drive/tensor512/val_2.pt')\n",
1428 + "# del val_indices\n",
1429 + "\n",
1430 + "# test_indices = tokenize_and_pad_text(test_data[\"title\"], max_seq)\n",
1431 + "# with torch.no_grad():\n",
1432 + "# x_test = bert_model(test_indices)[0]\n",
1433 + "# np.save('gdrive/My Drive/test.npy',x_test.numpy())\n",
1434 + "# del test_indices\n",
1435 + "\n",
1436 + "# y_train = targets_to_tensor(combined_data[0:2000].copy(), \"index\")\n",
1437 + "# np.save('gdrive/My Drive/y_train.npy',y_train.numpy())\n",
1438 + "# del y_train\n",
1439 + "\n",
1440 + "# y_val = targets_to_tensor(combined_data[2000:].copy(), \"index\")\n",
1441 + "# np.save('gdrive/My Drive/y_val.npy',y_val.numpy())\n",
1442 + "# del y_val\n",
1443 + "\n",
1444 + "y_test = targets_to_tensor(test_data, \"index\")\n",
1445 + "np.save('gdrive/My Drive/y_test.npy',y_test.numpy())\n",
1446 + "del y_test\n",
1447 + "\n",
1448 + "\n",
1449 + "# torch.save(x_test, 'gdrive/My Drive/tensor/test_2.pt')\n",
1450 + "# torch.save(x_val, 'gdrive/My Drive/tensor/val_1.pt')\n",
1451 + "# y_train = targets_to_tensor(combined_data[0:1].copy(), \"index\")\n",
1452 + "# y_val = targets_to_tensor(combined_data[20246:].copy(), \"index\")\n",
1453 + "# y_test = targets_to_tensor(df_test, target_columns)\n",
1454 + "\n",
1455 + "# count = 1\n",
1456 + "# for i in range(0,20000,2000):\n",
1457 + "# if i == 20000:\n",
1458 + "# temp = tokenize_and_pad_text(combined_data[20000:20246].copy()[\"headline\"], max_seq)\n",
1459 + "# else: \n",
1460 + "# temp = tokenize_and_pad_text(combined_data[i:i+2000].copy()[\"headline\"], max_seq)\n",
1461 + "# with torch.no_grad():\n",
1462 + "# x_test = bert_model(temp)[0]\n",
1463 + "# torch.save(x_test, 'gdrive/My Drive/tensor80/train_'+str(count)+'.pt')\n",
1464 + "# count +=1 \n",
1465 + "# del temp\n",
1466 + "# temp = tokenize_and_pad_text(combined_data[20000:20246].copy()[\"headline\"], max_seq)\n",
1467 + "# # temp = tokenize_and_pad_text(combined_data[20246:22246].copy()[\"headline\"], max_seq)\n",
1468 + "# with torch.no_grad():\n",
1469 + "# x_test = bert_model(temp)[0]\n",
1470 + "# torch.save(x_test, 'gdrive/My Drive/tensor80/train_11.pt')\n",
1471 + "# del temp\n",
1472 + "\n",
1473 + "# temp = tokenize_and_pad_text(combined_data[22246:].copy()[\"headline\"], max_seq)\n",
1474 + "# with torch.no_grad():\n",
1475 + "# x_test = bert_model(temp)[0]\n",
1476 + "# torch.save(x_test, 'gdrive/My Drive/tensor80/val_2.pt')\n",
1477 + "# del temp"
1478 + ],
1479 + "execution_count": null,
1480 + "outputs": []
1481 + },
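+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "# Editor's note: the cell above (largely kept as commented-out history)\n",
+ "# caches BERT outputs chunk by chunk so the session never holds more than\n",
+ "# one chunk of embeddings in memory. A consolidated sketch of the same\n",
+ "# pattern; `out_dir` and `prefix` are hypothetical names:\n",
+ "def embed_in_chunks(df, column, out_dir, prefix, chunk=200):\n",
+ "    for count, start in enumerate(range(0, len(df), chunk), start=1):\n",
+ "        indices = tokenize_and_pad_text(df[start:start + chunk][column], max_seq)\n",
+ "        with torch.no_grad():  # inference only\n",
+ "            emb = bert_model(indices)[0]  # (chunk, max_seq, 768)\n",
+ "        torch.save(emb, out_dir + prefix + '_' + str(count) + '.pt')\n",
+ "        del indices, emb  # free memory before the next chunk\n",
+ "\n",
+ "# e.g. embed_in_chunks(combined_data[0:2000], 'title', 'gdrive/My Drive/tensor512/', 'train')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },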
1482 + {
1483 + "cell_type": "code",
1484 + "metadata": {
1485 + "id": "aJp1oA-Kt4Wg",
1486 + "colab_type": "code",
1487 + "colab": {}
1488 + },
1489 + "source": [
1490 + "temp = tokenize_and_pad_text(test_data[0:2000].copy()[\"headline\"], max_seq)\n",
1491 + "with torch.no_grad():\n",
1492 + " x_test = bert_model(temp)[0]\n",
1493 + "torch.save(x_test, 'gdrive/My Drive/tensor80/test_1.pt')\n",
1494 + "del temp\n",
1495 + "\n",
1496 + "temp = tokenize_and_pad_text(test_data[2000:].copy()[\"headline\"], max_seq)\n",
1497 + "with torch.no_grad():\n",
1498 + " x_test = bert_model(temp)[0]\n",
1499 + "torch.save(x_test, 'gdrive/My Drive/tensor80/test_2.pt')\n",
1500 + "del temp"
1501 + ],
1502 + "execution_count": null,
1503 + "outputs": []
1504 + },
1505 + {
1506 + "cell_type": "code",
1507 + "metadata": {
1508 + "id": "o3ObZM88xje5",
1509 + "colab_type": "code",
1510 + "colab": {}
1511 + },
1512 + "source": [
1513 + "y_train = targets_to_tensor(combined_data[0:20246].copy(), \"index\")\n",
1514 + "y_val = targets_to_tensor(combined_data[20246:].copy(), \"index\")\n",
1515 + "y_test = targets_to_tensor(test_data, \"index\")\n",
1516 + "\n",
1517 + "torch.save(y_train, 'gdrive/My Drive/tensor/y_train.pt')\n",
1518 + "torch.save(y_val, 'gdrive/My Drive/tensor/y_val.pt')\n",
1519 + "torch.save(y_test, 'gdrive/My Drive/tensor/y_test.pt')\n"
1520 + ],
1521 + "execution_count": null,
1522 + "outputs": []
1523 + },
1524 + {
1525 + "cell_type": "code",
1526 + "metadata": {
1527 + "id": "LWXz0C4WyKND",
1528 + "colab_type": "code",
1529 + "colab": {
1530 + "base_uri": "https://localhost:8080/",
1531 + "height": 51
1532 + },
1533 + "outputId": "5511676d-be38-4d4e-af38-ff51274c946a"
1534 + },
1535 + "source": [
1536 + "temp = torch.load('gdrive/My Drive/tensor80/test_1.pt')\n",
1537 + "temp2 = torch.load('gdrive/My Drive/tensor80/test_2.pt')\n",
1538 + "print(temp.size())\n",
1539 + "print(temp2.size())\n",
1540 + "\n",
1541 + "temp3 = torch.cat([temp, temp2], dim=0)\n",
1542 + "temp3.size()\n",
1543 + "torch.save(temp3, 'gdrive/My Drive/tensor80/test.pt')"
1544 + ],
1545 + "execution_count": null,
1546 + "outputs": [
1547 + {
1548 + "output_type": "stream",
1549 + "text": [
1550 + "torch.Size([2000, 80, 768])\n",
1551 + "torch.Size([1692, 80, 768])\n"
1552 + ],
1553 + "name": "stdout"
1554 + }
1555 + ]
1556 + },
1557 + {
1558 + "cell_type": "code",
1559 + "metadata": {
1560 + "id": "9QSpEMakzsYN",
1561 + "colab_type": "code",
1562 + "colab": {
1563 + "base_uri": "https://localhost:8080/",
1564 + "height": 170
1565 + },
1566 + "outputId": "220a36ac-3088-4af0-f8b0-6e60e7b69d89"
1567 + },
1568 + "source": [
1569 + "added = torch.load('gdrive/My Drive/tensor512/train_1.pt')\n",
1570 + "\n",
1571 + "for i in range(2,11):\n",
1572 + " temp = torch.load('gdrive/My Drive/tensor512/train_'+str(i)+'.pt')\n",
1573 + " added = torch.cat([added,temp],0)\n",
1574 + " print(added.size())\n",
1575 + " del temp\n",
1576 + "\n",
1577 + "np.save('gdrive/My Drive/train.npy',added.numpy())"
1578 + ],
1579 + "execution_count": null,
1580 + "outputs": [
1581 + {
1582 + "output_type": "stream",
1583 + "text": [
1584 + "torch.Size([400, 512, 768])\n",
1585 + "torch.Size([600, 512, 768])\n",
1586 + "torch.Size([800, 512, 768])\n",
1587 + "torch.Size([1000, 512, 768])\n",
1588 + "torch.Size([1200, 512, 768])\n",
1589 + "torch.Size([1400, 512, 768])\n",
1590 + "torch.Size([1600, 512, 768])\n",
1591 + "torch.Size([1800, 512, 768])\n",
1592 + "torch.Size([2000, 512, 768])\n"
1593 + ],
1594 + "name": "stdout"
1595 + }
1596 + ]
1597 + },
1598 + {
1599 + "cell_type": "code",
1600 + "metadata": {
1601 + "id": "dPx0jGrF0mOk",
1602 + "colab_type": "code",
1603 + "colab": {
1604 + "base_uri": "https://localhost:8080/",
1605 + "height": 34
1606 + },
1607 + "outputId": "d1b26c62-9a81-4866-a6ad-bdbd1a526003"
1608 + },
1609 + "source": [
1610 + "added = torch.load('gdrive/My Drive/tensor512/val_1.pt')\n",
1611 + "\n",
1612 + "for i in range(2,3):\n",
1613 + " temp = torch.load('gdrive/My Drive/tensor512/val_'+str(i)+'.pt')\n",
1614 + " added = torch.cat([added,temp],0)\n",
1615 + " print(added.size())\n",
1616 + "\n",
1617 + "np.save('gdrive/My Drive/val.npy',added.numpy())"
1618 + ],
1619 + "execution_count": null,
1620 + "outputs": [
1621 + {
1622 + "output_type": "stream",
1623 + "text": [
1624 + "torch.Size([400, 512, 768])\n"
1625 + ],
1626 + "name": "stdout"
1627 + }
1628 + ]
1629 + },
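+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "# Editor's note: the merged arrays are large (train.npy is 2000 x 512 x 768\n",
+ "# float32, about 3.1 GB), so a memory-mapped load keeps them on disk and\n",
+ "# reads slices on demand. A sketch:\n",
+ "x_train = np.load('gdrive/My Drive/train.npy', mmap_mode='r')\n",
+ "x_val = np.load('gdrive/My Drive/val.npy', mmap_mode='r')\n",
+ "print(x_train.shape, x_val.shape)"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },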
1630 + {
1631 + "cell_type": "code",
1632 + "metadata": {
1633 + "id": "ldah_SPc719u",
1634 + "colab_type": "code",
1635 + "colab": {}
1636 + },
1637 + "source": [
1638 + "import numpy as np\n",
1639 + "import torch\n",
1640 + "\n",
1641 + "y_val = targets_to_tensor(combined_data[20246:].copy(), \"index\")\n",
1642 + "np.save('gdrive/My Drive/tensor80/val_y.npy',y_val.numpy())\n",
1643 + "del y_val \n"
1644 + ],
1645 + "execution_count": null,
1646 + "outputs": []
1647 + },
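+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "# Editor's note: the commit also adds a daily stock-price CSV with columns\n",
+ "# date,price,open,high,low,volume,change, where prices carry comma\n",
+ "# thousands separators ('42,500'), volume uses K/M suffixes or '-', and\n",
+ "# change is a percent string. A parsing sketch; the filename below is a\n",
+ "# placeholder for the actual data file:\n",
+ "prices = pd.read_csv(path + 'price_data.csv', thousands=',')  # hypothetical name\n",
+ "prices['date'] = pd.to_datetime(prices['date'].astype(str), format='%Y%m%d')\n",
+ "prices['change'] = prices['change'].str.rstrip('%').str.replace(',', '').astype(float) / 100\n",
+ "\n",
+ "def parse_volume(v):\n",
+ "    # '49.16M' -> 49160000.0, '0.64K' -> 640.0, '-' -> 0.0\n",
+ "    if v == '-':\n",
+ "        return 0.0\n",
+ "    return float(v[:-1]) * {'K': 1e3, 'M': 1e6}[v[-1]]\n",
+ "\n",
+ "prices['volume'] = prices['volume'].apply(parse_volume)"
+ ],
+ "execution_count": null,
+ "outputs": []
+ }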
1648 + ]
1649 +}
1 +date,price,open,high,low,volume,change
2 +20200323,"42,500","42,600","43,550","42,400",29.15K,-6.39%
3 +20200322,"45,400","45,400","45,400","45,400",-,0.00%
4 +20200320,"45,400","44,150","45,500","43,550",49.16M,5.70%
5 +20200319,"42,950","46,500","46,600","42,300",0.64K,-5.81%
6 +20200318,"45,600","47,750","48,350","45,600",38.15M,-3.59%
7 +20200317,"47,300","46,900","49,650","46,700",50.59M,-3.27%
8 +20200316,"48,900","50,100","50,900","48,800",33.06M,-2.10%
9 +20200315,"49,950","49,950","49,950","49,950",-,0.00%
10 +20200313,"49,950","47,450","51,600","46,850",58.33M,-1.67%
11 +20200312,"50,800","51,000","51,900","49,300",47.90M,-2.50%
12 +20200311,"52,100","54,300","54,400","52,000",37.83M,-4.58%
13 +20200310,"54,600","53,800","54,900","53,700",31.33M,0.74%
14 +20200309,"54,200","54,700","55,000","53,600",30.01M,-4.07%
15 +20200308,"56,500","56,500","56,500","56,500",-,0.00%
16 +20200306,"56,500","56,500","57,200","56,200",18.59M,-2.25%
17 +20200305,"57,800","57,600","58,000","56,700",21.66M,0.70%
18 +20200304,"57,400","54,800","57,600","54,600",24.32M,3.61%
19 +20200303,"55,400","56,700","56,900","55,100",29.81M,0.73%
20 +20200302,"55,000","54,300","55,500","53,600",29.21M,1.48%
21 +20200301,"54,200","54,200","54,200","54,200",-,0.00%
22 +20200228,"54,200","55,000","55,500","54,200",29.91M,-3.04%
23 +20200227,"55,900","56,300","56,900","55,500",22.36M,-1.06%
24 +20200226,"56,500","56,000","57,000","56,000",25.38M,-2.42%
25 +20200225,"57,900","56,200","58,000","56,200",23.07M,1.94%
26 +20200224,"56,800","57,400","58,100","56,800",25.36M,-4.05%
27 +20200223,"59,200","59,200","59,200","59,200",-,0.00%
28 +20200221,"59,200","58,800","59,800","58,500",13.28M,-1.33%
29 +20200220,"60,000","60,700","61,300","59,600",14.51M,-0.33%
30 +20200219,"60,200","59,800","60,400","59,400",12.94M,0.67%
31 +20200218,"59,800","60,800","60,900","59,700",16.63M,-2.76%
32 +20200217,"61,500","61,600","62,000","61,200",8.49M,-0.49%
33 +20200216,"61,800","61,800","61,800","61,800",-,0.00%
34 +20200214,"61,800","60,900","61,900","60,200",13.26M,1.81%
35 +20200213,"60,700","61,200","61,600","60,500",18.34M,0.33%
36 +20200212,"60,500","60,300","60,700","59,700",12.73M,1.00%
37 +20200211,"59,900","59,800","60,700","59,700",10.95M,0.34%
38 +20200210,"59,700","59,200","59,800","59,100",12.69M,-1.16%
39 +20200209,"60,400","60,400","60,400","60,400",-,0.00%
40 +20200207,"60,400","61,100","61,200","59,700",16.27M,-1.15%
41 +20200206,"61,100","60,100","61,100","59,700",14.68M,2.69%
42 +20200205,"59,500","60,000","60,200","58,900",18.19M,1.02%
43 +20200204,"58,900","57,100","59,000","56,800",21.10M,2.97%
44 +20200203,"57,200","55,500","57,400","55,200",22.91M,1.42%
45 +20200202,"56,400","56,400","56,400","56,400",-,0.00%
46 +20200131,"56,400","57,800","58,400","56,400",19.49M,-1.40%
47 +20200130,"57,200","58,800","58,800","56,800",20.72M,-3.21%
48 +20200129,"59,100","59,100","59,700","58,800",16.26M,0.51%
49 +20200128,"58,800","59,400","59,400","58,300",22.63M,-3.29%
50 +20200127,"60,800","60,800","60,800","60,800",-,0.00%
51 +20200123,"60,800","61,800","61,800","60,700",14.88M,-2.41%
52 +20200122,"62,300","60,500","62,600","60,400",15.05M,1.47%
53 +20200121,"61,400","62,000","62,400","61,200",11.11M,-1.60%
54 +20200120,"62,400","62,000","62,800","61,700",12.46M,1.79%
55 +20200119,"61,300","61,300","61,300","61,300",-,0.00%
56 +20200117,"61,300","61,900","62,000","61,000",15.45M,0.99%
57 +20200116,"60,700","59,100","60,700","59,000",13.56M,2.88%
58 +20200115,"59,000","59,500","59,600","58,900",13.72M,-1.67%
59 +20200114,"60,000","60,400","61,000","59,900",16.08M,0.00%
60 +20200113,"60,000","59,600","60,000","59,100",11.26M,0.84%
61 +20200112,"59,500","59,500","59,500","59,500",-,0.00%
62 +20200110,"59,500","58,800","59,700","58,300",15.83M,1.54%
63 +20200109,"58,600","58,400","58,600","57,400",23.23M,3.17%
64 +20200108,"56,800","56,200","57,400","55,900",23.33M,1.79%
65 +20200107,"55,800","55,700","56,400","55,600",9.89M,0.54%
66 +20200106,"55,500","54,900","55,600","54,600",10.24M,0.00%
67 +20200105,"55,500","55,500","55,500","55,500",-,0.00%
68 +20200103,"55,500","56,000","56,600","54,900",15.31M,0.54%
69 +20200102,"55,200","55,500","56,000","55,000",12.76M,-1.08%
70 +20191230,"55,800","56,200","56,600","55,700",8.35M,-1.24%
71 +20191229,"56,500","56,500","56,500","56,500",-,0.00%
72 +20191227,"56,500","55,700","56,900","55,500",12.29M,1.99%
73 +20191226,"55,400","54,700","55,400","54,400",9.57M,0.73%
74 +20191225,"55,000","55,000","55,000","55,000",-,0.00%
75 +20191224,"55,000","55,600","55,700","54,800",9.19M,-0.90%
76 +20191223,"55,500","56,100","56,400","55,100",9.49M,-0.89%
77 +20191222,"56,000","56,000","56,000","56,000",-,0.00%
78 +20191220,"56,000","56,100","56,500","55,600",12.07M,0.00%
79 +20191219,"56,000","57,000","57,300","55,500",13.95M,-0.53%
80 +20191218,"56,300","56,700","57,200","56,000",15.49M,-0.71%
81 +20191217,"56,700","55,800","56,700","55,400",18.75M,3.66%
82 +20191216,"54,700","54,500","54,900","54,300",11.10M,0.00%
83 +20191215,"54,700","54,700","54,700","54,700",-,0.00%
84 +20191213,"54,700","54,500","54,800","53,900",17.59M,2.63%
85 +20191212,"53,300","53,000","53,300","52,700",28.45M,2.70%
86 +20191211,"51,900","51,500","52,200","51,400",11.00M,0.78%
87 +20191210,"51,500","51,000","51,600","50,700",6.87M,0.59%
88 +20191209,"51,200","50,900","51,400","50,700",8.27M,1.59%
89 +20191208,"50,400","50,400","50,400","50,400",-,0.00%
90 +20191206,"50,400","50,100","50,900","49,950",10.87M,1.82%
91 +20191205,"49,500","50,200","50,400","49,500",10.02M,0.10%
92 +20191204,"49,450","49,600","49,850","49,000",12.96M,-0.90%
93 +20191203,"49,900","49,800","50,300","49,500",12.00M,-0.99%
94 +20191202,"50,400","50,900","51,300","50,400",9.59M,0.20%
95 +20191201,"50,300","50,300","50,300","50,300",-,0.00%
96 +20191129,"50,300","51,200","51,400","50,200",11.01M,-1.95%
97 +20191128,"51,300","51,900","52,100","51,300",6.15M,-1.72%
98 +20191127,"52,200","51,800","52,300","51,600",7.19M,0.77%
99 +20191126,"51,800","51,900","52,900","51,800",26.04M,0.00%
100 +20191125,"51,800","52,200","52,600","51,700",9.01M,0.39%
101 +20191122,"51,600","51,000","51,600","50,900",8.33M,1.18%
102 +20191121,"51,000","51,600","52,100","50,600",14.25M,-1.92%
103 +20191120,"52,000","53,400","53,400","52,000",11.83M,-2.80%
104 +20191119,"53,500","53,200","53,500","52,700",8.10M,0.00%
105 +20191118,"53,500","53,600","53,800","53,200",7.65M,-0.37%
106 +20191117,"53,700","53,700","53,700","53,700",-,0.00%
107 +20191115,"53,700","52,900","53,700","52,600",9.68M,1.70%
108 +20191114,"52,800","51,900","52,800","51,900",12.00M,0.57%
109 +20191113,"52,500","52,500","52,500","52,000",6.22M,-0.19%
110 +20191112,"52,600","51,800","52,600","51,600",6.10M,1.94%
111 +20191111,"51,600","52,200","52,200","51,400",8.18M,-0.96%
112 +20191110,"52,100","52,100","52,100","52,100",-,0.00%
113 +20191108,"52,100","53,200","53,300","52,000",10.39M,-1.51%
114 +20191107,"52,900","53,400","53,400","52,400",9.14M,-0.75%
115 +20191106,"53,300","52,900","53,500","52,700",13.12M,1.14%
116 +20191105,"52,700","52,400","52,700","52,100",10.12M,0.76%
117 +20191104,"52,300","51,700","52,300","51,400",12.76M,2.15%
118 +20191103,"51,200","51,200","51,200","51,200",-,0.00%
119 +20191101,"51,200","50,600","51,200","50,400",7.54M,1.59%
120 +20191031,"50,400","51,000","51,400","50,300",10.21M,0.00%
121 +20191030,"50,400","50,700","50,800","50,200",8.87M,-1.37%
122 +20191029,"51,100","51,400","51,700","50,800",7.14M,-0.39%
123 +20191028,"51,300","50,700","51,500","50,700",6.34M,0.79%
124 +20191027,"50,900","50,900","50,900","50,900",-,0.00%
125 +20191025,"50,900","50,800","51,200","50,500",7.84M,0.39%
126 +20191024,"50,700","52,500","52,500","50,500",10.77M,-0.98%
127 +20191023,"51,200","51,300","51,500","50,800",8.46M,0.00%
128 +20191022,"51,200","50,800","51,500","50,700",10.71M,1.79%
129 +20191021,"50,300","49,900","50,400","49,800",4.39M,0.80%
130 +20191020,"49,900","49,900","49,900","49,900",-,0.00%
131 +20191018,"49,900","50,300","50,900","49,650",8.40M,-1.19%
132 +20191017,"50,500","50,500","50,600","50,100",6.61M,-0.39%
133 +20191016,"50,700","50,700","50,900","50,400",9.01M,1.20%
134 +20191015,"50,100","49,900","50,200","49,900",5.90M,0.20%
135 +20191014,"50,000","50,000","50,300","49,850",10.87M,1.73%
136 +20191013,"49,150","49,150","49,150","49,150",-,0.00%
137 +20191011,"49,150","49,000","49,450","48,800",7.73M,1.24%
138 +20191010,"48,550","48,200","49,200","48,000",17.84M,-0.72%
139 +20191009,"48,900","48,900","48,900","48,900",-,0.00%
140 +20191008,"48,900","47,900","49,000","47,600",13.29M,2.41%
141 +20191007,"47,750","48,350","48,700","47,650",6.47M,-0.52%
142 +20191006,"48,000","48,000","48,000","48,000",-,0.00%
143 +20191004,"48,000","47,400","48,650","47,350",8.46M,0.84%
144 +20191003,"47,600","47,600","47,600","47,600",-,0.00%
145 +20191002,"47,600","48,350","48,400","47,600",8.37M,-2.56%
146 +20191001,"48,850","48,900","49,100","48,650",6.20M,-0.41%
147 +20190930,"49,050","48,050","49,250","47,900",9.20M,1.34%
148 +20190929,"48,400","48,400","48,400","48,400",-,0.00%
149 +20190927,"48,400","48,000","48,700","48,000",8.03M,-1.63%
150 +20190926,"49,200","49,000","49,250","48,900",8.36M,0.61%
151 +20190925,"48,900","49,200","49,350","48,800",9.11M,-1.21%
152 +20190924,"49,500","49,050","49,650","48,850",7.84M,0.41%
153 +20190923,"49,300","49,250","49,300","49,000",7.40M,0.20%
154 +20190922,"49,200","49,200","49,200","49,200",-,0.00%
155 +20190920,"49,200","49,400","49,600","49,100",14.91M,0.10%
156 +20190919,"49,150","48,050","49,200","47,850",15.19M,3.04%
157 +20190918,"47,700","46,900","47,700","46,800",9.82M,1.71%
158 +20190917,"46,900","47,000","47,100","46,800",5.99M,-0.42%
159 +20190916,"47,100","47,000","47,100","46,400",11.99M,-0.11%
160 +20190915,"47,150","47,150","47,150","47,150",-,0.00%
161 +20190911,"47,150","47,300","47,400","46,800",16.12M,0.32%
162 +20190910,"47,000","47,100","47,200","46,550",9.16M,0.21%
163 +20190909,"46,900","46,450","47,000","46,300",9.17M,1.30%
164 +20190908,"46,300","46,300","46,300","46,300",-,0.00%
165 +20190906,"46,300","46,500","46,500","45,850",9.70M,1.31%
166 +20190905,"45,700","44,800","46,100","44,450",17.82M,3.63%
167 +20190904,"44,100","43,250","44,100","43,150",11.48M,1.97%
168 +20190903,"43,250","43,550","43,650","43,100",8.52M,-1.26%
169 +20190902,"43,800","44,850","44,850","43,650",7.11M,-0.45%
170 +20190901,"44,000","44,000","44,000","44,000",-,0.00%
171 +20190830,"44,000","43,750","44,300","43,750",8.78M,1.38%
172 +20190829,"43,400","44,200","44,200","43,050",10.13M,-1.70%
173 +20190828,"44,150","44,100","44,400","43,750",5.75M,0.23%
174 +20190827,"44,050","43,650","44,200","43,600",16.31M,1.03%
175 +20190826,"43,600","43,050","43,800","42,950",7.94M,-0.80%
176 +20190825,"43,950","43,950","43,950","43,950",-,0.00%
177 +20190823,"43,950","43,800","44,200","43,650",4.64M,-0.23%
178 +20190822,"44,050","44,500","44,700","43,850",8.05M,-1.01%
179 +20190821,"44,500","44,350","44,800","44,150",6.52M,0.11%
180 +20190820,"44,450","43,950","44,600","43,550",8.41M,1.95%
181 +20190819,"43,600","44,350","44,350","43,500",5.97M,-0.68%
182 +20190818,"43,900","43,900","43,900","43,900",-,0.00%
183 +20190816,"43,900","43,800","43,900","43,300",9.45M,0.46%
184 +20190815,"43,700","43,700","43,700","43,700",-,0.00%
185 +20190814,"43,700","43,900","44,250","43,500",8.50M,1.63%
186 +20190813,"43,000","43,500","43,500","42,950",6.96M,-1.60%
187 +20190812,"43,700","44,000","44,000","43,550",7.27M,1.27%
188 +20190811,"43,150","43,150","43,150","43,150",-,0.00%
189 +20190809,"43,150","43,250","43,350","43,050",9.08M,1.17%
190 +20190808,"42,650","43,250","43,500","42,650",16.00M,-1.27%
191 +20190807,"43,200","43,600","43,900","43,100",9.99M,-0.69%
192 +20190806,"43,500","42,500","43,800","42,500",15.06M,-1.02%
193 +20190805,"43,950","44,350","44,600","43,600",13.32M,-2.22%
194 +20190804,"44,950","44,950","44,950","44,950",-,0.00%
195 +20190802,"44,950","44,550","45,500","44,300",11.82M,-0.55%
196 +20190801,"45,200","44,900","45,500","44,850",7.74M,-0.33%
197 +20190731,"45,350","46,200","46,600","45,000",12.82M,-2.58%
198 +20190730,"46,550","46,300","46,850","46,300",5.51M,0.98%
199 +20190729,"46,100","46,800","47,050","46,000",6.85M,-2.23%
200 +20190728,"47,150","47,150","47,150","47,150",-,0.00%
201 +20190726,"47,150","46,650","47,150","46,550",7.67M,-0.11%
202 +20190725,"47,200","47,150","47,200","46,600",8.38M,1.72%
203 +20190724,"46,400","47,100","47,150","46,250",8.34M,-1.90%
204 +20190723,"47,300","47,350","47,550","47,050",8.90M,0.21%
205 +20190722,"47,200","46,800","47,300","46,600",9.01M,0.85%
206 +20190721,"46,800","46,800","46,800","46,800",-,0.00%
207 +20190719,"46,800","46,650","46,950","46,600",7.98M,1.52%
208 +20190718,"46,100","46,450","46,450","45,650",4.95M,0.11%
209 +20190717,"46,050","46,150","46,350","45,950",5.21M,-1.71%
210 +20190716,"46,850","46,450","46,850","46,300",7.21M,0.86%
211 +20190715,"46,450","45,950","46,650","45,750",4.71M,0.32%
212 +20190714,"46,300","46,300","46,300","46,300",-,0.00%
213 +20190712,"46,300","46,350","46,400","45,800",5.12M,0.22%
214 +20190711,"46,200","46,350","46,550","46,150",10.85M,1.43%
215 +20190710,"45,550","45,550","46,150","45,500",9.17M,1.00%
216 +20190709,"45,100","44,850","45,450","44,700",7.63M,1.58%
217 +20190708,"44,400","44,750","44,800","44,350",7.81M,-2.74%
218 +20190705,"45,650","45,950","45,950","45,250",7.22M,-0.76%
219 +20190704,"46,000","45,250","46,200","45,250",6.36M,1.32%
220 +20190703,"45,400","45,750","46,350","45,200",9.66M,-1.84%
221 +20190702,"46,250","46,200","46,900","45,850",8.43M,-0.75%
222 +20190701,"46,600","47,350","47,400","46,250",11.03M,-0.85%
223 +20190630,"47,000","47,000","47,000","47,000",-,0.00%
224 +20190628,"47,000","47,000","47,000","46,700",12.25M,1.08%
225 +20190627,"46,500","46,000","46,600","45,750",12.58M,1.75%
226 +20190626,"45,700","45,800","46,000","45,600",9.02M,0.22%
227 +20190625,"45,600","45,200","45,800","45,200",6.87M,0.22%
228 +20190624,"45,500","45,200","45,800","45,200",6.07M,-0.44%
229 +20190623,"45,700","45,700","45,700","45,700",-,0.00%
230 +20190621,"45,700","45,750","45,800","45,200",9.32M,0.44%
231 +20190620,"45,500","44,850","45,500","44,850",6.76M,0.33%
232 +20190619,"45,350","45,450","45,450","45,000",9.36M,2.25%
233 +20190618,"44,350","43,750","44,500","43,650",7.98M,1.03%
234 +20190617,"43,900","43,750","44,050","43,400",7.36M,-0.23%
235 +20190616,"44,000","44,000","44,000","44,000",-,0.00%
236 +20190614,"44,000","43,750","44,150","43,300",8.31M,0.57%
237 +20190613,"43,750","44,200","44,400","43,400",16.89M,-1.91%
238 +20190612,"44,600","44,800","45,050","44,300",8.55M,-0.56%
239 +20190611,"44,850","44,800","45,000","44,550",6.63M,0.11%
240 +20190610,"44,800","44,300","44,850","44,050",8.62M,1.36%
241 +20190607,"44,200","43,600","44,350","43,450",11.62M,0.68%
242 +20190605,"43,900","44,050","44,200","43,700",10.82M,1.04%
243 +20190604,"43,450","43,400","43,700","43,000",9.87M,-0.80%
244 +20190603,"43,800","42,950","43,900","42,500",15.08M,3.06%
245 +20190531,"42,500","42,600","42,800","42,150",10.21M,-0.12%
246 +20190530,"42,550","42,200","42,700","42,150",9.98M,1.79%
247 +20190529,"41,800","41,850","42,100","41,300",14.52M,-1.76%
248 +20190528,"42,550","42,550","42,950","42,150",22.59M,-0.23%
249 +20190527,"42,650","42,500","43,000","42,350",7.44M,-0.12%
250 +20190524,"42,700","43,800","43,800","42,400",13.34M,-2.62%
251 +20190523,"43,850","43,900","44,000","43,250",12.19M,0.80%
252 +20190522,"43,500","43,700","43,800","42,400",10.70M,0.81%
253 +20190521,"43,150","42,600","43,950","42,350",18.69M,2.74%
254 +20190520,"42,000","41,650","42,100","41,550",13.11M,1.94%
255 +20190517,"41,200","41,950","42,050","40,850",12.29M,-0.84%
256 +20190516,"41,550","42,350","42,400","41,350",13.62M,-2.35%
257 +20190515,"42,550","42,700","43,050","42,550",7.66M,-0.23%
258 +20190514,"42,650","41,300","43,100","41,300",11.54M,0.00%
259 +20190513,"42,650","42,500","43,200","42,350",7.62M,-0.58%
260 +20190510,"42,900","42,600","43,450","42,450",13.51M,1.06%
261 +20190509,"42,450","43,900","44,250","42,450",22.04M,-4.07%
262 +20190508,"44,250","44,300","44,850","44,200",10.04M,-1.34%
263 +20190507,"44,850","45,250","45,300","44,400",11.96M,-0.99%
264 +20190503,"45,300","45,900","46,050","45,300",6.54M,-1.31%
265 +20190502,"45,900","45,500","46,150","45,400",8.62M,0.11%
266 +20190430,"45,850","46,000","46,300","45,350",10.16M,-0.65%
267 +20190429,"46,150","45,150","46,150","45,100",8.48M,2.90%
268 +20190426,"44,850","44,200","45,000","43,800",9.37M,0.45%
269 +20190425,"44,650","44,250","45,000","44,100",9.52M,-0.22%
270 +20190424,"44,750","45,400","45,650","44,150",12.18M,-1.00%
271 +20190423,"45,200","45,050","45,500","45,000",6.88M,-0.33%
272 +20190422,"45,350","45,400","45,900","45,100",5.90M,0.11%
273 +20190419,"45,300","45,750","46,000","45,250",8.35M,-0.66%
274 +20190418,"45,600","47,200","47,250","45,500",10.83M,-3.08%
275 +20190417,"47,050","47,300","47,600","47,000",5.47M,-0.42%
276 +20190416,"47,250","47,400","47,400","46,800",7.62M,0.43%
277 +20190415,"47,050","47,150","47,500","47,000",8.72M,0.43%
278 +20190412,"46,850","46,050","46,900","46,000",7.65M,1.30%
279 +20190411,"46,250","46,700","46,800","46,150",13.62M,-0.96%
280 +20190410,"46,700","46,400","46,700","46,050",10.35M,0.11%
281 +20190409,"46,650","46,700","46,950","46,200",6.88M,0.00%
282 +20190408,"46,650","47,250","47,250","46,150",8.49M,-0.43%
283 +20190405,"46,850","46,950","47,550","46,600",8.53M,-0.21%
284 +20190404,"46,950","46,150","47,100","46,150",11.44M,0.75%
285 +20190403,"46,600","46,750","46,750","45,800",12.00M,1.86%
286 +20190402,"45,750","45,550","46,100","45,350",8.90M,1.55%
287 +20190401,"45,050","45,200","45,450","44,850",6.79M,0.90%
288 +20190329,"44,650","44,500","44,900","44,200",11.00M,-0.45%
289 +20190328,"44,850","44,950","45,200","44,300",6.81M,-1.10%
290 +20190327,"45,350","44,750","45,600","44,250",9.32M,0.22%
291 +20190326,"45,250","45,500","45,700","44,900",9.65M,-0.55%
292 +20190325,"45,500","45,300","45,650","44,800",8.68M,-2.26%
293 +20190322,"46,550","46,850","47,000","46,250",12.49M,1.53%
294 +20190321,"45,850","44,600","46,250","44,050",21.03M,4.09%
295 +20190320,"44,050","43,800","44,200","43,100",9.83M,0.34%
296 +20190319,"43,900","43,750","43,900","43,550",7.60M,0.46%
297 +20190318,"43,700","43,950","44,150","43,450",8.16M,-1.13%
298 +20190315,"44,200","43,800","44,250","43,700",14.61M,0.80%
299 +20190314,"43,850","43,700","44,300","43,550",18.02M,0.00%
300 +20190313,"43,850","44,250","44,450","43,700",8.10M,-1.79%
301 +20190312,"44,650","44,300","44,950","44,150",10.83M,2.29%
302 +20190311,"43,650","44,400","44,450","43,650",8.51M,-0.34%
303 +20190308,"43,800","44,450","44,800","43,800",7.70M,-1.46%
304 +20190307,"44,450","43,400","44,950","43,400",10.99M,1.02%
305 +20190306,"44,000","44,000","44,300","43,700",9.95M,-0.56%
306 +20190305,"44,250","44,600","45,100","44,150",10.50M,-1.34%
307 +20190304,"44,850","46,000","46,100","44,800",12.67M,-0.55%
308 +20190228,"45,100","46,400","46,500","45,100",23.50M,-3.53%
309 +20190227,"46,750","47,000","47,250","46,750",7.69M,0.00%
310 +20190226,"46,750","47,350","47,450","46,500",7.98M,-1.27%
311 +20190225,"47,350","47,400","47,550","47,050",7.46M,0.42%
312 +20190222,"47,150","46,500","47,150","46,450",6.67M,0.43%
313 +20190221,"46,950","46,500","47,200","46,200",8.45M,0.11%
314 +20190220,"46,900","46,750","47,100","46,500",11.22M,2.07%
315 +20190219,"45,950","45,850","46,150","45,450",6.73M,-0.54%
316 +20190218,"46,200","46,500","46,850","45,850",8.17M,0.33%
317 +20190215,"46,050","46,750","46,850","45,650",10.54M,-3.05%
318 +20190214,"47,500","46,600","47,500","46,150",17.12M,2.81%
319 +20190213,"46,200","46,400","46,700","46,000",11.29M,0.33%
320 +20190212,"46,050","44,650","46,250","44,650",12.99M,2.33%
321 +20190211,"45,000","44,500","45,000","44,250",10.23M,0.45%
322 +20190208,"44,800","45,700","45,700","44,650",12.49M,-3.03%
323 +20190207,"46,200","46,800","47,100","46,200",15.64M,-0.32%
324 +20190201,"46,350","46,650","46,950","46,250",13.63M,0.43%
325 +20190131,"46,150","46,650","47,050","46,150",21.47M,-0.54%
326 +20190130,"46,400","44,800","46,400","44,800",17.06M,1.98%
327 +20190129,"45,500","45,050","45,500","44,350",15.05M,1.00%
328 +20190128,"45,050","45,000","45,500","44,600",17.60M,0.67%
329 +20190125,"44,750","44,300","44,750","43,750",21.94M,3.95%
330 +20190124,"43,050","43,050","43,100","42,350",14.17M,2.50%
331 +20190123,"42,000","41,350","42,250","41,350",10.38M,-0.36%
332 +20190122,"42,150","42,750","42,850","41,850",9.91M,-1.40%
333 +20190121,"42,750","42,700","42,750","41,900",11.34M,1.06%
334 +20190118,"42,300","42,000","42,400","41,950",9.73M,0.83%
335 +20190117,"41,950","41,700","42,100","41,450",11.57M,1.21%
336 +20190116,"41,450","41,150","41,450","40,700",8.48M,0.85%
337 +20190115,"41,100","40,050","41,100","39,850",11.22M,2.62%
338 +20190114,"40,050","40,450","40,700","39,850",11.98M,-1.11%
339 +20190111,"40,500","40,350","40,550","39,950",11.23M,1.76%
340 +20190110,"39,800","40,000","40,150","39,600",14.33M,0.51%
341 +20190109,"39,600","38,650","39,600","38,300",16.96M,3.94%
342 +20190108,"38,100","38,000","39,200","37,950",12.64M,-1.68%
343 +20190107,"38,750","38,000","38,900","37,800",11.94M,3.47%
344 +20190104,"37,450","37,450","37,600","36,850",13.30M,-0.40%
345 +20190103,"37,600","38,300","38,550","37,450",12.44M,-2.97%
346 +20190102,"38,750","39,400","39,400","38,550",7.79M,0.13%
347 +20181228,"38,700","38,250","38,900","38,200",9.72M,1.18%
348 +20181227,"38,250","38,700","38,800","38,100",10.46M,-0.26%
349 +20181226,"38,350","38,400","38,750","38,300",12.61M,-1.16%
350 +20181224,"38,800","38,500","39,050","38,300",9.72M,0.39%
351 +20181221,"38,650","38,200","38,650","38,100",14.04M,0.00%
352 +20181220,"38,650","38,600","39,100","38,500",10.56M,-1.15%
353 +20181219,"39,100","38,900","39,350","38,850",9.51M,0.51%
354 +20181218,"38,900","38,300","39,200","38,300",10.79M,-0.64%
355 +20181217,"39,150","38,650","39,600","38,650",11.44M,0.51%
356 +20181214,"38,950","40,200","40,200","38,700",18.81M,-2.63%
357 +20181213,"40,000","40,650","40,750","40,000",22.70M,-1.11%
358 +20181212,"40,450","40,250","40,700","40,150",10.61M,0.50%
359 +20181211,"40,250","40,600","40,700","40,200",10.60M,0.12%
360 +20181210,"40,200","40,450","40,650","40,000",13.29M,-1.83%
361 +20181207,"40,950","40,900","41,400","40,850",11.41M,1.11%
362 +20181206,"40,500","40,600","41,100","40,450",14.06M,-2.29%
363 +20181205,"41,450","40,900","41,750","40,850",12.03M,-1.66%
364 +20181204,"42,150","42,650","42,900","41,900",13.97M,-2.54%
365 +20181203,"43,250","42,750","43,400","42,400",11.62M,3.35%
366 +20181130,"41,850","43,450","44,000","41,750",19.08M,-3.01%
367 +20181129,"43,150","43,850","43,850","42,900",8.44M,0.00%
368 +20181128,"43,150","42,800","43,200","42,750",6.86M,0.23%
369 +20181127,"43,050","42,900","43,100","42,500",8.15M,1.06%
370 +20181126,"42,600","42,150","42,800","42,100",5.96M,0.71%
371 +20181125,"42,300","42,250","42,350","42,250",-,-0.24%
372 +20181123,"42,400","42,450","42,600","41,900",5.18M,-0.12%
373 +20181122,"42,450","42,000","42,650","42,000",5.95M,0.83%
374 +20181121,"42,100","41,800","42,300","41,800",10.68M,-1.64%
375 +20181120,"42,800","42,450","43,000","42,100",9.32M,-1.95%
376 +20181119,"43,650","44,050","44,250","43,450",7.50M,-0.34%
377 +20181118,"43,800","44,000","44,000","43,800",-,-0.45%
378 +20181116,"44,000","44,600","44,750","43,700",7.75M,-0.56%
379 +20181115,"44,250","44,050","44,350","43,500",5.85M,0.34%
380 +20181114,"44,100","44,500","44,500","43,800",6.56M,-0.90%
381 +20181113,"44,500","43,900","44,500","43,400",9.02M,-1.55%
382 +20181112,"45,200","43,850","45,250","43,700",8.49M,3.20%
383 +20181111,"43,800","43,900","43,900","43,800",-,-1.13%
384 +20181109,"44,300","44,450","44,850","43,900",7.19M,0.57%
385 +20181108,"44,050","44,900","45,050","44,050",12.32M,0.11%
386 +20181107,"44,000","43,600","44,500","43,400",11.54M,0.57%
387 +20181106,"43,750","43,750","43,800","42,950",7.36M,-0.11%
388 +20181105,"43,800","43,750","43,800","42,900",9.18M,0.23%
389 +20181104,"43,700","43,800","43,800","43,700",-,-1.02%
390 +20181102,"44,150","43,050","44,250","42,800",16.50M,4.74%
391 +20181101,"42,150","42,450","42,950","42,150",12.92M,-0.59%
392 +20181031,"42,400","42,900","43,350","41,700",16.99M,0.12%
393 +20181030,"42,350","41,400","43,000","41,000",14.19M,2.29%
394 +20181029,"41,400","40,850","41,950","40,550",13.03M,0.98%
395 +20181026,"41,000","41,100","41,300","40,400",14.00M,0.00%
396 +20181025,"41,000","40,600","41,550","40,550",18.58M,-3.64%
397 +20181024,"42,550","43,050","43,100","42,250",13.49M,-1.16%
398 +20181023,"43,050","43,300","43,450","42,550",9.44M,-1.15%
399 +20181022,"43,550","43,450","43,950","43,200",7.79M,0.23%
400 +20181021,"43,450","43,400","43,450","43,400",-,-1.03%
401 +20181019,"43,900","43,900","44,150","43,450",7.78M,-0.34%
402 +20181018,"44,050","43,950","44,450","43,700",8.09M,-0.23%
403 +20181017,"44,150","44,150","44,500","44,000",8.30M,1.26%
404 +20181016,"43,600","43,700","44,150","43,350",6.80M,-0.46%
405 +20181015,"43,800","44,050","44,050","43,350",7.19M,-0.45%
406 +20181014,"44,000","44,000","44,000","44,000",-,0.00%
407 +20181012,"44,000","43,200","44,650","43,200",12.37M,2.09%
408 +20181011,"43,100","44,000","44,650","43,100",18.87M,-4.86%
409 +20181010,"45,300","45,250","45,500","44,500",9.75M,0.78%
410 +20181009,"44,950","44,950","44,950","44,950",-,0.00%
411 +20181008,"44,950","44,200","45,200","44,200",6.57M,0.56%
412 +20181007,"44,700","44,700","44,700","44,700",-,0.00%
413 +20181005,"44,700","44,800","45,500","44,550",10.51M,0.00%
414 +20181004,"44,700","45,150","45,600","44,700",11.34M,-2.19%
415 +20181003,"45,700","45,700","45,700","45,700",-,0.00%
416 +20181002,"45,700","46,450","46,700","45,700",6.34M,-1.40%
417 +20181001,"46,350","46,450","46,800","45,800",6.31M,-0.22%
418 +20180930,"46,450","46,450","46,450","46,450",-,0.00%
419 +20180928,"46,450","47,250","47,250","46,300",10.81M,-2.21%
420 +20180927,"47,500","46,950","47,500","46,450",15.26M,0.21%
421 +20180926,"47,400","47,400","47,400","47,400",-,0.00%
422 +20180921,"47,400","46,550","47,550","46,550",14.39M,0.32%
423 +20180920,"47,250","46,850","47,600","46,400",13.40M,2.38%
424 +20180919,"46,150","46,000","46,200","45,700",9.26M,1.43%
425 +20180918,"45,500","44,950","45,900","44,700",8.97M,0.78%
426 +20180917,"45,150","45,550","45,800","44,900",8.10M,-1.53%
427 +20180916,"45,850","45,850","45,850","45,850",-,0.00%
428 +20180914,"45,850","45,000","45,850","44,900",12.03M,4.09%
429 +20180913,"44,050","44,550","44,750","44,000",17.93M,-1.12%
430 +20180912,"44,550","44,900","45,100","44,500",11.17M,-1.11%
431 +20180911,"45,050","45,550","45,900","45,050",7.81M,-0.99%
432 +20180910,"45,500","45,450","45,550","45,000",7.87M,1.34%
433 +20180909,"44,900","44,900","44,900","44,900",-,0.00%
434 +20180907,"44,900","44,500","45,200","44,400",17.73M,-2.60%
435 +20180906,"46,100","46,200","46,400","45,800",8.57M,-1.07%
436 +20180905,"46,600","47,300","47,450","46,400",9.10M,-2.20%
437 +20180904,"47,650","47,550","47,800","47,200",6.61M,0.42%
438 +20180903,"47,450","48,200","48,300","47,300",7.91M,-2.06%
439 +20180902,"48,450","48,450","48,450","48,450",-,0.00%
440 +20180831,"48,450","47,100","48,450","47,000",13.51M,1.68%
441 +20180830,"47,650","46,950","47,950","46,700",12.16M,1.82%
442 +20180829,"46,800","46,750","46,800","46,400",5.50M,0.54%
443 +20180828,"46,550","46,800","46,950","46,300",7.23M,0.54%
444 +20180827,"46,300","46,100","46,550","46,000",5.11M,0.33%
445 +20180826,"46,150","46,150","46,150","46,150",-,0.00%
446 +20180824,"46,150","45,900","46,400","45,550",6.44M,-0.11%
447 +20180823,"46,200","46,150","46,200","45,700",6.60M,0.22%
448 +20180822,"46,100","45,150","46,200","44,900",11.63M,2.90%
449 +20180821,"44,800","43,700","44,900","43,700",9.47M,2.17%
450 +20180820,"43,850","43,500","44,200","43,500",7.20M,-0.57%
451 +20180819,"44,100","44,100","44,100","44,100",-,0.00%
452 +20180817,"44,100","44,050","44,400","44,050",6.54M,-0.34%
453 +20180816,"44,250","43,800","44,650","43,700",10.07M,-1.99%
454 +20180815,"45,150","45,150","45,150","45,150",-,0.00%
455 +20180814,"45,150","44,850","45,400","44,850",6.37M,0.22%
456 +20180813,"45,050","44,950","45,100","44,650",9.78M,-0.77%
457 +20180812,"45,400","45,400","45,400","45,400",-,0.00%
458 +20180810,"45,400","46,150","46,400","44,850",16.53M,-3.20%
459 +20180809,"46,900","47,000","47,050","46,450",11.40M,0.21%
460 +20180808,"46,800","47,000","47,000","46,550",6.37M,0.21%
461 +20180807,"46,700","46,300","46,750","45,900",9.02M,1.97%
462 +20180806,"45,800","46,150","46,150","45,750",6.70M,0.11%
463 +20180805,"45,750","45,750","45,750","45,750",-,0.00%
464 +20180803,"45,750","45,850","45,900","45,450",7.44M,0.44%
465 +20180802,"45,550","46,550","46,800","45,500",7.93M,-2.15%
466 +20180801,"46,550","46,050","46,850","46,050",7.09M,0.65%
467 +20180731,"46,250","46,200","46,450","46,000",7.59M,-0.54%
468 +20180730,"46,500","46,550","46,800","46,350",5.52M,-0.85%
469 +20180729,"46,900","46,900","46,900","46,900",-,0.00%
470 +20180727,"46,900","46,450","47,000","46,450",4.75M,0.00%
471 +20180726,"46,900","46,100","47,000","46,000",7.34M,1.63%
472 +20180725,"46,150","46,250","46,550","45,900",7.21M,0.00%
473 +20180724,"46,150","46,350","46,600","45,950",7.92M,-0.75%
474 +20180723,"46,500","47,100","47,200","46,150",10.81M,-2.00%
475 +20180722,"47,450","47,450","47,450","47,450",-,0.00%
476 +20180720,"47,450","47,000","47,600","46,700",10.23M,1.17%
477 +20180719,"46,900","47,050","47,200","46,600",9.70M,0.75%
478 +20180718,"46,550","46,700","47,200","46,450",10.88M,1.53%
479 +20180717,"45,850","46,150","46,200","45,600",8.76M,-0.43%
480 +20180716,"46,050","46,800","46,800","46,000",7.54M,-0.97%
481 +20180715,"46,500","46,500","46,500","46,500",-,0.00%
482 +20180713,"46,500","45,800","46,500","45,750",10.96M,2.20%
483 +20180712,"45,500","45,900","46,250","45,450",11.78M,-1.09%
484 +20180711,"46,000","46,400","46,450","45,400",10.82M,-0.65%
485 +20180710,"46,300","46,200","46,550","46,100",9.27M,1.54%
486 +20180709,"45,600","45,500","46,100","45,200",11.59M,1.56%
487 +20180708,"44,900","44,900","44,900","44,900",-,0.00%
488 +20180706,"44,900","45,500","45,850","44,650",17.78M,-2.29%
489 +20180705,"45,950","46,100","46,550","45,600",6.91M,-0.65%
490 +20180704,"46,250","46,700","47,050","46,050",8.13M,0.22%
491 +20180703,"46,150","45,750","46,450","45,750",10.43M,1.32%
492 +20180702,"45,550","46,500","47,150","45,500",12.75M,-2.36%
493 +20180629,"46,650","46,250","47,150","46,200",13.69M,-0.32%
494 +20180628,"46,800","46,850","47,150","46,600",12.08M,-2.40%
495 +20180627,"47,950","47,450","48,500","47,000",15.09M,2.02%
496 +20180626,"47,000","45,900","47,300","45,900",11.14M,0.75%
497 +20180625,"46,650","47,050","47,050","46,150",9.52M,-1.27%
498 +20180624,"47,250","47,250","47,250","47,250",-,0.00%
499 +20180622,"47,250","47,000","47,250","46,200",10.25M,0.43%
500 +20180621,"47,050","47,900","47,900","47,050",9.98M,0.11%
501 +20180620,"47,000","47,450","47,600","46,850",11.31M,0.00%
502 +20180619,"47,000","47,200","47,350","46,500",15.17M,0.86%
503 +20180618,"46,600","47,600","47,650","46,200",16.62M,-2.20%
504 +20180617,"47,650","47,650","47,650","47,650",-,0.00%
505 +20180615,"47,650","48,500","48,700","47,650",16.84M,-1.14%
506 +20180614,"48,200","49,000","49,000","48,200",19.00M,-2.43%
507 +20180613,"49,400","49,400","49,400","49,400",-,0.00%
508 +20180612,"49,400","49,700","49,800","49,250",11.43M,-1.00%
509 +20180611,"49,900","49,750","50,300","49,350",10.50M,0.50%
510 +20180610,"49,650","49,650","49,650","49,650",-,0.00%
511 +20180608,"49,650","50,200","50,400","49,600",16.58M,-1.88%
512 +20180607,"50,600","51,800","51,800","50,500",13.17M,-1.36%
513 +20180606,"51,300","51,300","51,300","51,300",-,0.00%
514 +20180605,"51,300","51,100","51,400","50,400",8.86M,0.39%
515 +20180604,"51,100","50,800","51,200","50,700",9.29M,-0.39%
516 +20180603,"51,300","51,300","51,300","51,300",-,0.00%
517 +20180601,"51,300","50,500","51,700","49,950",12.44M,1.18%
518 +20180531,"50,700","50,400","50,800","49,850",35.94M,2.42%
519 +20180530,"49,500","51,300","51,500","49,100",20.20M,-3.51%
520 +20180529,"51,300","52,200","52,500","51,300",8.43M,-1.91%
521 +20180528,"52,300","52,500","53,000","52,000",9.75M,-0.76%
522 +20180527,"52,700","52,700","52,700","52,700",-,0.00%
523 +20180525,"52,700","51,000","52,800","50,800",15.17M,2.53%
524 +20180524,"51,400","52,000","52,000","51,100",8.18M,-0.77%
525 +20180523,"51,800","50,600","52,000","50,400",17.00M,3.60%
526 +20180522,"50,000","50,000","50,000","50,000",-,0.00%
527 +20180521,"50,000","49,650","50,200","49,100",8.62M,1.01%
528 +20180518,"49,500","49,900","49,900","49,350",6.64M,0.20%
529 +20180517,"49,400","50,300","50,500","49,400",10.30M,-0.90%
530 +20180516,"49,850","49,200","50,200","49,150",15.24M,1.32%
531 +20180515,"49,200","50,200","50,400","49,100",17.01M,-1.80%
532 +20180514,"50,100","51,000","51,100","49,900",14.84M,-2.34%
533 +20180511,"51,300","52,000","52,200","51,200",10.14M,-0.58%
534 +20180510,"51,600","51,700","51,700","50,600",13.76M,1.38%
535 +20180509,"50,900","52,600","52,800","50,900",15.96M,-3.23%
536 +20180508,"52,600","52,600","53,200","51,900",22.96M,1.35%
537 +20180507,"51,900","51,900","51,900","51,900",-,0.00%
538 +20180504,"51,900","53,000","53,900","51,800",39.42M,-2.08%
539 +20180427,"53,000","53,380","53,640","52,440",28.28M,1.65%
540 +20180426,"52,140","50,420","52,160","50,400",17.92M,3.45%
541 +20180425,"50,400","49,220","50,500","49,220",16.55M,-0.12%
542 +20180424,"50,460","51,840","51,860","50,080",15.70M,-2.77%
543 +20180423,"51,900","51,000","52,080","51,000",8.85M,0.54%
544 +20180420,"51,620","51,800","52,260","51,420",11.65M,-2.20%
545 +20180419,"52,780","52,000","52,980","51,540",17.13M,2.76%
546 +20180418,"51,360","51,000","51,360","50,580",13.44M,2.76%
547 +20180417,"49,980","50,240","50,540","49,820",7.76M,-0.72%
548 +20180416,"50,340","50,320","50,600","49,860",7.69M,1.08%
549 +20180413,"49,800","49,600","50,180","49,400",10.26M,1.63%
550 +20180412,"49,000","49,440","49,440","48,880",12.45M,0.29%
551 +20180411,"48,860","49,900","49,900","48,600",10.03M,-0.04%
552 +20180410,"48,880","48,540","49,220","48,040",10.93M,-0.65%
553 +20180409,"49,200","48,260","49,440","48,200",9.88M,1.65%
554 +20180406,"48,400","48,000","48,580","47,400",10.95M,-0.70%
555 +20180405,"48,740","47,400","49,380","47,340",13.24M,3.88%
556 +20180404,"46,920","48,160","48,260","46,920",12.31M,-2.49%
557 +20180403,"48,120","47,880","48,140","47,280",12.69M,-0.87%
558 +20180402,"48,540","49,000","49,220","48,500",7.10M,-1.38%
559 +20180330,"49,220","49,080","49,900","49,080",7.75M,0.37%
560 +20180329,"49,040","48,700","49,560","48,320",9.69M,0.70%
561 +20180328,"48,700","49,100","49,100","48,340",15.06M,-2.56%
562 +20180327,"49,980","50,320","50,460","49,080",11.84M,-0.60%
563 +20180326,"50,280","49,420","50,280","49,040",10.04M,1.13%
564 +20180323,"49,720","50,340","50,720","49,600",14.66M,-3.98%
565 +20180322,"51,780","51,060","51,780","51,040",8.28M,1.41%
566 +20180321,"51,060","51,780","51,780","51,060",8.33M,-0.27%
567 +20180320,"51,200","50,700","51,200","50,100",8.08M,0.91%
568 +20180319,"50,740","50,620","51,340","50,440",8.21M,-0.78%
569 +20180316,"51,140","51,220","51,420","50,240",12.11M,-0.78%
570 +20180315,"51,540","52,000","52,020","51,020",8.64M,-0.43%
571 +20180314,"51,760","51,020","52,000","51,000",12.51M,0.19%
572 +20180313,"51,660","50,760","51,660","50,360",20.06M,3.86%
573 +20180312,"49,740","50,560","50,780","49,580",8.64M,0.00%
574 +20180309,"49,740","49,440","50,540","49,240",14.23M,1.10%
575 +20180308,"49,200","49,200","49,480","48,080",19.43M,1.19%
576 +20180307,"48,620","48,200","48,900","47,220",21.20M,3.40%
577 +20180306,"47,020","45,920","47,100","45,820",15.12M,4.03%
578 +20180305,"45,200","45,820","46,160","45,080",13.27M,-1.78%
579 +20180302,"46,020","46,580","46,800","46,000",13.11M,-2.21%
580 +20180228,"47,060","47,380","48,100","47,000",14.97M,-0.68%
581 +20180227,"47,380","48,360","48,380","47,380",9.41M,0.00%
582 +20180226,"47,380","47,280","47,560","47,080",8.20M,0.34%
583 +20180223,"47,220","46,760","47,800","46,760",12.09M,0.98%
584 +20180222,"46,760","47,260","47,260","46,760",8.67M,-1.10%
585 +20180221,"47,280","47,280","47,580","46,840",12.81M,-0.25%
586 +20180220,"47,400","48,040","48,160","47,220",10.10M,-2.03%
587 +20180219,"48,380","49,800","49,800","47,860",14.77M,-1.27%
588 +20180214,"49,000","48,080","49,100","47,940",16.88M,3.07%
589 +20180213,"47,540","46,200","48,060","46,200",18.75M,3.98%
590 +20180212,"45,720","45,100","46,320","45,040",15.55M,2.28%
591 +20180209,"44,700","44,440","45,180","44,420",17.40M,-2.83%
592 +20180208,"46,000","46,120","46,620","45,980",22.48M,0.44%
593 +20180207,"45,800","48,240","48,260","45,800",23.37M,-3.42%
594 +20180206,"47,420","46,600","47,920","46,580",18.25M,-1.04%
595 +20180205,"47,920","46,500","48,320","46,000",25.87M,0.46%
596 +20180202,"47,700","49,380","49,400","47,700",27.89M,-4.26%
597 +20180201,"49,820","50,620","50,960","49,720",25.79M,-0.16%
598 +20180131,"49,900","50,020","54,140","49,600",64.19M,0.20%
599 +20180130,"49,800","50,440","50,640","49,780",12.20M,-2.77%
600 +20180129,"51,220","51,200","51,480","50,900",11.64M,0.87%
601 +20180126,"50,780","50,500","50,780","49,840",10.34M,1.03%
602 +20180125,"50,260","49,220","50,360","49,160",10.99M,1.86%
603 +20180124,"49,340","48,860","49,700","48,560",9.17M,0.37%
604 +20180123,"49,160","48,660","49,160","48,300",12.86M,1.91%
605 +20180122,"48,240","48,640","48,680","47,960",12.31M,-2.19%
606 +20180119,"49,320","50,380","50,380","49,040",9.04M,-1.16%
607 +20180118,"49,900","50,020","50,640","49,820",14.56M,0.56%
608 +20180117,"49,620","50,020","50,020","49,060",10.57M,-0.76%
609 +20180116,"50,000","48,760","50,140","48,620",13.20M,3.01%
610 +20180115,"48,540","48,800","48,980","47,920",10.08M,0.71%
611 +20180112,"48,200","48,240","48,480","46,760",26.77M,-0.08%
612 +20180111,"48,240","48,200","49,260","48,020",23.95M,-1.23%
613 +20180110,"48,840","50,500","50,520","48,640",18.53M,-3.10%
614 +20180109,"50,400","51,460","51,720","49,980",17.83M,-3.11%
615 +20180108,"52,020","52,400","52,520","51,500",8.36M,-0.19%
616 +20180105,"52,120","51,300","52,120","51,200",9.33M,2.04%
617 +20180104,"51,080","52,120","52,180","50,640",11.64M,-1.05%
618 +20180103,"51,620","52,540","52,560","51,420",9.92M,1.18%
619 +20180102,"51,020","51,380","51,400","50,780",8.46M,0.12%
620 +20171228,"50,960","49,560","50,960","49,500",8.88M,3.24%
621 +20171227,"49,360","48,960","49,560","48,460",10.71M,2.41%
622 +20171226,"48,200","49,760","50,100","48,200",15.91M,-3.02%
623 +20171222,"49,700","49,400","49,960","49,240",11.17M,1.14%
624 +20171221,"49,140","51,000","51,060","49,100",15.31M,-3.42%
625 +20171220,"50,880","51,500","51,760","50,820",10.07M,-1.32%
626 +20171219,"51,560","51,540","52,080","51,520",10.51M,0.70%
627 +20171218,"51,200","50,620","51,240","50,620",7.19M,1.15%
628 +20171215,"50,620","51,240","51,480","50,520",14.85M,-0.86%
629 +20171214,"51,060","51,320","52,280","51,060",19.98M,-0.51%
630 +20171213,"51,320","52,100","52,100","51,100",11.08M,-1.50%
631 +20171212,"52,100","51,820","52,100","51,660",8.05M,0.62%
632 +20171211,"51,780","52,000","52,040","51,500",8.00M,-0.42%
633 +20171208,"52,000","51,360","52,000","51,040",10.66M,2.48%
634 +20171207,"50,740","50,040","50,980","50,020",11.03M,1.44%
635 +20171206,"50,020","51,260","51,560","50,020",10.83M,-2.42%
636 +20171205,"51,260","50,600","51,300","50,280",9.20M,-0.16%
637 +20171204,"51,340","50,840","51,340","50,020",13.62M,0.98%
638 +20171201,"50,840","50,800","51,780","50,800",12.46M,0.08%
639 +20171130,"50,800","50,800","51,860","50,200",28.09M,-3.42%
640 +20171129,"52,600","53,200","53,240","52,500",9.58M,-1.28%
641 +20171128,"53,280","52,700","53,280","51,720",13.39M,1.22%
642 +20171127,"52,640","55,360","55,360","52,640",18.09M,-5.08%
643 +20171124,"55,460","55,300","55,500","55,180",4.42M,0.29%
644 +20171123,"55,300","55,960","55,980","55,020",6.21M,-1.18%
645 +20171122,"55,960","55,980","56,200","55,620",7.89M,1.23%
646 +20171121,"55,280","55,400","55,840","55,280",9.92M,0.14%
647 +20171120,"55,200","55,900","55,980","55,200",9.45M,-1.11%
648 +20171117,"55,820","56,400","56,880","55,820",10.37M,0.07%
649 +20171116,"55,780","55,440","56,000","55,400",7.62M,0.80%
650 +20171115,"55,340","55,920","56,320","55,320",8.75M,-1.04%
651 +20171114,"55,920","56,380","56,740","55,920",6.72M,-0.82%
652 +20171113,"56,380","56,400","56,800","56,100",7.47M,-0.04%
653 +20171110,"56,400","55,800","56,540","55,780",6.64M,0.11%
654 +20171109,"56,340","56,920","56,920","55,900",11.99M,-0.74%
655 +20171108,"56,760","56,100","57,060","55,860",8.36M,1.18%
656 +20171107,"56,100","56,380","56,920","55,860",7.46M,-0.50%
657 +20171106,"56,380","56,380","56,500","55,340",8.96M,0.00%
658 +20171103,"56,380","57,060","57,140","55,860",9.93M,-1.19%
659 +20171102,"57,060","57,500","57,520","56,760",10.18M,-0.28%
660 +20171101,"57,220","57,500","57,500","56,180",14.28M,3.89%
661 +20171031,"55,080","54,060","55,440","53,500",13.30M,1.92%
662 +20171030,"54,040","53,780","54,320","53,700",8.19M,1.81%
663 +20171027,"53,080","52,400","53,320","52,140",7.39M,1.30%
664 +20171026,"52,400","53,720","53,900","52,400",9.76M,-2.78%
665 +20171025,"53,900","54,040","54,420","53,700",5.87M,-0.26%
666 +20171024,"54,040","54,700","54,780","54,040",5.75M,-0.48%
667 +20171023,"54,300","54,600","54,640","54,000",8.30M,0.85%
668 +20171020,"53,840","52,800","54,100","52,800",7.86M,1.62%
669 +20171019,"52,980","54,700","54,700","52,980",11.97M,-3.25%
670 +20171018,"54,760","54,820","55,240","54,040",10.03M,-0.07%
671 +20171017,"54,800","54,020","55,380","54,000",10.57M,1.63%
672 +20171016,"53,920","53,980","54,860","53,760",8.96M,-0.15%
673 +20171013,"54,000","54,540","54,840","53,780",12.52M,-1.46%
674 +20171012,"54,800","54,840","55,160","54,100",13.73M,0.29%
675 +20171011,"54,640","53,600","54,760","53,340",12.90M,3.48%
676 +20171010,"52,800","53,360","53,640","52,800",19.83M,-97.94%
677 +20171009,"2,564,000","2,564,000","2,564,000","2,564,000",0.01K,"4,900.00%"
678 +20170929,"51,280","51,180","51,620","50,840",11.68M,0.04%
679 +20170928,"51,260","52,260","52,460","51,260",12.01M,-0.81%
680 +20170927,"51,680","52,000","52,200","51,500",9.99M,0.04%
681 +20170926,"51,660","53,020","53,120","51,560",15.73M,-3.66%
682 +20170925,"53,620","53,000","53,680","53,000",8.78M,1.17%
683 +20170922,"53,000","52,960","53,600","52,460",13.87M,0.38%
684 +20170921,"52,800","52,220","52,960","52,220",8.30M,1.11%
685 +20170920,"52,220","52,120","52,500","51,840",9.12M,0.19%
686 +20170919,"52,120","52,500","52,640","51,780",9.75M,-0.69%
687 +20170918,"52,480","50,540","52,480","50,520",11.27M,4.13%
688 +20170915,"50,400","50,300","50,560","49,860",10.55M,0.20%
689 +20170914,"50,300","50,060","50,320","49,760",14.06M,1.37%
690 +20170913,"49,620","49,820","50,400","49,440",9.33M,0.04%
691 +20170912,"49,600","50,200","50,220","49,140",11.44M,-0.40%
692 +20170911,"49,800","49,700","50,180","49,500",9.80M,1.47%
693 +20170908,"49,080","48,700","49,180","48,580",10.84M,2.00%
694 +20170907,"48,120","47,000","48,220","47,000",9.64M,2.38%
695 +20170906,"47,000","46,760","47,180","46,700",10.27M,0.51%
696 +20170905,"46,760","46,240","46,900","45,960",9.68M,1.56%
697 +20170904,"46,040","45,780","46,360","45,500",7.88M,-0.95%
698 +20170901,"46,480","46,460","46,640","46,300",9.26M,0.35%
699 +20170831,"46,320","46,220","46,640","46,000",10.08M,0.26%
700 +20170830,"46,200","46,380","46,400","45,960",7.24M,0.26%
701 +20170829,"46,080","45,640","46,080","45,160",12.59M,-0.04%
702 +20170828,"46,100","47,020","47,240","45,960",9.96M,-1.96%
703 +20170825,"47,020","47,880","47,880","46,720",10.80M,-1.05%
704 +20170824,"47,520","47,520","47,660","47,340",8.49M,0.08%
705 +20170823,"47,480","47,780","47,780","47,180",7.68M,1.02%
706 +20170822,"47,000","46,820","47,160","46,700",6.48M,0.34%
707 +20170821,"46,840","47,240","47,240","46,580",4.90M,-0.13%
708 +20170818,"46,900","46,760","47,240","46,240",10.00M,-0.30%
709 +20170817,"47,040","46,960","47,300","46,740",12.11M,1.82%
710 +20170816,"46,200","46,220","46,380","46,000",21.88M,2.67%
711 +20170814,"45,000","45,120","45,400","44,720",18.78M,0.85%
712 +20170811,"44,620","45,120","45,300","44,220",24.82M,-2.79%
713 +20170810,"45,900","46,200","46,320","45,460",22.83M,-0.82%
714 +20170809,"46,280","47,400","47,400","46,240",14.53M,-3.02%
715 +20170808,"47,720","47,980","48,260","47,480",7.93M,0.29%
716 +20170807,"47,580","47,500","48,080","47,440",7.12M,-0.25%
717 +20170804,"47,700","48,160","48,180","47,500",8.36M,-0.17%
718 +20170803,"47,780","49,000","49,000","47,120",15.51M,-2.49%
719 +20170802,"49,000","49,200","49,340","48,600",7.59M,0.82%
720 +20170801,"48,600","48,000","48,840","47,540",11.22M,0.83%
721 +20170731,"48,200","47,420","48,240","46,920",12.65M,0.92%
722 +20170728,"47,760","49,800","49,800","47,380",25.54M,-4.10%
723 +20170727,"49,800","50,000","50,640","49,560",11.43M,-0.08%
724 +20170726,"49,840","49,600","50,020","49,300",10.93M,-0.32%
725 +20170725,"50,000","50,500","50,760","49,940",10.13M,-1.69%
726 +20170724,"50,860","50,700","51,000","50,620",7.09M,-0.43%
727 +20170721,"51,080","50,860","51,160","50,520",8.43M,-0.23%
728 +20170720,"51,200","50,760","51,320","50,560",8.35M,0.91%
729 +20170719,"50,740","50,620","50,820","50,000",11.16M,-0.20%
730 +20170718,"50,840","50,420","50,880","50,360",6.66M,0.39%
731 +20170717,"50,640","50,900","51,020","50,520",7.86M,0.32%
732 +20170714,"50,480","51,000","51,080","50,420",7.98M,-0.16%
733 +20170713,"50,560","50,080","50,940","50,040",15.63M,1.36%
734 +20170712,"49,880","49,000","50,000","48,840",9.47M,1.80%
735 +20170711,"49,000","48,640","49,000","48,280",9.59M,0.70%
736 +20170710,"48,660","48,500","48,900","48,320",10.60M,1.67%
737 +20170707,"47,860","47,740","48,120","47,620",8.04M,-0.42%
738 +20170706,"48,060","48,000","48,100","47,720",10.55M,1.01%
739 +20170705,"47,580","46,820","47,680","46,780",9.77M,1.23%
740 +20170704,"47,000","47,160","47,400","46,900",7.95M,-0.47%
741 +20170703,"47,220","47,500","47,780","47,120",6.80M,-0.67%
742 +20170630,"47,540","47,500","47,620","47,100",11.75M,-0.83%
743 +20170629,"47,940","48,040","48,320","47,940",7.99M,0.50%
744 +20170628,"47,700","47,600","48,000","47,560",9.45M,-1.24%
745 +20170627,"48,300","48,220","48,400","47,900",9.38M,0.04%
746 +20170626,"48,280","47,520","48,360","47,520",8.06M,1.39%
747 +20170623,"47,620","47,600","47,780","47,420",8.63M,-0.71%
748 +20170622,"47,960","47,960","48,080","47,720",8.92M,1.01%
749 +20170621,"47,480","47,740","48,120","47,480",9.65M,-1.37%
750 +20170620,"48,140","47,240","48,140","47,220",14.84M,3.39%
751 +20170619,"46,560","45,580","46,560","45,560",10.64M,2.15%
752 +20170616,"45,580","45,500","45,940","45,460",15.12M,-0.22%
753 +20170615,"45,680","45,680","45,920","45,180",9.15M,0.71%
754 +20170614,"45,360","45,800","46,060","45,240",9.74M,-0.09%
755 +20170613,"45,400","45,140","45,620","45,140",8.62M,0.04%
756 +20170612,"45,380","45,420","45,600","45,140",10.90M,-1.56%
757 +20170609,"46,100","45,680","46,440","45,600",11.73M,2.08%
758 +20170608,"45,160","45,000","45,580","45,000",13.95M,-0.31%
759 +20170607,"45,300","46,500","46,500","45,240",12.19M,-98.03%
760 +20170606,"2,297,000","2,297,000","2,297,000","2,297,000",-,"4,900.00%"
761 +20170605,"45,940","46,040","46,360","45,720",7.56M,-0.04%
762 +20170602,"45,960","45,060","45,960","45,000",12.40M,2.86%
763 +20170601,"44,680","44,860","44,900","44,400",9.74M,-0.04%
764 +20170531,"44,700","44,580","45,020","44,400",18.39M,0.13%
765 +20170530,"44,640","45,520","45,660","44,480",12.42M,-2.15%
766 +20170529,"45,620","46,220","46,400","45,380",8.72M,-1.00%
767 +20170526,"46,080","45,600","46,460","45,540",13.06M,0.88%
768 +20170525,"45,680","45,160","45,680","44,800",12.95M,1.78%
769 +20170524,"44,880","44,860","45,300","44,800",8.60M,-0.09%
770 +20170523,"44,920","45,400","45,580","44,900",12.57M,-0.40%
771 +20170522,"45,100","45,040","45,380","44,760",17.62M,0.85%
772 +20170519,"44,720","45,640","45,780","44,720",15.72M,-2.66%
773 +20170518,"45,940","45,740","46,000","45,540",10.77M,-0.86%
774 +20170517,"46,340","46,120","46,640","46,100",7.41M,-0.09%
775 +20170516,"46,380","46,660","46,800","46,100",8.65M,0.61%
776 +20170515,"46,100","45,620","46,280","45,620",8.00M,0.61%
777 +20170512,"45,820","45,760","46,160","45,660",9.00M,0.70%
778 +20170511,"45,500","45,420","46,180","45,220",20.43M,-0.22%
779 +20170510,"45,600","46,160","47,220","45,600",22.63M,-98.06%
780 +20170509,"2,351,000","2,351,000","2,351,000","2,351,000",0.04K,"4,900.00%"
781 +20170508,"47,020","45,520","47,020","45,340",16.63M,3.30%
782 +20170504,"45,520","45,700","45,700","44,860",11.28M,-97.97%
783 +20170503,"2,245,000","2,245,000","2,245,000","2,245,000",-,"4,900.00%"
784 +20170502,"44,900","45,500","45,500","44,760",14.03M,-97.99%
785 +20170501,"2,231,000","2,231,000","2,231,000","2,231,000",0.00K,"4,900.00%"
786 +20170428,"44,620","45,780","45,800","44,520",21.47M,1.78%
787 +20170427,"43,840","42,700","44,520","41,960",21.48M,2.43%
788 +20170426,"42,800","42,700","42,800","42,520",10.63M,0.23%
789 +20170425,"42,700","41,460","42,740","41,320",14.25M,3.54%
790 +20170424,"41,240","41,260","41,260","40,920",7.01M,1.18%
791 +20170421,"40,760","40,480","41,400","40,480",12.16M,1.19%
792 +20170420,"40,280","40,580","40,800","40,080",19.56M,-1.52%
793 +20170419,"40,900","41,300","41,420","40,900",10.14M,-1.45%
794 +20170418,"41,500","41,680","41,820","41,280",6.35M,-0.14%
795 +20170417,"41,560","42,000","42,080","41,520",4.20M,-1.09%
796 +20170414,"42,020","42,160","42,260","41,760",4.46M,-0.94%
797 +20170413,"42,420","41,660","42,460","41,660",8.48M,1.24%
798 +20170412,"41,900","41,860","41,940","41,700",6.77M,0.72%
799 +20170411,"41,600","41,940","41,940","41,580",5.37M,-0.81%
800 +20170410,"41,940","41,940","41,940","41,500",6.07M,0.82%
801 +20170407,"41,600","41,800","41,820","41,160",8.32M,-0.57%
802 +20170406,"41,840","42,000","42,080","41,600",7.78M,-0.71%
803 +20170405,"42,140","41,900","42,240","41,700",8.75M,0.14%
804 +20170404,"42,080","41,600","42,180","41,520",10.12M,1.54%
805 +20170403,"41,440","41,400","41,720","41,300",7.79M,0.58%
806 +20170331,"41,200","41,820","42,020","41,200",9.74M,-1.86%
807 +20170330,"41,980","41,880","42,440","41,880",8.09M,0.48%
808 +20170329,"41,780","41,740","41,960","41,580",10.08M,0.72%
809 +20170328,"41,480","41,560","41,840","41,380",8.14M,0.68%
810 +20170327,"41,200","41,200","41,880","41,180",9.57M,-0.72%
811 +20170324,"41,500","41,600","41,980","41,080",12.65M,-0.72%
812 +20170323,"41,800","42,200","42,360","41,700",14.99M,-1.55%
813 +20170322,"42,460","41,600","42,460","41,580",15.24M,-0.23%
814 +20170321,"42,560","41,780","42,680","41,760",11.01M,1.58%
815 +20170320,"41,900","42,000","42,120","41,740",9.92M,-1.18%
816 +20170317,"42,400","41,800","42,500","41,720",11.34M,1.34%
817 +20170316,"41,840","41,800","42,180","41,540",9.81M,1.06%
818 +20170315,"41,400","40,800","41,440","40,800",8.30M,0.10%
819 +20170314,"41,360","40,620","41,540","40,500",11.32M,1.87%
820 +20170313,"40,600","40,040","40,980","40,040",7.41M,1.05%
821 +20170310,"40,180","39,960","40,420","39,860",10.12M,-0.05%
822 +20170309,"40,200","40,200","40,300","40,020",11.41M,0.00%
823 +20170308,"40,200","40,200","40,620","40,140",10.84M,0.00%
824 +20170307,"40,200","39,800","40,320","39,800",10.54M,0.30%
825 +20170306,"40,080","39,220","40,220","39,220",12.20M,1.16%
826 +20170303,"39,620","39,340","39,720","39,160",12.69M,-0.25%
827 +20170302,"39,720","38,420","39,860","38,420",20.81M,3.33%
828 +20170228,"38,440","38,060","38,760","37,960",15.00M,1.00%
829 +20170227,"38,060","38,020","38,140","37,700",8.74M,-0.42%
830 +20170224,"38,220","38,960","39,100","38,060",8.79M,-2.45%
831 +20170223,"39,180","39,020","39,440","39,020",10.03M,-0.31%
832 +20170222,"39,300","39,000","39,340","38,980",8.53M,0.92%
833 +20170221,"38,940","38,540","39,560","38,420",10.48M,0.72%
834 +20170220,"38,660","38,220","38,780","38,160",7.47M,2.11%
835 +20170217,"37,860","37,560","38,040","37,280",15.08M,-0.42%
836 +20170216,"38,020","37,800","38,360","37,780",10.22M,0.80%
837 +20170215,"37,720","37,080","37,960","37,080",13.88M,0.37%
838 +20170214,"37,580","37,960","38,260","37,320",13.04M,-1.00%
839 +20170213,"37,960","37,740","38,060","37,720",10.39M,-1.04%
840 +20170210,"38,360","38,400","38,760","38,300",10.72M,-0.10%
841 +20170209,"38,400","38,780","38,840","38,220",13.16M,0.00%
842 +20170208,"38,400","38,740","38,780","38,200",12.87M,-1.08%
843 +20170207,"38,820","39,560","39,580","38,760",12.17M,-1.87%
844 +20170206,"39,560","39,580","39,660","39,140",8.82M,0.25%
845 +20170203,"39,460","39,400","39,500","39,180",8.47M,0.25%
846 +20170202,"39,360","39,600","39,660","39,200",11.89M,0.61%
847 +20170201,"39,120","39,540","39,660","39,040",13.00M,-0.86%
848 +20170131,"39,460","39,900","39,900","39,460",14.17M,-98.02%
849 +20170130,"1,995,000","1,995,000","1,995,000","1,995,000",0.04K,"4,900.00%"
850 +20170126,"39,900","39,420","40,000","39,420",12.90M,1.27%
851 +20170125,"39,400","38,340","39,400","38,320",13.14M,3.25%
852 +20170124,"38,160","38,120","38,580","37,880",9.73M,0.26%
853 +20170123,"38,060","37,200","38,060","37,000",8.50M,2.31%
854 +20170120,"37,200","37,120","37,420","36,880",8.76M,-0.75%
855 +20170119,"37,480","37,720","37,920","37,020",8.97M,1.46%
856 +20170118,"36,940","37,040","37,500","36,620",8.86M,-0.05%
857 +20170117,"36,960","36,580","37,460","36,580",8.09M,0.82%
858 +20170116,"36,660","36,860","37,820","36,320",16.59M,-2.14%
859 +20170113,"37,460","38,100","38,320","37,460",15.38M,-3.45%
860 +20170112,"38,800","38,000","38,800","37,980",11.59M,1.36%
861 +20170111,"38,280","37,520","38,560","37,420",11.86M,2.79%
862 +20170110,"37,240","37,280","37,400","37,080",8.71M,0.05%
863 +20170109,"37,220","36,600","37,500","36,560",12.87M,2.82%
864 +20170106,"36,200","36,180","36,440","36,040",8.38M,1.80%
865 +20170105,"35,560","36,060","36,060","35,540",10.49M,-1.66%
866 +20170104,"36,160","36,500","36,520","36,100",7.31M,-0.88%
867 +20170103,"36,480","36,280","36,620","36,020",7.34M,1.05%
868 +20170102,"36,100","35,980","36,240","35,880",4.65M,-98.00%
869 +20170101,"1,802,000","1,802,000","1,802,000","1,802,000",0.01K,"4,900.00%"
870 +20161229,"36,040","35,420","36,040","35,400",7.00M,0.78%
871 +20161228,"35,760","35,840","35,980","35,600",6.62M,-0.61%
872 +20161227,"35,980","35,980","36,200","35,860",4.60M,0.06%
873 +20161226,"35,960","35,600","36,000","35,560",4.80M,-97.98%
874 +20161225,"1,782,000","1,782,000","1,782,000","1,782,000",0.02K,"4,900.00%"
875 +20161223,"35,640","36,020","36,080","35,600",8.12M,-1.49%
876 +20161222,"36,180","36,260","36,300","35,980",5.33M,0.22%
877 +20161221,"36,100","36,360","36,600","36,020",6.53M,-0.39%
878 +20161220,"36,240","35,920","36,400","35,840",6.13M,0.95%
879 +20161219,"35,900","35,620","36,380","35,620",5.44M,-98.00%
880 +20161218,"1,793,000","1,793,000","1,793,000","1,793,000",-,"4,900.00%"
881 +20161216,"35,860","35,300","36,020","35,200",11.50M,1.93%
882 +20161215,"35,180","34,820","35,500","34,820",5.77M,-1.01%
883 +20161214,"35,540","35,560","35,680","35,280",7.18M,0.62%
884 +20161213,"35,320","34,620","35,440","34,620",10.88M,0.80%
885 +20161212,"35,040","34,660","35,360","34,660",11.18M,-98.03%
886 +20161211,"1,780,000","1,780,000","1,780,000","1,780,000",0.02K,"4,900.00%"
887 +20161209,"35,600","35,900","35,900","35,400",9.52M,-0.56%
888 +20161208,"35,800","35,980","36,020","35,520",16.40M,1.02%
889 +20161207,"35,440","35,040","35,480","35,040",9.54M,1.37%
890 +20161206,"34,960","34,440","35,200","34,400",13.69M,1.75%
891 +20161205,"34,360","34,340","34,680","34,220",8.47M,-98.01%
892 +20161204,"1,727,000","1,727,000","1,727,000","1,727,000",0.04K,"4,900.00%"
893 +20161202,"34,540","34,480","34,760","34,140",14.06M,-1.26%
894 +20161201,"34,980","34,800","35,060","34,660",13.10M,0.17%
895 +20161130,"34,920","33,540","34,940","33,540",25.48M,4.11%
896 +20161129,"33,540","33,800","33,960","33,380",17.66M,0.00%
897 +20161128,"33,540","33,000","33,620","32,800",12.98M,-97.97%
898 +20161127,"1,650,000","1,650,000","1,650,000","1,650,000",0.09K,"4,900.00%"
899 +20161125,"33,000","32,820","33,040","32,660",6.26M,0.00%
900 +20161124,"33,000","32,980","33,040","32,660",7.43M,0.06%
901 +20161123,"32,980","33,220","33,220","32,520",12.22M,0.55%
902 +20161122,"32,800","32,140","32,900","32,000",9.75M,2.95%
903 +20161121,"31,860","31,300","32,120","31,300",8.18M,-97.99%
904 +20161120,"1,586,000","1,586,000","1,586,000","1,586,000",0.01K,"4,900.00%"
905 +20161118,"31,720","31,640","31,760","31,400",9.34M,1.15%
906 +20161117,"31,360","31,100","31,520","30,900",7.86M,0.64%
907 +20161116,"31,160","30,800","31,280","30,800",11.32M,1.23%
908 +20161115,"30,780","31,060","31,620","30,780",13.65M,-0.90%
909 +20161114,"31,060","31,900","31,920","31,040",15.05M,-98.06%
910 +20161113,"1,598,000","1,598,000","1,598,000","1,598,000",0.06K,"4,900.00%"
911 +20161111,"31,960","31,700","32,360","31,700",12.75M,-3.09%
912 +20161110,"32,980","32,600","33,000","32,360",11.79M,3.32%
913 +20161109,"31,920","32,920","33,140","31,820",15.62M,-2.92%
914 +20161108,"32,880","32,980","32,980","32,700",5.37M,0.24%
915 +20161107,"32,800","32,940","33,000","32,680",7.65M,-97.98%
916 +20161106,"1,627,000","1,627,000","1,627,000","1,627,000",0.08K,"4,900.00%"
917 +20161104,"32,540","32,100","32,680","32,100",7.10M,0.68%
918 +20161103,"32,320","32,600","32,800","32,120",10.32M,-1.64%
919 +20161102,"32,860","32,800","33,040","32,620",10.10M,-0.54%
920 +20161101,"33,040","32,600","33,040","32,240",10.15M,0.79%
921 +20161031,"32,780","32,320","32,780","32,220",11.87M,-97.97%
922 +20161030,"1,614,000","1,614,000","1,614,000","1,614,000",0.04K,"4,900.00%"
923 +20161028,"32,280","31,600","32,280","31,600",9.83M,2.61%
924 +20161027,"31,460","31,420","32,340","31,120",13.83M,0.38%
925 +20161026,"31,340","31,940","31,980","31,240",9.73M,-1.88%
926 +20161025,"31,940","32,000","32,080","31,840",9.31M,-0.68%
927 +20161024,"32,160","31,860","32,160","31,800",8.75M,-97.98%
928 +20161023,"1,589,000","1,589,000","1,589,000","1,589,000",0.01K,"4,900.00%"
929 +20161021,"31,780","32,120","32,260","31,760",10.32M,-1.91%
930 +20161020,"32,400","32,520","33,020","32,180",10.22M,-0.31%
931 +20161019,"32,500","31,580","32,860","31,500",14.98M,2.27%
932 +20161018,"31,780","31,440","31,900","31,440",10.19M,-0.06%
933 +20161017,"31,800","31,300","32,040","30,760",12.66M,-97.98%
934 +20161016,"1,577,000","1,577,000","1,577,000","1,577,000",0.05K,"4,900.00%"
935 +20161014,"31,540","30,960","31,760","30,940",14.15M,1.28%
936 +20161013,"31,140","31,000","31,620","30,900",21.05M,1.43%
937 +20161012,"30,700","29,900","30,900","29,880",37.54M,-0.65%
938 +20161011,"30,900","32,000","32,500","30,900",34.09M,-8.04%
939 +20161010,"33,600","33,000","33,780","32,560",25.07M,-98.03%
940 +20161009,"1,706,000","1,706,000","1,706,000","1,706,000",0.03K,"4,900.00%"
941 +20161007,"34,120","34,000","34,320","33,800",21.38M,0.89%
942 +20161006,"33,820","33,920","34,000","33,340",27.88M,4.45%
943 +20161005,"32,380","32,020","32,520","31,940",11.45M,0.31%
944 +20161004,"32,280","32,200","32,480","32,120",10.24M,-97.98%
945 +20161003,"1,598,000","1,598,000","1,598,000","1,598,000",-,"4,900.00%"
946 +20160930,"31,960","31,800","32,300","31,700",11.53M,-0.13%
947 +20160929,"32,000","31,460","32,380","31,440",11.38M,2.11%
948 +20160928,"31,340","31,080","31,460","31,080",7.81M,-0.13%
949 +20160927,"31,380","31,000","31,500","30,660",9.83M,0.06%
950 +20160926,"31,360","31,420","31,920","31,280",13.15M,-98.00%
951 +20160925,"1,571,000","1,571,000","1,571,000","1,571,000",0.02K,"4,900.00%"
952 +20160923,"31,420","31,640","31,960","31,320",16.72M,-2.90%
953 +20160922,"32,360","32,000","32,820","31,980",11.01M,1.63%
954 +20160921,"31,840","31,900","32,020","31,520",9.96M,0.44%
955 +20160920,"31,700","31,180","31,760","31,180",12.92M,1.73%
956 +20160919,"31,160","30,760","31,380","30,720",21.33M,-97.96%
957 +20160918,"1,527,000","1,527,000","1,527,000","1,527,000",0.02K,"4,900.00%"
958 +20160913,"30,540","30,160","30,980","29,900",24.65M,4.23%
959 +20160912,"29,300","29,800","30,120","29,120",26.22M,-98.14%
960 +20160911,"1,575,000","1,575,000","1,575,000","1,575,000",-,"4,900.00%"
961 +20160909,"31,500","32,220","32,360","31,280",13.04M,-3.90%
962 +20160908,"32,780","32,460","32,780","32,280",11.84M,1.11%
963 +20160907,"32,420","32,960","33,040","32,420",9.57M,-1.34%
964 +20160906,"32,860","32,140","32,900","31,940",7.66M,2.30%
965 +20160905,"32,120","31,800","32,300","31,700",8.36M,-97.99%
966 +20160904,"1,597,000","1,597,000","1,597,000","1,597,000",0.09K,"4,900.00%"
967 +20160902,"31,940","31,900","32,260","31,760",8.07M,0.63%
968 +20160901,"31,740","31,660","31,760","31,260",18.74M,-2.04%
969 +20160831,"32,400","32,820","32,820","32,220",20.48M,-1.52%
970 +20160830,"32,900","32,940","33,420","32,660",7.84M,0.30%
971 +20160829,"32,800","32,040","32,800","31,940",8.84M,-97.97%
972 +20160828,"1,612,000","1,612,000","1,612,000","1,612,000",-,"4,900.00%"
973 +20160826,"32,240","32,120","32,460","32,060",12.05M,-1.65%
974 +20160825,"32,780","32,600","33,180","32,440",14.12M,-0.85%
975 +20160824,"33,060","33,600","33,640","32,720",15.87M,-2.02%
976 +20160823,"33,740","33,300","33,880","33,140",10.93M,1.32%
977 +20160822,"33,300","33,480","33,840","33,180",12.59M,-98.01%
978 +20160821,"1,675,000","1,675,000","1,675,000","1,675,000",0.02K,"4,900.00%"
979 +20160819,"33,500","32,760","33,500","32,720",15.29M,2.13%
980 +20160818,"32,800","31,340","32,880","31,320",17.87M,4.73%
981 +20160817,"31,320","31,380","31,400","31,020",6.85M,-0.13%
982 +20160816,"31,360","30,900","31,520","30,900",10.70M,1.49%
983 +20160812,"30,900","31,180","31,400","30,880",10.37M,-0.90%
984 +20160811,"31,180","30,820","31,180","30,520",10.37M,1.17%
985 +20160810,"30,820","31,340","31,400","30,680",12.06M,-1.66%
986 +20160809,"31,340","31,480","31,580","31,140",8.76M,-0.13%
987 +20160808,"31,380","31,320","31,500","31,120",10.32M,-97.99%
988 +20160807,"1,561,000","1,561,000","1,561,000","1,561,000",0.06K,"4,900.00%"
989 +20160805,"31,220","30,580","31,280","30,500",6.90M,2.90%
990 +20160804,"30,340","30,380","30,660","30,340",7.00M,0.00%
991 +20160803,"30,340","30,960","30,960","30,340",7.50M,-2.00%
992 +20160802,"30,960","31,360","31,360","30,920",8.31M,-1.28%
993 +20160801,"31,360","31,380","31,600","31,200",11.11M,-97.96%
994 +20160731,"1,539,000","1,539,000","1,539,000","1,539,000",0.01K,"4,900.00%"
995 +20160729,"30,780","30,400","31,140","30,220",14.93M,2.12%
996 +20160728,"30,140","30,660","30,720","29,960",9.46M,-1.31%
997 +20160727,"30,540","30,240","30,580","30,240",6.33M,-0.20%
998 +20160726,"30,600","30,000","30,620","29,960",7.21M,1.86%
999 +20160725,"30,040","30,000","30,460","29,900",7.58M,-98.02%
1000 +20160724,"1,516,000","1,516,000","1,516,000","1,516,000",0.01K,"4,900.00%"
1001 +20160722,"30,320","30,120","30,560","30,120",8.27M,-1.75%
1002 +20160721,"30,860","30,800","30,940","30,740",7.95M,0.19%
1003 +20160720,"30,800","30,660","30,840","30,480",7.76M,0.46%
1004 +20160719,"30,660","30,560","30,800","30,440",10.12M,0.00%
1005 +20160718,"30,660","30,360","30,660","30,000",10.92M,-97.98%
1006 +20160717,"1,518,000","1,518,000","1,518,000","1,518,000",0.00K,"4,900.00%"
1007 +20160715,"30,360","30,000","30,440","29,720",11.59M,1.20%
1008 +20160714,"30,000","29,620","30,000","29,520",12.38M,1.28%
1009 +20160713,"29,620","29,920","29,920","29,260",9.90M,1.16%
1010 +20160712,"29,280","29,980","30,100","29,200",10.97M,-1.68%
1011 +20160711,"29,780","29,200","30,000","29,200",13.43M,-97.96%
1012 +20160710,"1,460,000","1,460,000","1,460,000","1,460,000",0.03K,"4,900.00%"
1013 +20160708,"29,200","29,000","29,500","28,980",12.34M,0.69%
1014 +20160707,"29,000","28,420","29,000","28,320",11.31M,2.04%
1015 +20160706,"28,420","28,940","29,040","28,240",16.04M,-3.27%
1016 +20160705,"29,380","29,320","29,500","29,240",7.77M,0.20%
1017 +20160704,"29,320","29,280","29,480","29,020",7.84M,0.00%
1018 +20160701,"29,320","28,540","29,580","28,540",14.34M,2.88%
1019 +20160630,"28,500","28,160","28,900","27,940",13.65M,2.08%
1020 +20160629,"27,920","28,160","28,240","27,820",10.41M,-0.21%
1021 +20160628,"27,980","27,800","28,080","27,580",10.69M,0.07%
1022 +20160627,"27,960","28,000","28,100","27,700",11.84M,-0.14%
1023 +20160624,"28,000","28,900","28,900","27,200",20.50M,-2.10%
1024 +20160623,"28,600","28,880","28,900","28,540",11.21M,-1.04%
1025 +20160622,"28,900","28,920","29,000","28,620",8.86M,-0.21%
1026 +20160621,"28,960","28,640","28,980","28,520",9.68M,1.19%
1027 +20160620,"28,620","28,540","28,960","28,520",13.42M,-97.99%
1028 +20160619,"1,426,000","1,426,000","1,426,000","1,426,000",0.00K,"4,900.00%"
1029 +20160617,"28,520","28,200","28,700","28,200",16.41M,1.21%
1030 +20160616,"28,180","28,260","28,340","27,900",14.45M,-0.28%
1031 +20160615,"28,260","27,700","28,320","27,660",15.11M,2.39%
1032 +20160614,"27,600","27,420","27,720","27,380",12.17M,0.66%
1033 +20160613,"27,420","27,920","27,920","27,240",14.51M,-98.05%
1034 +20160612,"1,406,000","1,406,000","1,406,000","1,406,000",0.01K,"4,900.00%"
1035 +20160610,"28,120","28,480","28,500","28,080",14.47M,-1.68%
1036 +20160609,"28,600","28,160","28,600","28,120",25.63M,1.71%
1037 +20160608,"28,120","28,380","28,380","27,780",17.35M,0.57%
1038 +20160607,"27,960","27,720","28,040","27,600",23.02M,-97.97%
1039 +20160606,"1,377,000","1,377,000","1,377,000","1,377,000",0.05K,"4,900.00%"
1040 +20160603,"27,540","27,400","27,580","27,280",15.55M,0.88%
1041 +20160602,"27,300","27,000","27,440","26,920",23.64M,2.40%
1042 +20160601,"26,660","25,960","26,820","25,900",23.25M,3.17%
1043 +20160531,"25,840","25,600","26,000","25,360",59.92M,0.94%
1044 +20160530,"25,600","25,940","25,940","25,480",10.23M,-98.00%
1045 +20160529,"1,282,000","1,282,000","1,282,000","1,282,000",-,"4,900.00%"
1046 +20160527,"25,640","26,000","26,020","25,460",13.10M,-1.08%
1047 +20160526,"25,920","25,980","26,060","25,900",11.59M,0.08%
1048 +20160525,"25,900","25,660","25,960","25,480",11.85M,1.89%
1049 +20160524,"25,420","25,720","25,780","25,360",9.56M,-1.17%
1050 +20160523,"25,720","25,380","25,720","25,380",8.04M,-97.97%
1051 +20160522,"1,269,000","1,269,000","1,269,000","1,269,000",-,"4,900.00%"
1052 +20160520,"25,380","25,400","25,600","25,380",7.96M,-0.08%
1053 +20160519,"25,400","25,360","25,540","25,320",9.21M,0.16%
1054 +20160518,"25,360","25,280","25,420","25,100",8.70M,0.32%
1055 +20160517,"25,280","24,980","25,300","24,980",9.09M,1.28%
1056 +20160516,"24,960","25,060","25,260","24,940",11.63M,-98.01%
1057 +20160515,"1,253,000","1,253,000","1,253,000","1,253,000",-,"4,900.00%"
1058 +20160513,"25,060","25,620","25,620","25,020",12.26M,-2.19%
1059 +20160512,"25,620","25,840","25,840","25,500",7.58M,-0.85%
1060 +20160511,"25,840","25,920","25,980","25,740",8.70M,-0.31%
1061 +20160510,"25,920","25,980","26,000","25,760",8.35M,-0.23%
1062 +20160509,"25,980","25,800","26,000","25,700",13.66M,-97.99%
1063 +20160508,"1,290,000","1,290,000","1,290,000","1,290,000",-,"4,900.00%"
1064 +20160504,"25,800","25,440","25,800","25,240",13.89M,2.30%
1065 +20160503,"25,220","25,340","25,400","25,120",7.89M,0.88%
1066 +20160502,"25,000","24,940","25,240","24,900",7.01M,-97.99%
1067 +20160501,"1,245,000","1,245,000","1,245,000","1,245,000",0.01K,"4,900.00%"
1068 +20160429,"24,900","25,200","25,340","24,840",14.63M,-1.58%
1069 +20160428,"25,300","26,000","26,000","25,220",13.94M,-2.69%
1070 +20160427,"26,000","25,880","26,000","25,720",8.24M,0.31%
1071 +20160426,"25,920","25,700","26,100","25,660",7.93M,1.17%
1072 +20160425,"25,620","25,700","25,700","25,420",4.59M,-98.00%
1073 +20160424,"1,280,000","1,280,000","1,280,000","1,280,000",0.04K,"4,900.00%"
1074 +20160422,"25,600","25,880","25,880","25,540",5.73M,-1.08%
1075 +20160421,"25,880","26,000","26,020","25,760",7.24M,-0.38%
1076 +20160420,"25,980","25,640","26,000","25,640",7.32M,0.85%
1077 +20160419,"25,760","25,880","25,920","25,660",7.21M,-0.85%
1078 +20160418,"25,980","25,900","26,100","25,840",6.42M,-98.00%
1079 +20160417,"1,300,000","1,300,000","1,300,000","1,300,000",0.08K,"4,900.00%"
1080 +20160415,"26,000","26,180","26,200","25,800",6.83M,0.00%
1081 +20160414,"26,000","26,000","26,040","25,780",16.63M,-97.96%
1082 +20160413,"1,275,000","1,275,000","1,275,000","1,275,000",-,0.00%
1083 +20160412,"1,275,000","1,270,000","1,281,000","1,266,000",134.02K,0.71%
1084 +20160411,"1,266,000","1,246,000","1,271,000","1,246,000",119.80K,1.61%
1085 +20160408,"1,246,000","1,269,000","1,269,000","1,240,000",251.49K,-1.81%
1086 +20160407,"1,269,000","1,300,000","1,300,000","1,258,000",258.65K,-1.25%
1087 +20160406,"1,285,000","1,269,000","1,291,000","1,268,000",183.89K,1.98%
1088 +20160405,"1,260,000","1,299,000","1,299,000","1,260,000",236.00K,-3.45%
1089 +20160404,"1,305,000","1,279,000","1,305,000","1,279,000",181.84K,2.03%
1090 +20160403,"1,279,000","1,279,000","1,279,000","1,279,000",-,0.00%
1091 +20160401,"1,279,000","1,299,000","1,309,000","1,271,000",263.61K,-2.52%
1092 +20160331,"1,312,000","1,306,000","1,314,000","1,298,000",298.91K,0.31%
1093 +20160330,"1,308,000","1,310,000","1,321,000","1,302,000",267.10K,1.40%
1094 +20160329,"1,290,000","1,294,000","1,300,000","1,285,000",172.35K,-0.31%
1095 +20160328,"1,294,000","1,288,000","1,300,000","1,288,000",120.63K,0.47%
1096 +20160327,"1,288,000","1,288,000","1,288,000","1,288,000",-,0.00%
1097 +20160325,"1,288,000","1,283,000","1,290,000","1,278,000",143.43K,0.47%
1098 +20160324,"1,282,000","1,279,000","1,290,000","1,266,000",218.77K,0.23%
1099 +20160323,"1,279,000","1,269,000","1,279,000","1,262,000",173.91K,0.79%
1100 +20160322,"1,269,000","1,267,000","1,279,000","1,262,000",194.79K,0.16%
1101 +20160321,"1,267,000","1,274,000","1,279,000","1,258,000",181.12K,-0.47%
1102 +20160320,"1,273,000","1,273,000","1,273,000","1,273,000",-,0.00%
1103 +20160318,"1,273,000","1,278,000","1,278,000","1,263,000",223.17K,0.79%
1104 +20160317,"1,263,000","1,265,000","1,296,000","1,257,000",246.72K,0.56%
1105 +20160316,"1,256,000","1,256,000","1,263,000","1,253,000",137.31K,0.24%
1106 +20160315,"1,253,000","1,255,000","1,264,000","1,246,000",167.69K,-0.16%
1107 +20160314,"1,255,000","1,267,000","1,273,000","1,249,000",217.13K,0.48%
1108 +20160311,"1,249,000","1,225,000","1,253,000","1,216,000",244.83K,1.96%
1109 +20160310,"1,225,000","1,208,000","1,236,000","1,201,000",282.75K,2.60%
1110 +20160309,"1,194,000","1,188,000","1,199,000","1,177,000",173.08K,0.17%
1111 +20160308,"1,192,000","1,223,000","1,224,000","1,186,000",215.56K,-2.53%
1112 +20160307,"1,223,000","1,220,000","1,231,000","1,215,000",129.64K,0.66%
1113 +20160306,"1,215,000","1,215,000","1,215,000","1,215,000",-,0.00%
1114 +20160304,"1,215,000","1,220,000","1,228,000","1,202,000",197.05K,-0.41%
1115 +20160303,"1,220,000","1,213,000","1,220,000","1,202,000",214.52K,1.92%
1116 +20160302,"1,197,000","1,200,000","1,207,000","1,196,000",236.32K,1.61%
1117 +20160229,"1,178,000","1,179,000","1,194,000","1,176,000",274.72K,0.51%
1118 +20160226,"1,172,000","1,180,000","1,187,000","1,172,000",177.29K,-0.59%
1119 +20160225,"1,179,000","1,172,000","1,187,000","1,172,000",128.50K,0.60%
1120 +20160224,"1,172,000","1,178,000","1,179,000","1,161,000",140.42K,-0.76%
1121 +20160223,"1,181,000","1,179,000","1,189,000","1,173,000",147.65K,0.51%
1122 +20160222,"1,175,000","1,190,000","1,192,000","1,166,000",174.50K,-1.26%
1123 +20160221,"1,190,000","1,190,000","1,190,000","1,190,000",0.04K,0.00%
1124 +20160219,"1,190,000","1,187,000","1,195,000","1,174,000",176.00K,0.25%
1125 +20160218,"1,187,000","1,203,000","1,203,000","1,178,000",212.32K,0.17%
1126 +20160217,"1,185,000","1,179,000","1,201,000","1,169,000",246.02K,1.46%
1127 +20160216,"1,168,000","1,158,000","1,179,000","1,157,000",179.96K,1.21%
1128 +20160215,"1,154,000","1,154,000","1,160,000","1,144,000",182.64K,2.12%
1129 +20160212,"1,130,000","1,130,000","1,151,000","1,122,000",254.58K,0.00%
1130 +20160211,"1,130,000","1,118,000","1,137,000","1,118,000",305.00K,-2.92%
1131 +20160210,"1,164,000","1,164,000","1,164,000","1,164,000",-,0.00%
1132 +20160205,"1,164,000","1,156,000","1,169,000","1,156,000",183.85K,0.69%
1133 +20160204,"1,156,000","1,150,000","1,161,000","1,148,000",236.55K,0.87%
1134 +20160203,"1,146,000","1,150,000","1,152,000","1,137,000",174.42K,-0.87%
1135 +20160202,"1,156,000","1,161,000","1,166,000","1,147,000",165.56K,-0.60%
1136 +20160201,"1,163,000","1,152,000","1,163,000","1,151,000",258.39K,1.13%
1137 +20160131,"1,150,000","1,150,000","1,150,000","1,150,000",0.02K,0.00%
1138 +20160129,"1,150,000","1,140,000","1,150,000","1,116,000",426.91K,0.44%
1139 +20160128,"1,145,000","1,164,000","1,168,000","1,139,000",315.23K,-2.55%
1140 +20160127,"1,175,000","1,126,000","1,175,000","1,126,000",273.98K,3.34%
1141 +20160126,"1,137,000","1,155,000","1,157,000","1,136,000",152.00K,-2.15%
1142 +20160125,"1,162,000","1,172,000","1,176,000","1,156,000",159.81K,-0.51%
1143 +20160124,"1,168,000","1,168,000","1,168,000","1,168,000",0.01K,0.00%
1144 +20160122,"1,168,000","1,145,000","1,168,000","1,145,000",147.35K,3.27%
1145 +20160121,"1,131,000","1,133,000","1,155,000","1,125,000",182.05K,-0.62%
1146 +20160120,"1,138,000","1,160,000","1,160,000","1,132,000",165.95K,-2.82%
1147 +20160119,"1,171,000","1,128,000","1,171,000","1,128,000",205.97K,4.00%
1148 +20160118,"1,126,000","1,088,000","1,133,000","1,088,000",320.16K,-0.53%
1149 +20160117,"1,132,000","1,132,000","1,132,000","1,132,000",0.00K,0.00%
1150 +20160115,"1,132,000","1,140,000","1,152,000","1,124,000",208.77K,-0.53%
1151 +20160114,"1,138,000","1,131,000","1,142,000","1,131,000",207.87K,-0.87%
1152 +20160113,"1,148,000","1,153,000","1,159,000","1,148,000",143.13K,0.17%
1153 +20160112,"1,146,000","1,148,000","1,166,000","1,144,000",185.02K,-0.52%
1154 +20160111,"1,152,000","1,156,000","1,166,000","1,146,000",240.13K,-1.62%
1155 +20160110,"1,171,000","1,171,000","1,171,000","1,171,000",-,0.00%
1156 +20160108,"1,171,000","1,163,000","1,186,000","1,163,000",244.55K,0.69%
1157 +20160107,"1,163,000","1,166,000","1,183,000","1,151,000",268.93K,-1.02%
1158 +20160106,"1,175,000","1,208,000","1,208,000","1,168,000",360.33K,-2.73%
1159 +20160105,"1,208,000","1,202,000","1,218,000","1,186,000",208.45K,0.25%
1160 +20160104,"1,205,000","1,260,000","1,260,000","1,205,000",305.31K,-4.37%
1161 +20160103,"1,260,000","1,260,000","1,260,000","1,260,000",-,0.00%
1162 +20151230,"1,260,000","1,260,000","1,272,000","1,254,000",203.76K,0.48%
1163 +20151229,"1,254,000","1,265,000","1,266,000","1,241,000",231.87K,-0.95%
1164 +20151228,"1,266,000","1,285,000","1,289,000","1,266,000",227.00K,-1.48%
1165 +20151227,"1,285,000","1,285,000","1,285,000","1,285,000",0.02K,0.00%
1166 +20151224,"1,285,000","1,295,000","1,300,000","1,285,000",151.44K,-0.77%
1167 +20151223,"1,295,000","1,292,000","1,299,000","1,282,000",162.26K,0.23%
1168 +20151222,"1,292,000","1,280,000","1,292,000","1,267,000",204.37K,0.94%
1169 +20151221,"1,280,000","1,278,000","1,285,000","1,261,000",158.06K,0.16%
1170 +20151220,"1,278,000","1,278,000","1,278,000","1,278,000",0.06K,0.00%
1171 +20151218,"1,278,000","1,265,000","1,288,000","1,264,000",168.29K,-0.93%
1172 +20151217,"1,290,000","1,301,000","1,308,000","1,275,000",167.42K,-0.69%
1173 +20151216,"1,299,000","1,278,000","1,310,000","1,278,000",207.76K,1.72%
1174 +20151215,"1,277,000","1,261,000","1,280,000","1,260,000",175.37K,1.27%
1175 +20151214,"1,261,000","1,273,000","1,273,000","1,255,000",222.71K,-1.79%
1176 +20151213,"1,284,000","1,284,000","1,284,000","1,284,000",-,0.00%
1177 +20151211,"1,284,000","1,283,000","1,295,000","1,272,000",204.94K,0.08%
1178 +20151210,"1,283,000","1,263,000","1,293,000","1,263,000",303.46K,1.58%
1179 +20151209,"1,263,000","1,262,000","1,275,000","1,262,000",181.78K,0.08%
1180 +20151208,"1,262,000","1,262,000","1,272,000","1,262,000",133.52K,0.00%
1181 +20151207,"1,262,000","1,269,000","1,275,000","1,262,000",195.60K,-0.55%
1182 +20151206,"1,269,000","1,269,000","1,269,000","1,269,000",-,0.00%
1183 +20151204,"1,269,000","1,275,000","1,280,000","1,267,000",189.85K,-1.63%
1184 +20151203,"1,290,000","1,295,000","1,297,000","1,286,000",166.59K,-0.77%
1185 +20151202,"1,300,000","1,321,000","1,322,000","1,294,000",226.94K,-1.59%
1186 +20151201,"1,321,000","1,294,000","1,322,000","1,288,000",234.22K,2.88%
1187 +20151130,"1,284,000","1,325,000","1,325,000","1,284,000",524.19K,-3.24%
1188 +20151129,"1,327,000","1,327,000","1,327,000","1,327,000",0.03K,0.00%
1189 +20151127,"1,327,000","1,345,000","1,349,000","1,327,000",169.77K,-0.60%
1190 +20151126,"1,335,000","1,299,000","1,340,000","1,299,000",181.70K,2.77%
1191 +20151125,"1,299,000","1,300,000","1,310,000","1,299,000",142.61K,0.00%
1192 +20151124,"1,299,000","1,282,000","1,305,000","1,282,000",153.09K,1.33%
1193 +20151123,"1,282,000","1,285,000","1,302,000","1,281,000",197.45K,-0.23%
1194 +20151122,"1,285,000","1,285,000","1,285,000","1,285,000",-,0.00%
1195 +20151120,"1,285,000","1,289,000","1,296,000","1,278,000",168.72K,-0.31%
1196 +20151119,"1,289,000","1,290,000","1,290,000","1,271,000",189.96K,0.62%
1197 +20151118,"1,281,000","1,272,000","1,290,000","1,272,000",167.41K,0.87%
1198 +20151117,"1,270,000","1,275,000","1,290,000","1,270,000",185.65K,0.55%
1199 +20151116,"1,263,000","1,291,000","1,291,000","1,263,000",183.50K,-2.85%
1200 +20151115,"1,300,000","1,300,000","1,300,000","1,300,000",0.00K,0.00%
1201 +20151113,"1,300,000","1,317,000","1,317,000","1,300,000",177.62K,-1.29%
1202 +20151112,"1,317,000","1,333,000","1,334,000","1,317,000",156.16K,-1.20%
1203 +20151111,"1,333,000","1,321,000","1,345,000","1,321,000",140.44K,0.91%
1204 +20151110,"1,321,000","1,336,000","1,341,000","1,314,000",197.47K,-1.71%
1205 +20151109,"1,344,000","1,338,000","1,344,000","1,321,000",185.42K,0.45%
1206 +20151108,"1,338,000","1,338,000","1,338,000","1,338,000",0.07K,0.00%
1207 +20151106,"1,338,000","1,343,000","1,348,000","1,330,000",157.73K,-0.30%
1208 +20151105,"1,342,000","1,330,000","1,354,000","1,330,000",172.84K,0.90%
1209 +20151104,"1,330,000","1,352,000","1,361,000","1,326,000",277.60K,-1.63%
1210 +20151103,"1,352,000","1,381,000","1,381,000","1,350,000",297.60K,-2.24%
1211 +20151102,"1,383,000","1,385,000","1,393,000","1,374,000",365.16K,0.80%
1212 +20151101,"1,372,000","1,372,000","1,372,000","1,372,000",-,0.00%
1213 +20151030,"1,372,000","1,345,000","1,390,000","1,341,000",499.22K,3.55%
1214 +20151029,"1,325,000","1,330,000","1,392,000","1,324,000",622.86K,1.30%
1215 +20151028,"1,308,000","1,294,000","1,308,000","1,291,000",257.99K,0.77%
1216 +20151027,"1,298,000","1,282,000","1,299,000","1,281,000",131.35K,0.46%
1217 +20151026,"1,292,000","1,298,000","1,298,000","1,272,000",152.07K,0.23%
1218 +20151023,"1,289,000","1,300,000","1,300,000","1,278,000",252.62K,0.70%
1219 +20151022,"1,280,000","1,280,000","1,295,000","1,269,000",229.66K,0.79%
1220 +20151021,"1,270,000","1,265,000","1,282,000","1,259,000",139.50K,0.32%
1221 +20151020,"1,266,000","1,260,000","1,273,000","1,256,000",137.87K,0.80%
1222 +20151019,"1,256,000","1,257,000","1,265,000","1,249,000",116.84K,-0.71%
1223 +20151018,"1,265,000","1,265,000","1,265,000","1,265,000",0.02K,0.00%
1224 +20151016,"1,265,000","1,265,000","1,269,000","1,259,000",142.12K,-0.32%
1225 +20151015,"1,269,000","1,244,000","1,282,000","1,243,000",243.66K,1.20%
1226 +20151014,"1,254,000","1,248,000","1,260,000","1,237,000",174.92K,0.16%
1227 +20151013,"1,252,000","1,260,000","1,272,000","1,248,000",195.35K,-0.63%
1228 +20151012,"1,260,000","1,260,000","1,263,000","1,247,000",302.19K,-0.79%
1229 +20151011,"1,270,000","1,270,000","1,270,000","1,270,000",0.19K,0.00%
1230 +20151008,"1,270,000","1,250,000","1,279,000","1,250,000",501.08K,1.52%
1231 +20151007,"1,251,000","1,198,000","1,252,000","1,186,000",796.77K,8.69%
1232 +20151006,"1,151,000","1,130,000","1,155,000","1,127,000",372.40K,3.23%
1233 +20151005,"1,115,000","1,119,000","1,131,000","1,115,000",211.52K,-0.36%
1234 +20151002,"1,119,000","1,112,000","1,133,000","1,112,000",249.06K,-1.32%
1235 +20151001,"1,134,000","1,140,000","1,145,000","1,121,000",229.67K,0.00%
1236 +20150930,"1,134,000","1,100,000","1,134,000","1,090,000",354.24K,1.98%
1237 +20150929,"1,112,000","1,112,000","1,112,000","1,112,000",0.06K,0.00%
1238 +20150925,"1,112,000","1,120,000","1,125,000","1,109,000",187.68K,-1.24%
1239 +20150924,"1,126,000","1,126,000","1,135,000","1,125,000",132.28K,-0.44%
1240 +20150923,"1,131,000","1,144,000","1,144,000","1,125,000",195.67K,-1.22%
1241 +20150922,"1,145,000","1,143,000","1,150,000","1,130,000",246.92K,-0.43%
1242 +20150921,"1,150,000","1,163,000","1,173,000","1,150,000",197.29K,-3.36%
1243 +20150920,"1,190,000","1,190,000","1,190,000","1,190,000",0.02K,0.00%
1244 +20150918,"1,190,000","1,145,000","1,192,000","1,135,000",421.31K,2.85%
1245 +20150917,"1,157,000","1,153,000","1,157,000","1,144,000",253.47K,0.43%
1246 +20150916,"1,152,000","1,121,000","1,157,000","1,121,000",338.87K,2.58%
1247 +20150915,"1,123,000","1,114,000","1,128,000","1,113,000",212.95K,-0.09%
1248 +20150914,"1,124,000","1,120,000","1,124,000","1,110,000",164.20K,0.81%
1249 +20150913,"1,115,000","1,115,000","1,115,000","1,115,000",-,0.00%
1250 +20150911,"1,115,000","1,126,000","1,133,000","1,115,000",202.74K,-1.68%
1251 +20150910,"1,134,000","1,130,000","1,139,000","1,118,000",357.11K,-1.13%
1252 +20150909,"1,147,000","1,146,000","1,147,000","1,136,000",235.76K,1.41%
1253 +20150908,"1,131,000","1,113,000","1,136,000","1,106,000",181.63K,1.71%
1254 +20150907,"1,112,000","1,129,000","1,129,000","1,105,000",191.48K,-1.51%
1255 +20150906,"1,129,000","1,129,000","1,129,000","1,129,000",0.08K,0.00%
1256 +20150904,"1,129,000","1,144,000","1,144,000","1,118,000",249.85K,0.62%
1257 +20150903,"1,122,000","1,102,000","1,123,000","1,093,000",303.77K,2.94%
1258 +20150902,"1,090,000","1,069,000","1,095,000","1,065,000",312.35K,0.46%
1259 +20150901,"1,085,000","1,089,000","1,098,000","1,081,000",237.36K,-0.37%
1260 +20150831,"1,089,000","1,071,000","1,089,000","1,052,000",407.36K,0.74%
1261 +20150830,"1,081,000","1,081,000","1,081,000","1,081,000",0.12K,0.00%
1262 +20150828,"1,081,000","1,086,000","1,086,000","1,073,000",460.24K,1.31%
1263 +20150827,"1,067,000","1,082,000","1,086,000","1,063,000",493.11K,0.00%
1264 +20150826,"1,067,000","1,068,000","1,074,000","1,050,000",553.23K,-1.11%
1265 +20150825,"1,079,000","1,079,000","1,107,000","1,067,000",390.91K,0.00%
1266 +20150824,"1,079,000","1,088,000","1,115,000","1,033,000",447.19K,-2.00%
1267 +20150823,"1,101,000","1,101,000","1,101,000","1,101,000",0.08K,0.00%
1268 +20150821,"1,101,000","1,099,000","1,128,000","1,096,000",406.15K,-3.34%
1269 +20150820,"1,139,000","1,163,000","1,171,000","1,130,000",215.68K,-1.30%
1270 +20150819,"1,154,000","1,169,000","1,176,000","1,141,000",400.27K,2.03%
1271 +20150818,"1,131,000","1,118,000","1,141,000","1,117,000",223.67K,2.45%
1272 +20150817,"1,104,000","1,140,000","1,141,000","1,104,000",226.39K,-3.16%
1273 +20150816,"1,140,000","1,140,000","1,140,000","1,140,000",0.08K,0.00%
1274 +20150813,"1,140,000","1,153,000","1,153,000","1,138,000",149.62K,-1.21%
1275 +20150812,"1,154,000","1,150,000","1,164,000","1,140,000",222.94K,-0.26%
1276 +20150811,"1,157,000","1,151,000","1,179,000","1,148,000",229.25K,1.40%
1277 +20150810,"1,141,000","1,140,000","1,142,000","1,130,000",114.28K,0.44%
1278 +20150809,"1,136,000","1,136,000","1,136,000","1,136,000",0.03K,0.00%
1279 +20150807,"1,136,000","1,120,000","1,137,000","1,115,000",255.32K,1.88%
1280 +20150806,"1,115,000","1,155,000","1,156,000","1,115,000",400.77K,-3.80%
1281 +20150805,"1,159,000","1,182,000","1,182,000","1,158,000",249.72K,-1.95%
1282 +20150804,"1,182,000","1,175,000","1,187,000","1,170,000",188.61K,0.60%
1283 +20150803,"1,175,000","1,184,000","1,184,000","1,166,000",190.50K,-0.84%
1284 +20150802,"1,185,000","1,185,000","1,185,000","1,185,000",0.03K,0.00%
1285 +20150731,"1,185,000","1,220,000","1,222,000","1,175,000",371.18K,-2.47%
1286 +20150730,"1,215,000","1,258,000","1,260,000","1,215,000",307.77K,-3.80%
1287 +20150729,"1,263,000","1,250,000","1,275,000","1,231,000",272.37K,2.68%
1288 +20150728,"1,230,000","1,224,000","1,251,000","1,219,000",252.48K,0.00%
1289 +20150727,"1,230,000","1,229,000","1,247,000","1,228,000",198.66K,0.08%
1290 +20150726,"1,229,000","1,229,000","1,229,000","1,229,000",0.05K,0.00%
1291 +20150724,"1,229,000","1,227,000","1,238,000","1,224,000",195.74K,-0.41%
1292 +20150723,"1,234,000","1,244,000","1,253,000","1,234,000",198.79K,-1.52%
1293 +20150722,"1,253,000","1,244,000","1,260,000","1,235,000",266.78K,-0.79%
1294 +20150721,"1,263,000","1,275,000","1,277,000","1,247,000",193.61K,-0.94%
1295 +20150720,"1,275,000","1,291,000","1,304,000","1,273,000",128.25K,-2.30%
1296 +20150719,"1,305,000","1,305,000","1,305,000","1,305,000",-,0.00%
1297 +20150717,"1,305,000","1,300,000","1,311,000","1,278,000",297.03K,1.79%
1298 +20150716,"1,282,000","1,223,000","1,287,000","1,223,000",219.35K,3.81%
1299 +20150715,"1,235,000","1,225,000","1,238,000","1,224,000",167.23K,0.82%
1300 +20150714,"1,225,000","1,265,000","1,270,000","1,221,000",369.13K,-3.24%
1301 +20150713,"1,266,000","1,250,000","1,272,000","1,245,000",153.34K,0.56%
1302 +20150712,"1,259,000","1,259,000","1,259,000","1,259,000",0.03K,0.00%
1303 +20150710,"1,259,000","1,257,000","1,266,000","1,248,000",174.78K,-0.47%
1304 +20150709,"1,265,000","1,230,000","1,265,000","1,226,000",274.22K,2.10%
1305 +20150708,"1,239,000","1,240,000","1,251,000","1,232,000",215.39K,-0.08%
1306 +20150707,"1,240,000","1,220,000","1,259,000","1,220,000",237.22K,0.81%
1307 +20150706,"1,230,000","1,253,000","1,260,000","1,223,000",196.76K,-3.00%
1308 +20150705,"1,268,000","1,268,000","1,268,000","1,268,000",0.06K,0.00%
1309 +20150703,"1,268,000","1,287,000","1,294,000","1,267,000",139.35K,-2.39%
1310 +20150702,"1,299,000","1,286,000","1,304,000","1,285,000",151.44K,0.31%
1311 +20150701,"1,295,000","1,268,000","1,302,000","1,259,000",161.75K,2.13%
1312 +20150630,"1,268,000","1,276,000","1,285,000","1,266,000",197.26K,-1.01%
1313 +20150629,"1,281,000","1,269,000","1,285,000","1,256,000",229.47K,0.23%
1314 +20150628,"1,278,000","1,278,000","1,278,000","1,278,000",0.02K,0.00%
1315 +20150626,"1,278,000","1,252,000","1,290,000","1,252,000",206.76K,0.71%
1316 +20150625,"1,269,000","1,290,000","1,303,000","1,269,000",202.41K,-2.53%
1317 +20150624,"1,302,000","1,300,000","1,311,000","1,291,000",195.47K,-1.44%
1318 +20150623,"1,321,000","1,309,000","1,328,000","1,291,000",201.88K,3.12%
1319 +20150622,"1,281,000","1,291,000","1,296,000","1,276,000",124.61K,1.18%
1320 +20150619,"1,266,000","1,266,000","1,278,000","1,260,000",140.65K,0.08%
1321 +20150618,"1,265,000","1,259,000","1,279,000","1,251,000",153.22K,0.88%
1322 +20150617,"1,254,000","1,250,000","1,266,000","1,240,000",188.91K,-0.08%
1323 +20150616,"1,255,000","1,270,000","1,274,000","1,245,000",255.21K,-1.18%
1324 +20150615,"1,270,000","1,255,000","1,274,000","1,255,000",124.29K,-0.55%
1325 +20150614,"1,277,000","1,277,000","1,277,000","1,277,000",0.01K,0.00%
1326 +20150612,"1,277,000","1,278,000","1,287,000","1,264,000",214.65K,1.59%
1327 +20150611,"1,257,000","1,263,000","1,274,000","1,253,000",305.13K,-0.40%
1328 +20150610,"1,262,000","1,282,000","1,294,000","1,262,000",249.47K,-1.56%
1329 +20150609,"1,282,000","1,300,000","1,310,000","1,268,000",272.13K,-2.44%
1330 +20150608,"1,314,000","1,345,000","1,347,000","1,313,000",197.28K,-2.01%
1331 +20150607,"1,341,000","1,341,000","1,341,000","1,341,000",0.08K,0.00%
1332 +20150605,"1,341,000","1,325,000","1,355,000","1,320,000",191.42K,0.30%
1333 +20150604,"1,337,000","1,315,000","1,341,000","1,305,000",387.56K,5.03%
1334 +20150603,"1,273,000","1,303,000","1,316,000","1,265,000",233.35K,-2.30%
1335 +20150602,"1,303,000","1,300,000","1,303,000","1,288,000",174.54K,0.93%
1336 +20150601,"1,291,000","1,300,000","1,301,000","1,288,000",198.52K,-1.22%
1337 +20150529,"1,307,000","1,320,000","1,321,000","1,297,000",340.16K,-0.15%
1338 +20150528,"1,309,000","1,317,000","1,321,000","1,301,000",294.50K,-0.38%
1339 +20150527,"1,314,000","1,360,000","1,366,000","1,313,000",341.53K,-3.52%
1340 +20150526,"1,362,000","1,366,000","1,369,000","1,336,000",193.58K,0.89%
1341 +20150525,"1,350,000","1,350,000","1,350,000","1,350,000",0.04K,0.00%
1342 +20150522,"1,350,000","1,353,000","1,353,000","1,335,000",163.83K,-0.15%
1343 +20150521,"1,352,000","1,371,000","1,372,000","1,344,000",144.04K,-1.02%
1344 +20150520,"1,366,000","1,349,000","1,370,000","1,341,000",205.35K,2.09%
1345 +20150519,"1,338,000","1,321,000","1,355,000","1,307,000",173.56K,1.36%
1346 +20150518,"1,320,000","1,335,000","1,335,000","1,309,000",181.39K,-0.45%
1347 +20150517,"1,326,000","1,326,000","1,326,000","1,326,000",0.01K,0.00%
1348 +20150515,"1,326,000","1,355,000","1,356,000","1,321,000",178.29K,-1.04%
1349 +20150514,"1,340,000","1,341,000","1,343,000","1,328,000",169.26K,0.53%
1350 +20150513,"1,333,000","1,349,000","1,349,000","1,326,000",197.22K,0.15%
1351 +20150512,"1,331,000","1,345,000","1,353,000","1,324,000",212.22K,-0.37%
1352 +20150511,"1,336,000","1,360,000","1,360,000","1,336,000",192.56K,-0.15%
1353 +20150510,"1,338,000","1,338,000","1,338,000","1,338,000",-,0.00%
1354 +20150508,"1,338,000","1,366,000","1,370,000","1,338,000",189.23K,-2.34%
1355 +20150507,"1,370,000","1,362,000","1,375,000","1,354,000",192.17K,0.59%
1356 +20150506,"1,362,000","1,390,000","1,391,000","1,356,000",264.93K,-2.71%
1357 +20150505,"1,400,000","1,400,000","1,400,000","1,400,000",0.01K,0.00%
1358 +20150504,"1,400,000","1,407,000","1,423,000","1,397,000",161.38K,-0.71%
1359 +20150503,"1,410,000","1,410,000","1,410,000","1,410,000",0.04K,0.00%
1360 +20150430,"1,410,000","1,385,000","1,418,000","1,379,000",353.94K,1.81%
1361 +20150429,"1,385,000","1,367,000","1,394,000","1,364,000",217.33K,1.39%
1362 +20150428,"1,366,000","1,390,000","1,400,000","1,359,000",313.53K,-2.08%
1363 +20150427,"1,395,000","1,410,000","1,411,000","1,375,000",330.09K,-1.06%
1364 +20150424,"1,410,000","1,449,000","1,455,000","1,400,000",380.85K,-2.83%
1365 +20150423,"1,451,000","1,470,000","1,470,000","1,440,000",184.72K,-0.68%
1366 +20150422,"1,461,000","1,444,000","1,473,000","1,436,000",252.75K,1.18%
1367 +20150421,"1,444,000","1,421,000","1,445,000","1,420,000",236.29K,0.98%
1368 +20150420,"1,430,000","1,446,000","1,448,000","1,423,000",245.62K,-1.38%
1369 +20150419,"1,450,000","1,450,000","1,450,000","1,450,000",0.01K,0.00%
1370 +20150417,"1,450,000","1,477,000","1,478,000","1,447,000",199.85K,-1.83%
1371 +20150416,"1,477,000","1,462,000","1,477,000","1,447,000",209.98K,2.14%
1372 +20150415,"1,446,000","1,460,000","1,470,000","1,427,000",270.86K,-1.90%
1373 +20150414,"1,474,000","1,485,000","1,485,000","1,465,000",188.50K,-0.34%
1374 +20150413,"1,479,000","1,479,000","1,491,000","1,468,000",203.74K,-0.74%
1375 +20150412,"1,490,000","1,490,000","1,490,000","1,490,000",0.08K,0.00%
1376 +20150410,"1,490,000","1,480,000","1,494,000","1,478,000",188.64K,0.47%
1377 +20150409,"1,483,000","1,470,000","1,489,000","1,470,000",169.62K,0.27%
1378 +20150408,"1,479,000","1,470,000","1,485,000","1,460,000",153.76K,1.16%
1379 +20150407,"1,462,000","1,478,000","1,485,000","1,462,000",186.32K,-0.54%
1380 +20150406,"1,470,000","1,443,000","1,490,000","1,435,000",209.99K,2.51%
1381 +20150405,"1,434,000","1,434,000","1,434,000","1,434,000",0.06K,0.00%
1382 +20150403,"1,434,000","1,434,000","1,440,000","1,420,000",122.24K,0.00%
1383 +20150402,"1,434,000","1,434,000","1,440,000","1,423,000",134.48K,0.77%
1384 +20150401,"1,423,000","1,437,000","1,437,000","1,420,000",144.24K,-1.25%
1385 +20150331,"1,441,000","1,449,000","1,452,000","1,430,000",196.20K,0.91%
1386 +20150330,"1,428,000","1,425,000","1,434,000","1,412,000",146.06K,0.49%
1387 +20150327,"1,421,000","1,415,000","1,448,000","1,415,000",310.93K,0.00%
1388 +20150326,"1,421,000","1,450,000","1,456,000","1,421,000",411.78K,-4.31%
1389 +20150325,"1,485,000","1,487,000","1,488,000","1,472,000",180.31K,0.61%
1390 +20150324,"1,476,000","1,455,000","1,478,000","1,455,000",171.29K,0.61%
1391 +20150323,"1,467,000","1,462,000","1,474,000","1,458,000",154.98K,0.20%
1392 +20150322,"1,464,000","1,464,000","1,464,000","1,464,000",-,0.00%
1393 +20150320,"1,464,000","1,475,000","1,480,000","1,460,000",242.85K,-0.41%
1394 +20150319,"1,470,000","1,510,000","1,510,000","1,470,000",248.25K,-2.20%
1395 +20150318,"1,503,000","1,496,000","1,506,000","1,486,000",243.94K,0.40%
1396 +20150317,"1,497,000","1,470,000","1,500,000","1,460,000",272.16K,1.84%
1397 +20150316,"1,470,000","1,458,000","1,487,000","1,455,000",179.39K,0.89%
1398 +20150315,"1,457,000","1,457,000","1,457,000","1,457,000",0.05K,0.00%
1399 +20150313,"1,457,000","1,461,000","1,479,000","1,455,000",181.90K,0.69%
1400 +20150312,"1,447,000","1,460,000","1,473,000","1,447,000",366.05K,-1.83%
1401 +20150311,"1,474,000","1,419,000","1,479,000","1,418,000",407.47K,3.73%
1402 +20150310,"1,421,000","1,434,000","1,443,000","1,420,000",174.95K,0.07%
1403 +20150309,"1,420,000","1,440,000","1,440,000","1,420,000",141.99K,-1.53%
1404 +20150308,"1,442,000","1,442,000","1,442,000","1,442,000",0.03K,0.00%
1405 +20150306,"1,442,000","1,414,000","1,449,000","1,406,000",234.49K,1.41%
1406 +20150305,"1,422,000","1,439,000","1,443,000","1,417,000",192.13K,-1.04%
1407 +20150304,"1,437,000","1,411,000","1,440,000","1,410,000",231.21K,1.34%
1408 +20150303,"1,418,000","1,435,000","1,437,000","1,406,000",251.32K,-0.35%
1409 +20150302,"1,423,000","1,375,000","1,423,000","1,367,000",425.60K,4.86%
1410 +20150227,"1,357,000","1,375,000","1,376,000","1,357,000",252.42K,-1.31%
1411 +20150226,"1,375,000","1,379,000","1,380,000","1,368,000",146.07K,-0.29%
1412 +20150225,"1,379,000","1,380,000","1,385,000","1,373,000",161.48K,0.88%
1413 +20150224,"1,367,000","1,385,000","1,389,000","1,364,000",190.77K,0.00%
1414 +20150223,"1,367,000","1,378,000","1,390,000","1,366,000",303.16K,-0.73%
1415 +20150222,"1,377,000","1,377,000","1,377,000","1,377,000",0.01K,0.00%
1416 +20150217,"1,377,000","1,374,000","1,377,000","1,364,000",109.61K,0.22%
1417 +20150216,"1,374,000","1,368,000","1,374,000","1,361,000",122.47K,0.96%
1418 +20150215,"1,361,000","1,361,000","1,361,000","1,361,000",0.01K,0.00%
1419 +20150213,"1,361,000","1,360,000","1,361,000","1,345,000",130.03K,1.26%