Showing 4 changed files with 1588 additions and 0 deletions
코드_데이터/collaborative_filtering_aftask_v1.py
0 → 100644
1 | +#!/usr/bin/env python | ||
2 | +# coding: utf-8 | ||
3 | + | ||
4 | +# In[ ]: | ||
5 | + | ||
6 | + | ||
7 | +import pandas as pd | ||
8 | +import numpy as np | ||
9 | +import tensorflow as tf | ||
10 | +from tensorflow import keras | ||
11 | +from tensorflow.keras import layers | ||
12 | +from pathlib import Path | ||
13 | +import matplotlib.pyplot as plt | ||
14 | + | ||
# ---- Load the ratings dataset and build integer encodings ----
# FIX: the original opened a file handle (`open("dataset.csv", 'r')`) and never
# closed it — a resource leak.  pd.read_csv accepts a path directly.
df = pd.read_csv("dataset.csv")

# Map raw user / contents ids to contiguous integer indices (required by the
# embedding layers) and keep the reverse maps for decoding predictions later.
user_ids = df["userid"].unique().tolist()
user2user_encoded = {x: i for i, x in enumerate(user_ids)}
userencoded2user = {i: x for i, x in enumerate(user_ids)}
contents_ids = df["contentsid"].unique().tolist()
contents2contents_encoded = {x: i for i, x in enumerate(contents_ids)}
contents_encoded2contents = {i: x for i, x in enumerate(contents_ids)}
df["user"] = df["userid"].map(user2user_encoded)
df["contents"] = df["contentsid"].map(contents2contents_encoded)

num_users = len(user2user_encoded)
num_contents = len(contents_encoded2contents)
df["rating"] = df["rating"].values.astype(np.float32)
# min and max ratings are used to normalize the targets to [0, 1] below.
min_rating = 0.5
max_rating = 5.0

print(
    "Number of users: {}, Number of Contents: {}, Min rating: {}, Max rating: {}".format(
        num_users, num_contents, min_rating, max_rating
    )
)

# Shuffle once with a fixed seed (reproducibility), then split 90/10.
df = df.sample(frac=1, random_state=42)
x = df[["user", "contents"]].values
# Normalize the targets between 0 and 1. Makes it easy to train.
y = df["rating"].apply(lambda r: (r - min_rating) / (max_rating - min_rating)).values
train_indices = int(0.9 * df.shape[0])
x_train, x_val, y_train, y_val = (
    x[:train_indices],
    x[train_indices:],
    y[:train_indices],
    y[train_indices:],
)
52 | + | ||
EMBEDDING_SIZE = 50


class RecommenderNet(keras.Model):
    """Matrix-factorization recommender: dot(user, contents) + biases -> sigmoid."""

    def __init__(self, num_users, num_contents, embedding_size, **kwargs):
        super(RecommenderNet, self).__init__(**kwargs)
        self.num_users = num_users
        self.num_contents = num_contents
        self.embedding_size = embedding_size
        # The L2 regularizer is stateless, so one instance serves both tables.
        reg = keras.regularizers.l2(1e-6)
        self.user_embedding = layers.Embedding(
            num_users,
            embedding_size,
            embeddings_initializer="he_normal",
            embeddings_regularizer=reg,
        )
        self.user_bias = layers.Embedding(num_users, 1)
        self.contents_embedding = layers.Embedding(
            num_contents,
            embedding_size,
            embeddings_initializer="he_normal",
            embeddings_regularizer=reg,
        )
        self.contents_bias = layers.Embedding(num_contents, 1)

    def call(self, inputs):
        # Column 0 holds encoded user ids, column 1 encoded contents ids.
        users, contents = inputs[:, 0], inputs[:, 1]
        user_vector = self.user_embedding(users)
        contents_vector = self.contents_embedding(contents)
        score = tf.tensordot(user_vector, contents_vector, 2)
        score = score + self.user_bias(users) + self.contents_bias(contents)
        # Sigmoid squashes the prediction into [0, 1], matching the
        # normalized rating targets.
        return tf.nn.sigmoid(score)
87 | + | ||
88 | + | ||
# Build and train the factorization model on the normalized ratings.
model = RecommenderNet(num_users, num_contents, EMBEDDING_SIZE)
model.compile(
    loss="mse",
    optimizer="sgd",
    metrics=[tf.keras.metrics.MeanSquaredError()],
)

history = model.fit(
    x_train,
    y_train,
    batch_size=2,
    epochs=20,
    verbose=1,
    validation_data=(x_val, y_val),
)
103 | + | ||
# Plot the training vs. validation loss per epoch.
plt.plot(history.history["loss"])
plt.plot(history.history["val_loss"])
plt.title("model loss")
plt.ylabel("loss")
plt.xlabel("epoch")
# FIX: the second curve is the validation split (val_loss), not a test set;
# the original legend mislabeled it "test".
plt.legend(["train", "validation"], loc="upper left")
plt.show()
111 | + | ||
# ---- Evaluate on the held-out test file ----
# FIXES vs. the original:
#  * `tf = pd.read_csv(...)` shadowed the `tensorflow` module alias `tf`;
#    the DataFrame is renamed to `test_df`.
#  * The test ids were re-encoded with NEW encoders built from the test file
#    alone, so the integer codes no longer matched the ones the model was
#    trained on, making the evaluation meaningless.  The training encoders
#    (user2user_encoded / contents2contents_encoded) are reused instead.
#  * The CSV file handle was opened and never closed; read_csv takes a path.
test_df = pd.read_csv("dataset_test.csv")

# Encode with the *training* maps; ids unseen during training map to NaN and
# are dropped, since the model has no embedding row for them.
test_df["user"] = test_df["userid"].map(user2user_encoded)
test_df["contents"] = test_df["contentsid"].map(contents2contents_encoded)
test_df = test_df.dropna(subset=["user", "contents"])
test_df["rating"] = test_df["rating"].values.astype(np.float32)

test_df = test_df.sample(frac=1, random_state=42)
x_test = test_df[["user", "contents"]].values.astype(np.int64)
# Normalize the targets with the same min/max used for training.
y_test = (
    test_df["rating"]
    .apply(lambda r: (r - min_rating) / (max_rating - min_rating))
    .values
)

result = model.evaluate(x_test, y_test)
print(result)
132 | + |
코드_데이터/collaborative_filtering_aftask_v2.py
0 → 100644
1 | +#!/usr/bin/env python | ||
2 | +# coding: utf-8 | ||
3 | + | ||
4 | +# In[ ]: | ||
5 | + | ||
6 | + | ||
7 | +import pandas as pd | ||
8 | +import numpy as np | ||
9 | +import tensorflow as tf | ||
10 | +from tensorflow import keras | ||
11 | +from tensorflow.keras import layers | ||
12 | +from pathlib import Path | ||
13 | +import matplotlib.pyplot as plt | ||
14 | + | ||
# Features and targets ship in separate CSVs; join them column-wise.
df_x = pd.read_csv("x_train.csv")
df_y = pd.read_csv("y_train.csv")
df = pd.concat([df_x, df_y], axis=1)

# Build forward/backward integer encodings for users and tasks; the
# embedding layers need contiguous integer ids.
user_ids = df["userid"].unique().tolist()
user2user_encoded = dict((uid, idx) for idx, uid in enumerate(user_ids))
userencoded2user = dict(enumerate(user_ids))
task_ids = df["taskid"].unique().tolist()
task2task_encoded = dict((tid, idx) for idx, tid in enumerate(task_ids))
task_encoded2task = dict(enumerate(task_ids))
df["user"] = df["userid"].map(user2user_encoded)
df["task"] = df["taskid"].map(task2task_encoded)

num_users = len(user2user_encoded)
num_task = len(task_encoded2task)
df["rating"] = df["rating"].values.astype(np.float32)
# Bounds used to squash ratings into [0, 1] and to undo it after prediction.
MIN_RATING = 0.5
MAX_RATING = 5.0

print(
    "Number of users: {}, Number of task: {}, Min rating: {}, Max rating: {}".format(
        num_users, num_task, MIN_RATING, MAX_RATING
    )
)

# Shuffle deterministically, normalize the targets, then split 90/10.
df = df.sample(frac=1, random_state=42)
x = df[["user", "task"]].values
y = df["rating"].apply(lambda r: (r - MIN_RATING) / (MAX_RATING - MIN_RATING)).values
split_at = int(0.9 * df.shape[0])
x_train, y_train = x[:split_at], y[:split_at]
x_val, y_val = x[split_at:], y[split_at:]
53 | + | ||
EMBEDDING_SIZE = 128

class RecommenderNet(keras.Model):
    """Matrix-factorization model: dot(user, task) + biases, sigmoid output."""

    def __init__(self, num_users, num_task, embedding_size, **kwargs):
        super(RecommenderNet, self).__init__(**kwargs)
        self.num_users = num_users
        self.num_task = num_task
        self.embedding_size = embedding_size
        # The L2 regularizer is stateless, so one instance serves both tables.
        reg = keras.regularizers.l2(1e-6)
        self.user_embedding = layers.Embedding(
            num_users,
            embedding_size,
            embeddings_initializer="he_normal",
            embeddings_regularizer=reg,
        )
        self.user_bias = layers.Embedding(num_users, 1)
        self.task_embedding = layers.Embedding(
            num_task,
            embedding_size,
            embeddings_initializer="he_normal",
            embeddings_regularizer=reg,
        )
        self.task_bias = layers.Embedding(num_task, 1)

    def call(self, inputs):
        # inputs[:, 0] are encoded user ids, inputs[:, 1] encoded task ids.
        users, tasks = inputs[:, 0], inputs[:, 1]
        user_vec = self.user_embedding(users)
        task_vec = self.task_embedding(tasks)
        score = tf.tensordot(user_vec, task_vec, 2)
        score = score + self.user_bias(users) + self.task_bias(tasks)
        # Squash into [0, 1], matching the normalized rating targets.
        return tf.nn.sigmoid(score)
87 | + | ||
88 | + | ||
# Build and train the factorization model on the normalized ratings.
model = RecommenderNet(num_users, num_task, EMBEDDING_SIZE)
model.compile(
    loss="mse",
    optimizer="adam",
    metrics=[tf.keras.metrics.MeanSquaredError()],
)

history = model.fit(
    x_train,
    y_train,
    batch_size=8,
    epochs=300,
    verbose=1,
    validation_data=(x_val, y_val),
)
103 | + | ||
# Encode the test pairs with the encoders fitted on the training data.
df_x_test = pd.read_csv('x_test.csv')

df_x_test["user"] = df_x_test["userid"].map(user2user_encoded)
df_x_test["task"] = df_x_test["taskid"].map(task2task_encoded)
# NOTE(review): ids absent from the training data map to NaN here and would
# break the embedding lookup — assumes x_test only contains seen ids; verify.

x_test = df_x_test[["user", "task"]].values

y_pred = model.predict(x_test)

# Undo the [0, 1] normalization to recover ratings on the original scale.
df_y_pred = pd.DataFrame(y_pred, columns=['rating'])
df_y_pred = df_y_pred["rating"].apply(
    lambda p: p * (MAX_RATING - MIN_RATING) + MIN_RATING
)
df_y_pred.to_csv('y_pred.csv', sep=',', columns=['rating'], index=False)
116 | + | ||
# ---- Score predictions against ground truth (RMSE) ----
# FIXES vs. the original:
#  * `mean_squared_error(..., squared=False)` relies on the `squared`
#    parameter, deprecated in scikit-learn 1.4 and removed in 1.6; the RMSE
#    is computed with numpy instead, which also drops the sklearn dependency.
#  * The unused `os` and `sys` imports were removed.
import pandas as pd

gt = pd.read_csv('y_test.csv', header=0)
pr = pd.read_csv('y_pred.csv', header=0)

# Flatten both frames to 1-D float vectors; rows are assumed to be aligned.
gt = gt.to_numpy().astype(float).reshape(-1)
pr = pr.to_numpy().astype(float).reshape(-1)

# Root-mean-squared error, equivalent to mean_squared_error(..., squared=False).
score = float(np.sqrt(np.mean((gt - pr) ** 2)))
print("score:", score)
131 | + |
코드_데이터/dataset.csv
0 → 100644
1 | +userid,contentsid,rating | ||
2 | +1,T000036,5 | ||
3 | +1,T000040,0.5 | ||
4 | +1,T000041,0.5 | ||
5 | +1,T000042,0.5 | ||
6 | +1,T000044,5 | ||
7 | +1,T000045,0.5 | ||
8 | +1,T000046,0.5 | ||
9 | +1,T000047,0.5 | ||
10 | +1,T000048,5 | ||
11 | +1,T000049,0.5 | ||
12 | +1,T000050,0.5 | ||
13 | +1,T000051,0.5 | ||
14 | +1,T000052,0.5 | ||
15 | +1,T000053,0.5 | ||
16 | +1,T000054,0.5 | ||
17 | +1,T000056,0.5 | ||
18 | +1,T000057,5 | ||
19 | +1,T000058,5 | ||
20 | +1,T000059,0.5 | ||
21 | +1,T000060,5 | ||
22 | +1,T000061,0.5 | ||
23 | +1,T000062,0.5 | ||
24 | +1,T000078,0.5 | ||
25 | +1,T000077,0.5 | ||
26 | +1,T000076,0.5 | ||
27 | +1,T000075,0.5 | ||
28 | +1,T000074,0.5 | ||
29 | +1,T000073,0.5 | ||
30 | +1,T000071,0.5 | ||
31 | +1,T000070,0.5 | ||
32 | +1,T000069,0.5 | ||
33 | +1,T000063,0.5 | ||
34 | +1,T000065,5 | ||
35 | +1,T000066,5 | ||
36 | +1,T000067,0.5 | ||
37 | +1,T000068,0.5 | ||
38 | +1,T001631,0.5 | ||
39 | +1,T001629,0.5 | ||
40 | +1,T001628,0.5 | ||
41 | +1,T000312,0.5 | ||
42 | +1,T000311,0.5 | ||
43 | +1,T000310,0.5 | ||
44 | +1,T000309,0.5 | ||
45 | +1,T000307,0.5 | ||
46 | +1,T000306,0.5 | ||
47 | +1,T000305,0.5 | ||
48 | +1,T000304,0.5 | ||
49 | +1,T000303,0.5 | ||
50 | +1,T000302,0.5 | ||
51 | +1,T000301,0.5 | ||
52 | +1,T000299,0.5 | ||
53 | +1,T000298,0.5 | ||
54 | +1,T000297,0.5 | ||
55 | +1,T000295,0.5 | ||
56 | +1,T000294,0.5 | ||
57 | +1,T000292,0.5 | ||
58 | +1,T000291,0.5 | ||
59 | +1,T000289,0.5 | ||
60 | +1,T000288,0.5 | ||
61 | +1,T000286,0.5 | ||
62 | +1,T000212,0.5 | ||
63 | +1,T001617,0.5 | ||
64 | +1,T001615,0.5 | ||
65 | +1,T001614,0.5 | ||
66 | +1,T001612,0.5 | ||
67 | +1,T001611,0.5 | ||
68 | +1,T001610,0.5 | ||
69 | +1,T001609,0.5 | ||
70 | +1,T001608,0.5 | ||
71 | +1,T001607,0.5 | ||
72 | +1,T001606,0.5 | ||
73 | +1,T001604,0.5 | ||
74 | +1,T001603,0.5 | ||
75 | +1,T001602,0.5 | ||
76 | +1,T001600,0.5 | ||
77 | +1,T000287,0.5 | ||
78 | +1,T001921,0.5 | ||
79 | +1,T001920,5 | ||
80 | +1,T001918,0.5 | ||
81 | +1,T001917,0.5 | ||
82 | +1,T001916,0.5 | ||
83 | +1,T001915,0.5 | ||
84 | +1,T001914,5 | ||
85 | +1,T001911,0.5 | ||
86 | +1,T001942,0.5 | ||
87 | +1,T001947,0.5 | ||
88 | +1,T001943,0.5 | ||
89 | +1,T001944,0.5 | ||
90 | +1,T001945,0.5 | ||
91 | +1,T001956,0.5 | ||
92 | +2,T000036,5 | ||
93 | +2,T000040,0.5 | ||
94 | +2,T000041,0.5 | ||
95 | +2,T000042,0.5 | ||
96 | +2,T000043,0.5 | ||
97 | +2,T000044,5 | ||
98 | +2,T000045,5 | ||
99 | +2,T000047,0.5 | ||
100 | +2,T000048,0.5 | ||
101 | +2,T000049,0.5 | ||
102 | +2,T000050,0.5 | ||
103 | +2,T000052,0.5 | ||
104 | +2,T000053,5 | ||
105 | +2,T000054,0.5 | ||
106 | +2,T000055,0.5 | ||
107 | +2,T000056,5 | ||
108 | +2,T000057,0.5 | ||
109 | +2,T000058,0.5 | ||
110 | +2,T000059,0.5 | ||
111 | +2,T000060,0.5 | ||
112 | +2,T000061,0.5 | ||
113 | +2,T000062,0.5 | ||
114 | +2,T000078,0.5 | ||
115 | +2,T000077,0.5 | ||
116 | +2,T000076,0.5 | ||
117 | +2,T000075,0.5 | ||
118 | +2,T000073,0.5 | ||
119 | +2,T000072,0.5 | ||
120 | +2,T000071,0.5 | ||
121 | +2,T000070,0.5 | ||
122 | +2,T000069,0.5 | ||
123 | +2,T000063,5 | ||
124 | +2,T000064,5 | ||
125 | +2,T000065,0.5 | ||
126 | +2,T000066,0.5 | ||
127 | +2,T000067,0.5 | ||
128 | +2,T000068,0.5 | ||
129 | +2,T001631,0.5 | ||
130 | +2,T001630,0.5 | ||
131 | +2,T001629,0.5 | ||
132 | +2,T001628,0.5 | ||
133 | +2,T000312,0.5 | ||
134 | +2,T000311,0.5 | ||
135 | +2,T000310,0.5 | ||
136 | +2,T000309,0.5 | ||
137 | +2,T000306,0.5 | ||
138 | +2,T000305,0.5 | ||
139 | +2,T000304,0.5 | ||
140 | +2,T000303,5 | ||
141 | +2,T000302,0.5 | ||
142 | +2,T000301,0.5 | ||
143 | +2,T000298,0.5 | ||
144 | +2,T000297,0.5 | ||
145 | +2,T000295,5 | ||
146 | +2,T000294,0.5 | ||
147 | +2,T000293,0.5 | ||
148 | +2,T000292,0.5 | ||
149 | +2,T000289,0.5 | ||
150 | +2,T000288,0.5 | ||
151 | +2,T000286,0.5 | ||
152 | +2,T000212,5 | ||
153 | +2,T001617,0.5 | ||
154 | +2,T001616,0.5 | ||
155 | +2,T001615,0.5 | ||
156 | +2,T001614,0.5 | ||
157 | +2,T001612,0.5 | ||
158 | +2,T001611,0.5 | ||
159 | +2,T001610,0.5 | ||
160 | +2,T001609,0.5 | ||
161 | +2,T001608,0.5 | ||
162 | +2,T001606,0.5 | ||
163 | +2,T001604,0.5 | ||
164 | +2,T001603,0.5 | ||
165 | +2,T001602,5 | ||
166 | +2,T001601,0.5 | ||
167 | +2,T001600,5 | ||
168 | +2,T000287,0.5 | ||
169 | +2,T001921,0.5 | ||
170 | +2,T001919,0.5 | ||
171 | +2,T001918,0.5 | ||
172 | +2,T001917,0.5 | ||
173 | +2,T001915,0.5 | ||
174 | +2,T001914,0.5 | ||
175 | +2,T001911,5 | ||
176 | +2,T001942,0.5 | ||
177 | +2,T001947,0.5 | ||
178 | +2,T001944,5 | ||
179 | +2,T001945,0.5 | ||
180 | +2,T001946,5 | ||
181 | +2,T001956,0.5 | ||
182 | +3,T000040,0.5 | ||
183 | +3,T000041,5 | ||
184 | +3,T000042,0.5 | ||
185 | +3,T000043,5 | ||
186 | +3,T000044,0.5 | ||
187 | +3,T000045,0.5 | ||
188 | +3,T000046,5 | ||
189 | +3,T000047,0.5 | ||
190 | +3,T000048,5 | ||
191 | +3,T000050,5 | ||
192 | +3,T000051,0.5 | ||
193 | +3,T000052,0.5 | ||
194 | +3,T000054,5 | ||
195 | +3,T000055,0.5 | ||
196 | +3,T000056,0.5 | ||
197 | +3,T000057,0.5 | ||
198 | +3,T000058,0.5 | ||
199 | +3,T000059,5 | ||
200 | +3,T000060,0.5 | ||
201 | +3,T000062,0.5 | ||
202 | +3,T000078,0.5 | ||
203 | +3,T000077,0.5 | ||
204 | +3,T000076,0.5 | ||
205 | +3,T000075,0.5 | ||
206 | +3,T000074,0.5 | ||
207 | +3,T000072,0.5 | ||
208 | +3,T000071,0.5 | ||
209 | +3,T000070,0.5 | ||
210 | +3,T000069,0.5 | ||
211 | +3,T000063,0.5 | ||
212 | +3,T000064,0.5 | ||
213 | +3,T000065,0.5 | ||
214 | +3,T000066,0.5 | ||
215 | +3,T000067,5 | ||
216 | +3,T000068,0.5 | ||
217 | +3,T001631,5 | ||
218 | +3,T001630,5 | ||
219 | +3,T001629,5 | ||
220 | +3,T000312,5 | ||
221 | +3,T000311,0.5 | ||
222 | +3,T000310,0.5 | ||
223 | +3,T000309,5 | ||
224 | +3,T000308,0.5 | ||
225 | +3,T000307,0.5 | ||
226 | +3,T000306,0.5 | ||
227 | +3,T000305,0.5 | ||
228 | +3,T000304,0.5 | ||
229 | +3,T000303,0.5 | ||
230 | +3,T000301,0.5 | ||
231 | +3,T000299,0.5 | ||
232 | +3,T000298,0.5 | ||
233 | +3,T000297,0.5 | ||
234 | +3,T000295,0.5 | ||
235 | +3,T000294,0.5 | ||
236 | +3,T000293,0.5 | ||
237 | +3,T000292,0.5 | ||
238 | +3,T000291,0.5 | ||
239 | +3,T000289,0.5 | ||
240 | +3,T000288,0.5 | ||
241 | +3,T000286,5 | ||
242 | +3,T001617,0.5 | ||
243 | +3,T001615,0.5 | ||
244 | +3,T001614,0.5 | ||
245 | +3,T001613,0.5 | ||
246 | +3,T001612,5 | ||
247 | +3,T001611,0.5 | ||
248 | +3,T001610,5 | ||
249 | +3,T001609,5 | ||
250 | +3,T001608,0.5 | ||
251 | +3,T001607,0.5 | ||
252 | +3,T001604,5 | ||
253 | +3,T001603,0.5 | ||
254 | +3,T001602,0.5 | ||
255 | +3,T001601,5 | ||
256 | +3,T001600,0.5 | ||
257 | +3,T000287,0.5 | ||
258 | +3,T001921,0.5 | ||
259 | +3,T001919,0.5 | ||
260 | +3,T001918,5 | ||
261 | +3,T001917,0.5 | ||
262 | +3,T001916,5 | ||
263 | +3,T001914,0.5 | ||
264 | +3,T001911,0.5 | ||
265 | +3,T001942,0.5 | ||
266 | +3,T001947,0.5 | ||
267 | +3,T001943,0.5 | ||
268 | +3,T001944,5 | ||
269 | +3,T001945,0.5 | ||
270 | +3,T001946,5 | ||
271 | +3,T001956,0.5 | ||
272 | +4,T001942,0.5 | ||
273 | +4,T001943,0.5 | ||
274 | +4,T001944,0.5 | ||
275 | +4,T001945,0.5 | ||
276 | +4,T001946,0.5 | ||
277 | +4,T001956,0.5 | ||
278 | +4,T001920,5 | ||
279 | +4,T001919,5 | ||
280 | +4,T001918,5 | ||
281 | +4,T001917,0.5 | ||
282 | +4,T001916,0.5 | ||
283 | +4,T001915,0.5 | ||
284 | +4,T001914,0.5 | ||
285 | +4,T001911,0.5 | ||
286 | +4,T001616,0.5 | ||
287 | +4,T001615,0.5 | ||
288 | +4,T001614,0.5 | ||
289 | +4,T001613,0.5 | ||
290 | +4,T001612,0.5 | ||
291 | +4,T001611,0.5 | ||
292 | +4,T001610,0.5 | ||
293 | +4,T001609,0.5 | ||
294 | +4,T001608,0.5 | ||
295 | +4,T001607,0.5 | ||
296 | +4,T001604,0.5 | ||
297 | +4,T001603,0.5 | ||
298 | +4,T001602,0.5 | ||
299 | +4,T001601,0.5 | ||
300 | +4,T001600,0.5 | ||
301 | +4,T000287,0.5 | ||
302 | +4,T000036,0.5 | ||
303 | +4,T000041,0.5 | ||
304 | +4,T000042,0.5 | ||
305 | +4,T000043,0.5 | ||
306 | +4,T000044,0.5 | ||
307 | +4,T000046,0.5 | ||
308 | +4,T000047,0.5 | ||
309 | +4,T000048,0.5 | ||
310 | +4,T000049,5 | ||
311 | +4,T000050,0.5 | ||
312 | +4,T000051,0.5 | ||
313 | +4,T000052,0.5 | ||
314 | +4,T000053,0.5 | ||
315 | +4,T000054,0.5 | ||
316 | +4,T000055,0.5 | ||
317 | +4,T000056,0.5 | ||
318 | +4,T000057,0.5 | ||
319 | +4,T000058,0.5 | ||
320 | +4,T000059,0.5 | ||
321 | +4,T000061,5 | ||
322 | +4,T000062,5 | ||
323 | +4,T000078,0.5 | ||
324 | +4,T000076,0.5 | ||
325 | +4,T000075,0.5 | ||
326 | +4,T000074,0.5 | ||
327 | +4,T000073,0.5 | ||
328 | +4,T000072,0.5 | ||
329 | +4,T000071,0.5 | ||
330 | +4,T000070,0.5 | ||
331 | +4,T000069,0.5 | ||
332 | +4,T000063,0.5 | ||
333 | +4,T000064,0.5 | ||
334 | +4,T000065,0.5 | ||
335 | +4,T000066,0.5 | ||
336 | +4,T000067,0.5 | ||
337 | +4,T001631,0.5 | ||
338 | +4,T001630,0.5 | ||
339 | +4,T001629,0.5 | ||
340 | +4,T001628,0.5 | ||
341 | +4,T000312,0.5 | ||
342 | +4,T000311,0.5 | ||
343 | +4,T000310,0.5 | ||
344 | +4,T000309,0.5 | ||
345 | +4,T000308,0.5 | ||
346 | +4,T000307,0.5 | ||
347 | +4,T000306,0.5 | ||
348 | +4,T000305,0.5 | ||
349 | +4,T000304,0.5 | ||
350 | +4,T000303,0.5 | ||
351 | +4,T000301,0.5 | ||
352 | +4,T000299,0.5 | ||
353 | +4,T000298,0.5 | ||
354 | +4,T000297,0.5 | ||
355 | +4,T000295,0.5 | ||
356 | +4,T000294,0.5 | ||
357 | +4,T000292,0.5 | ||
358 | +4,T000291,0.5 | ||
359 | +4,T000286,0.5 | ||
360 | +4,T000212,0.5 | ||
361 | +5,T001942,0.5 | ||
362 | +5,T001947,5 | ||
363 | +5,T001943,5 | ||
364 | +5,T001944,0.5 | ||
365 | +5,T001945,5 | ||
366 | +5,T001946,5 | ||
367 | +5,T001921,0.5 | ||
368 | +5,T001920,5 | ||
369 | +5,T001919,5 | ||
370 | +5,T001918,5 | ||
371 | +5,T001917,0.5 | ||
372 | +5,T001916,0.5 | ||
373 | +5,T001914,5 | ||
374 | +5,T001911,5 | ||
375 | +5,T001617,5 | ||
376 | +5,T001616,0.5 | ||
377 | +5,T001615,5 | ||
378 | +5,T001614,0.5 | ||
379 | +5,T001613,5 | ||
380 | +5,T001612,5 | ||
381 | +5,T001610,5 | ||
382 | +5,T001609,0.5 | ||
383 | +5,T001608,0.5 | ||
384 | +5,T001607,0.5 | ||
385 | +5,T001606,0.5 | ||
386 | +5,T001603,0.5 | ||
387 | +5,T001602,0.5 | ||
388 | +5,T001601,0.5 | ||
389 | +5,T001600,0.5 | ||
390 | +5,T000287,0.5 | ||
391 | +5,T000036,0.5 | ||
392 | +5,T000040,5 | ||
393 | +5,T000041,0.5 | ||
394 | +5,T000042,0.5 | ||
395 | +5,T000043,5 | ||
396 | +5,T000044,0.5 | ||
397 | +5,T000045,5 | ||
398 | +5,T000047,0.5 | ||
399 | +5,T000048,0.5 | ||
400 | +5,T000049,5 | ||
401 | +5,T000050,5 | ||
402 | +5,T000051,0.5 | ||
403 | +5,T000052,0.5 | ||
404 | +5,T000053,5 | ||
405 | +5,T000054,5 | ||
406 | +5,T000055,0.5 | ||
407 | +5,T000057,0.5 | ||
408 | +5,T000058,0.5 | ||
409 | +5,T000059,5 | ||
410 | +5,T000060,0.5 | ||
411 | +5,T000061,0.5 | ||
412 | +5,T000062,5 | ||
413 | +5,T000078,0.5 | ||
414 | +5,T000077,0.5 | ||
415 | +5,T000076,0.5 | ||
416 | +5,T000075,5 | ||
417 | +5,T000074,5 | ||
418 | +5,T000072,0.5 | ||
419 | +5,T000071,0.5 | ||
420 | +5,T000070,0.5 | ||
421 | +5,T000069,0.5 | ||
422 | +5,T000063,0.5 | ||
423 | +5,T000064,0.5 | ||
424 | +5,T000066,0.5 | ||
425 | +5,T000067,0.5 | ||
426 | +5,T000068,0.5 | ||
427 | +5,T001631,5 | ||
428 | +5,T001629,0.5 | ||
429 | +5,T001628,0.5 | ||
430 | +5,T000312,0.5 | ||
431 | +5,T000311,0.5 | ||
432 | +5,T000310,0.5 | ||
433 | +5,T000308,0.5 | ||
434 | +5,T000307,0.5 | ||
435 | +5,T000306,0.5 | ||
436 | +5,T000305,0.5 | ||
437 | +5,T000304,0.5 | ||
438 | +5,T000303,5 | ||
439 | +5,T000302,0.5 | ||
440 | +5,T000301,0.5 | ||
441 | +5,T000298,0.5 | ||
442 | +5,T000297,0.5 | ||
443 | +5,T000295,0.5 | ||
444 | +5,T000293,0.5 | ||
445 | +5,T000292,0.5 | ||
446 | +5,T000291,0.5 | ||
447 | +5,T000288,5 | ||
448 | +5,T000286,0.5 | ||
449 | +5,T000212,0.5 | ||
450 | +6,T001942,5 | ||
451 | +6,T001947,0.5 | ||
452 | +6,T001944,5 | ||
453 | +6,T001945,5 | ||
454 | +6,T001946,5 | ||
455 | +6,T001956,0.5 | ||
456 | +6,T001921,5 | ||
457 | +6,T001920,5 | ||
458 | +6,T001919,0.5 | ||
459 | +6,T001918,0.5 | ||
460 | +6,T001917,5 | ||
461 | +6,T001916,5 | ||
462 | +6,T001915,0.5 | ||
463 | +6,T001914,5 | ||
464 | +6,T001617,5 | ||
465 | +6,T001616,5 | ||
466 | +6,T001615,5 | ||
467 | +6,T001614,0.5 | ||
468 | +6,T001613,5 | ||
469 | +6,T001612,5 | ||
470 | +6,T001611,5 | ||
471 | +6,T001610,5 | ||
472 | +6,T001609,5 | ||
473 | +6,T001608,5 | ||
474 | +6,T001607,0.5 | ||
475 | +6,T001606,0.5 | ||
476 | +6,T001604,5 | ||
477 | +6,T001603,0.5 | ||
478 | +6,T001602,0.5 | ||
479 | +6,T001601,5 | ||
480 | +6,T001600,5 | ||
481 | +6,T000287,0.5 | ||
482 | +6,T000040,5 | ||
483 | +6,T000041,5 | ||
484 | +6,T000042,0.5 | ||
485 | +6,T000043,5 | ||
486 | +6,T000044,5 | ||
487 | +6,T000045,5 | ||
488 | +6,T000046,0.5 | ||
489 | +6,T000047,5 | ||
490 | +6,T000048,5 | ||
491 | +6,T000049,5 | ||
492 | +6,T000051,0.5 | ||
493 | +6,T000052,5 | ||
494 | +6,T000053,5 | ||
495 | +6,T000054,5 | ||
496 | +6,T000055,5 | ||
497 | +6,T000057,0.5 | ||
498 | +6,T000058,5 | ||
499 | +6,T000060,5 | ||
500 | +6,T000061,5 | ||
501 | +6,T000062,5 | ||
502 | +6,T000078,0.5 | ||
503 | +6,T000077,0.5 | ||
504 | +6,T000076,0.5 | ||
505 | +6,T000075,5 | ||
506 | +6,T000073,0.5 | ||
507 | +6,T000070,0.5 | ||
508 | +6,T000069,0.5 | ||
509 | +6,T000063,5 | ||
510 | +6,T000064,5 | ||
511 | +6,T000065,5 | ||
512 | +6,T000066,0.5 | ||
513 | +6,T000067,0.5 | ||
514 | +6,T000068,0.5 | ||
515 | +6,T001631,5 | ||
516 | +6,T001630,0.5 | ||
517 | +6,T001629,5 | ||
518 | +6,T001628,5 | ||
519 | +6,T000312,5 | ||
520 | +6,T000311,0.5 | ||
521 | +6,T000310,0.5 | ||
522 | +6,T000309,5 | ||
523 | +6,T000308,5 | ||
524 | +6,T000307,5 | ||
525 | +6,T000306,0.5 | ||
526 | +6,T000305,5 | ||
527 | +6,T000304,5 | ||
528 | +6,T000303,0.5 | ||
529 | +6,T000302,5 | ||
530 | +6,T000301,0.5 | ||
531 | +6,T000299,0.5 | ||
532 | +6,T000298,5 | ||
533 | +6,T000297,5 | ||
534 | +6,T000295,5 | ||
535 | +6,T000294,0.5 | ||
536 | +6,T000291,0.5 | ||
537 | +6,T000288,5 | ||
538 | +6,T000286,5 | ||
539 | +7,T000036,5 | ||
540 | +7,T000040,0.5 | ||
541 | +7,T000041,5 | ||
542 | +7,T000042,5 | ||
543 | +7,T000043,5 | ||
544 | +7,T000044,0.5 | ||
545 | +7,T000045,5 | ||
546 | +7,T000046,5 | ||
547 | +7,T000047,5 | ||
548 | +7,T000048,0.5 | ||
549 | +7,T000049,5 | ||
550 | +7,T000050,5 | ||
551 | +7,T000051,0.5 | ||
552 | +7,T000052,5 | ||
553 | +7,T000055,0.5 | ||
554 | +7,T000056,5 | ||
555 | +7,T000057,0.5 | ||
556 | +7,T000058,0.5 | ||
557 | +7,T000059,5 | ||
558 | +7,T000061,5 | ||
559 | +7,T000062,0.5 | ||
560 | +7,T000077,5 | ||
561 | +7,T000076,5 | ||
562 | +7,T000075,0.5 | ||
563 | +7,T000074,5 | ||
564 | +7,T000073,0.5 | ||
565 | +7,T000072,5 | ||
566 | +7,T000070,5 | ||
567 | +7,T000069,5 | ||
568 | +7,T000063,5 | ||
569 | +7,T000064,5 | ||
570 | +7,T000065,5 | ||
571 | +7,T000066,0.5 | ||
572 | +7,T000067,5 | ||
573 | +7,T000068,0.5 | ||
574 | +7,T001631,5 | ||
575 | +7,T001630,0.5 | ||
576 | +7,T001629,5 | ||
577 | +7,T001628,5 | ||
578 | +7,T000312,0.5 | ||
579 | +7,T000311,5 | ||
580 | +7,T000310,0.5 | ||
581 | +7,T000309,0.5 | ||
582 | +7,T000308,0.5 | ||
583 | +7,T000307,5 | ||
584 | +7,T000306,5 | ||
585 | +7,T000305,5 | ||
586 | +7,T000304,5 | ||
587 | +7,T000303,5 | ||
588 | +7,T000302,5 | ||
589 | +7,T000301,5 | ||
590 | +7,T000299,0.5 | ||
591 | +7,T000297,0.5 | ||
592 | +7,T000295,0.5 | ||
593 | +7,T000294,0.5 | ||
594 | +7,T000293,0.5 | ||
595 | +7,T000292,5 | ||
596 | +7,T000291,0.5 | ||
597 | +7,T000289,5 | ||
598 | +7,T000286,0.5 | ||
599 | +7,T000212,0.5 | ||
600 | +7,T001617,0.5 | ||
601 | +7,T001616,5 | ||
602 | +7,T001615,0.5 | ||
603 | +7,T001614,5 | ||
604 | +7,T001613,5 | ||
605 | +7,T001612,0.5 | ||
606 | +7,T001611,0.5 | ||
607 | +7,T001610,0.5 | ||
608 | +7,T001609,5 | ||
609 | +7,T001607,0.5 | ||
610 | +7,T001604,5 | ||
611 | +7,T001603,0.5 | ||
612 | +7,T001602,0.5 | ||
613 | +7,T001601,0.5 | ||
614 | +7,T001600,5 | ||
615 | +7,T000287,5 | ||
616 | +7,T001921,5 | ||
617 | +7,T001920,5 | ||
618 | +7,T001919,5 | ||
619 | +7,T001918,0.5 | ||
620 | +7,T001916,0.5 | ||
621 | +7,T001911,5 | ||
622 | +7,T001942,5 | ||
623 | +7,T001947,0.5 | ||
624 | +7,T001943,0.5 | ||
625 | +7,T001944,0.5 | ||
626 | +7,T001945,5 | ||
627 | +7,T001946,5 | ||
628 | +7,T001956,0.5 | ||
629 | +8,T000036,5 | ||
630 | +8,T000041,0.5 | ||
631 | +8,T000042,5 | ||
632 | +8,T000043,0.5 | ||
633 | +8,T000045,0.5 | ||
634 | +8,T000046,0.5 | ||
635 | +8,T000047,0.5 | ||
636 | +8,T000048,5 | ||
637 | +8,T000049,5 | ||
638 | +8,T000050,0.5 | ||
639 | +8,T000051,5 | ||
640 | +8,T000052,5 | ||
641 | +8,T000054,5 | ||
642 | +8,T000055,0.5 | ||
643 | +8,T000056,5 | ||
644 | +8,T000057,5 | ||
645 | +8,T000058,5 | ||
646 | +8,T000060,5 | ||
647 | +8,T000062,0.5 | ||
648 | +8,T000078,0.5 | ||
649 | +8,T000077,0.5 | ||
650 | +8,T000076,0.5 | ||
651 | +8,T000075,0.5 | ||
652 | +8,T000074,0.5 | ||
653 | +8,T000073,0.5 | ||
654 | +8,T000071,0.5 | ||
655 | +8,T000070,0.5 | ||
656 | +8,T000069,0.5 | ||
657 | +8,T000063,0.5 | ||
658 | +8,T000064,5 | ||
659 | +8,T000065,5 | ||
660 | +8,T000066,0.5 | ||
661 | +8,T000067,5 | ||
662 | +8,T000068,0.5 | ||
663 | +8,T001630,0.5 | ||
664 | +8,T001629,0.5 | ||
665 | +8,T001628,5 | ||
666 | +8,T000312,0.5 | ||
667 | +8,T000311,0.5 | ||
668 | +8,T000310,5 | ||
669 | +8,T000309,0.5 | ||
670 | +8,T000308,0.5 | ||
671 | +8,T000307,0.5 | ||
672 | +8,T000306,5 | ||
673 | +8,T000305,5 | ||
674 | +8,T000304,0.5 | ||
675 | +8,T000303,0.5 | ||
676 | +8,T000302,0.5 | ||
677 | +8,T000299,0.5 | ||
678 | +8,T000298,0.5 | ||
679 | +8,T000297,5 | ||
680 | +8,T000293,0.5 | ||
681 | +8,T000292,5 | ||
682 | +8,T000291,5 | ||
683 | +8,T000289,0.5 | ||
684 | +8,T000288,5 | ||
685 | +8,T000286,0.5 | ||
686 | +8,T000212,0.5 | ||
687 | +8,T001617,5 | ||
688 | +8,T001615,5 | ||
689 | +8,T001614,0.5 | ||
690 | +8,T001613,0.5 | ||
691 | +8,T001612,0.5 | ||
692 | +8,T001611,0.5 | ||
693 | +8,T001610,5 | ||
694 | +8,T001609,0.5 | ||
695 | +8,T001608,0.5 | ||
696 | +8,T001607,0.5 | ||
697 | +8,T001606,0.5 | ||
698 | +8,T001604,0.5 | ||
699 | +8,T001603,5 | ||
700 | +8,T001602,5 | ||
701 | +8,T001601,0.5 | ||
702 | +8,T001600,0.5 | ||
703 | +8,T000287,0.5 | ||
704 | +8,T001921,0.5 | ||
705 | +8,T001920,5 | ||
706 | +8,T001919,0.5 | ||
707 | +8,T001918,0.5 | ||
708 | +8,T001917,5 | ||
709 | +8,T001916,5 | ||
710 | +8,T001915,0.5 | ||
711 | +8,T001914,0.5 | ||
712 | +8,T001911,5 | ||
713 | +8,T001942,0.5 | ||
714 | +8,T001947,0.5 | ||
715 | +8,T001943,5 | ||
716 | +8,T001945,0.5 | ||
717 | +8,T001946,5 | ||
718 | +8,T001956,5 | ||
719 | +9,T000036,5 | ||
720 | +9,T000040,5 | ||
721 | +9,T000041,5 | ||
722 | +9,T000042,0.5 | ||
723 | +9,T000043,0.5 | ||
724 | +9,T000044,0.5 | ||
725 | +9,T000045,5 | ||
726 | +9,T000046,0.5 | ||
727 | +9,T000047,5 | ||
728 | +9,T000048,5 | ||
729 | +9,T000050,5 | ||
730 | +9,T000052,0.5 | ||
731 | +9,T000053,5 | ||
732 | +9,T000057,0.5 | ||
733 | +9,T000058,0.5 | ||
734 | +9,T000059,5 | ||
735 | +9,T000060,0.5 | ||
736 | +9,T000061,5 | ||
737 | +9,T000062,5 | ||
738 | +9,T000078,0.5 | ||
739 | +9,T000077,5 | ||
740 | +9,T000076,5 | ||
741 | +9,T000075,5 | ||
742 | +9,T000074,0.5 | ||
743 | +9,T000073,0.5 | ||
744 | +9,T000072,0.5 | ||
745 | +9,T000071,5 | ||
746 | +9,T000070,0.5 | ||
747 | +9,T000069,0.5 | ||
748 | +9,T000063,5 | ||
749 | +9,T000064,5 | ||
750 | +9,T000065,0.5 | ||
751 | +9,T000066,0.5 | ||
752 | +9,T000067,0.5 | ||
753 | +9,T000068,5 | ||
754 | +9,T001631,5 | ||
755 | +9,T001630,5 | ||
756 | +9,T001629,5 | ||
757 | +9,T001628,5 | ||
758 | +9,T000312,0.5 | ||
759 | +9,T000310,0.5 | ||
760 | +9,T000308,0.5 | ||
761 | +9,T000307,0.5 | ||
762 | +9,T000306,0.5 | ||
763 | +9,T000305,0.5 | ||
764 | +9,T000304,0.5 | ||
765 | +9,T000303,0.5 | ||
766 | +9,T000302,0.5 | ||
767 | +9,T000301,5 | ||
768 | +9,T000299,5 | ||
769 | +9,T000298,5 | ||
770 | +9,T000295,0.5 | ||
771 | +9,T000294,5 | ||
772 | +9,T000293,5 | ||
773 | +9,T000292,0.5 | ||
774 | +9,T000291,0.5 | ||
775 | +9,T000288,5 | ||
776 | +9,T000286,5 | ||
777 | +9,T000212,0.5 | ||
778 | +9,T001617,0.5 | ||
779 | +9,T001616,5 | ||
780 | +9,T001615,5 | ||
781 | +9,T001613,5 | ||
782 | +9,T001611,0.5 | ||
783 | +9,T001610,0.5 | ||
784 | +9,T001609,5 | ||
785 | +9,T001608,0.5 | ||
786 | +9,T001607,5 | ||
787 | +9,T001606,5 | ||
788 | +9,T001604,5 | ||
789 | +9,T001603,5 | ||
790 | +9,T001602,0.5 | ||
791 | +9,T001601,0.5 | ||
792 | +9,T001600,0.5 | ||
793 | +9,T000287,5 | ||
794 | +9,T001921,0.5 | ||
795 | +9,T001920,5 | ||
796 | +9,T001919,0.5 | ||
797 | +9,T001918,5 | ||
798 | +9,T001917,5 | ||
799 | +9,T001916,5 | ||
800 | +9,T001915,0.5 | ||
801 | +9,T001914,0.5 | ||
802 | +9,T001911,5 | ||
803 | +9,T001942,0.5 | ||
804 | +9,T001947,0.5 | ||
805 | +9,T001943,0.5 | ||
806 | +9,T001944,5 | ||
807 | +9,T001945,5 | ||
808 | +9,T001946,5 | ||
809 | +10,T000036,0.5 | ||
810 | +10,T000040,5 | ||
811 | +10,T000041,0.5 | ||
812 | +10,T000042,0.5 | ||
813 | +10,T000043,5 | ||
814 | +10,T000044,5 | ||
815 | +10,T000046,5 | ||
816 | +10,T000047,5 | ||
817 | +10,T000048,0.5 | ||
818 | +10,T000049,0.5 | ||
819 | +10,T000050,5 | ||
820 | +10,T000051,5 | ||
821 | +10,T000052,0.5 | ||
822 | +10,T000054,5 | ||
823 | +10,T000055,5 | ||
824 | +10,T000056,5 | ||
825 | +10,T000057,0.5 | ||
826 | +10,T000058,0.5 | ||
827 | +10,T000059,0.5 | ||
828 | +10,T000060,0.5 | ||
829 | +10,T000062,0.5 | ||
830 | +10,T000078,0.5 | ||
831 | +10,T000077,0.5 | ||
832 | +10,T000076,0.5 | ||
833 | +10,T000075,5 | ||
834 | +10,T000074,0.5 | ||
835 | +10,T000073,5 | ||
836 | +10,T000072,5 | ||
837 | +10,T000071,0.5 | ||
838 | +10,T000070,5 | ||
839 | +10,T000063,5 | ||
840 | +10,T000064,0.5 | ||
841 | +10,T000065,0.5 | ||
842 | +10,T000066,5 | ||
843 | +10,T000067,0.5 | ||
844 | +10,T001631,0.5 | ||
845 | +10,T001630,0.5 | ||
846 | +10,T001629,0.5 | ||
847 | +10,T001628,0.5 | ||
848 | +10,T000311,0.5 | ||
849 | +10,T000310,0.5 | ||
850 | +10,T000309,0.5 | ||
851 | +10,T000308,0.5 | ||
852 | +10,T000307,0.5 | ||
853 | +10,T000306,0.5 | ||
854 | +10,T000305,0.5 | ||
855 | +10,T000304,0.5 | ||
856 | +10,T000302,0.5 | ||
857 | +10,T000301,0.5 | ||
858 | +10,T000299,0.5 | ||
859 | +10,T000298,0.5 | ||
860 | +10,T000295,0.5 | ||
861 | +10,T000294,0.5 | ||
862 | +10,T000293,0.5 | ||
863 | +10,T000292,0.5 | ||
864 | +10,T000291,0.5 | ||
865 | +10,T000289,0.5 | ||
866 | +10,T000288,0.5 | ||
867 | +10,T000286,0.5 | ||
868 | +10,T000212,0.5 | ||
869 | +10,T001617,0.5 | ||
870 | +10,T001616,5 | ||
871 | +10,T001614,5 | ||
872 | +10,T001613,0.5 | ||
873 | +10,T001612,0.5 | ||
874 | +10,T001611,0.5 | ||
875 | +10,T001610,0.5 | ||
876 | +10,T001608,0.5 | ||
877 | +10,T001607,0.5 | ||
878 | +10,T001606,5 | ||
879 | +10,T001604,0.5 | ||
880 | +10,T001603,0.5 | ||
881 | +10,T001602,0.5 | ||
882 | +10,T001601,5 | ||
883 | +10,T001600,0.5 | ||
884 | +10,T001921,5 | ||
885 | +10,T001920,5 | ||
886 | +10,T001919,5 | ||
887 | +10,T001918,0.5 | ||
888 | +10,T001917,0.5 | ||
889 | +10,T001916,5 | ||
890 | +10,T001914,0.5 | ||
891 | +10,T001911,0.5 | ||
892 | +10,T001942,0.5 | ||
893 | +10,T001947,0.5 | ||
894 | +10,T001943,5 | ||
895 | +10,T001944,0.5 | ||
896 | +10,T001945,5 | ||
897 | +10,T001946,0.5 | ||
898 | +10,T001956,0.5 | ||
899 | +11,T000040,0.5 | ||
900 | +11,T000041,0.5 | ||
901 | +11,T000042,0.5 | ||
902 | +11,T000043,0.5 | ||
903 | +11,T000044,5 | ||
904 | +11,T000045,5 | ||
905 | +11,T000047,0.5 | ||
906 | +11,T000048,0.5 | ||
907 | +11,T000049,5 | ||
908 | +11,T000050,5 | ||
909 | +11,T000051,5 | ||
910 | +11,T000052,0.5 | ||
911 | +11,T000053,0.5 | ||
912 | +11,T000055,0.5 | ||
913 | +11,T000056,5 | ||
914 | +11,T000057,0.5 | ||
915 | +11,T000058,0.5 | ||
916 | +11,T000059,0.5 | ||
917 | +11,T000060,0.5 | ||
918 | +11,T000061,5 | ||
919 | +11,T000062,5 | ||
920 | +11,T000078,0.5 | ||
921 | +11,T000076,0.5 | ||
922 | +11,T000075,5 | ||
923 | +11,T000074,5 | ||
924 | +11,T000073,0.5 | ||
925 | +11,T000072,0.5 | ||
926 | +11,T000071,0.5 | ||
927 | +11,T000070,5 | ||
928 | +11,T000063,0.5 | ||
929 | +11,T000064,5 | ||
930 | +11,T000065,5 | ||
931 | +11,T000066,0.5 | ||
932 | +11,T000067,5 | ||
933 | +11,T000068,0.5 | ||
934 | +11,T001631,0.5 | ||
935 | +11,T001630,0.5 | ||
936 | +11,T001629,0.5 | ||
937 | +11,T000312,5 | ||
938 | +11,T000311,5 | ||
939 | +11,T000310,0.5 | ||
940 | +11,T000309,0.5 | ||
941 | +11,T000307,0.5 | ||
942 | +11,T000306,0.5 | ||
943 | +11,T000305,0.5 | ||
944 | +11,T000304,0.5 | ||
945 | +11,T000303,0.5 | ||
946 | +11,T000302,0.5 | ||
947 | +11,T000299,5 | ||
948 | +11,T000298,5 | ||
949 | +11,T000297,5 | ||
950 | +11,T000295,0.5 | ||
951 | +11,T000294,0.5 | ||
952 | +11,T000293,0.5 | ||
953 | +11,T000292,0.5 | ||
954 | +11,T000291,5 | ||
955 | +11,T000289,5 | ||
956 | +11,T000286,5 | ||
957 | +11,T000212,0.5 | ||
958 | +11,T001617,0.5 | ||
959 | +11,T001616,5 | ||
960 | +11,T001615,5 | ||
961 | +11,T001614,5 | ||
962 | +11,T001613,0.5 | ||
963 | +11,T001612,0.5 | ||
964 | +11,T001611,0.5 | ||
965 | +11,T001609,5 | ||
966 | +11,T001608,5 | ||
967 | +11,T001607,0.5 | ||
968 | +11,T001606,0.5 | ||
969 | +11,T001604,5 | ||
970 | +11,T001603,0.5 | ||
971 | +11,T001602,0.5 | ||
972 | +11,T001601,0.5 | ||
973 | +11,T000287,0.5 | ||
974 | +11,T001921,5 | ||
975 | +11,T001920,5 | ||
976 | +11,T001919,5 | ||
977 | +11,T001918,0.5 | ||
978 | +11,T001917,0.5 | ||
979 | +11,T001916,0.5 | ||
980 | +11,T001915,0.5 | ||
981 | +11,T001911,0.5 | ||
982 | +11,T001942,0.5 | ||
983 | +11,T001947,0.5 | ||
984 | +11,T001943,0.5 | ||
985 | +11,T001944,5 | ||
986 | +11,T001945,0.5 | ||
987 | +11,T001946,0.5 | ||
988 | +11,T001956,0.5 | ||
989 | +12,T000036,5 | ||
990 | +12,T000040,0.5 | ||
991 | +12,T000041,0.5 | ||
992 | +12,T000042,0.5 | ||
993 | +12,T000043,5 | ||
994 | +12,T000044,0.5 | ||
995 | +12,T000046,0.5 | ||
996 | +12,T000047,5 | ||
997 | +12,T000049,5 | ||
998 | +12,T000050,5 | ||
999 | +12,T000051,0.5 | ||
1000 | +12,T000052,5 | ||
1001 | +12,T000053,5 | ||
1002 | +12,T000054,5 | ||
1003 | +12,T000055,0.5 | ||
1004 | +12,T000057,0.5 | ||
1005 | +12,T000058,0.5 | ||
1006 | +12,T000059,0.5 | ||
1007 | +12,T000060,5 | ||
1008 | +12,T000061,5 | ||
1009 | +12,T000062,5 | ||
1010 | +12,T000078,0.5 | ||
1011 | +12,T000077,0.5 | ||
1012 | +12,T000076,5 | ||
1013 | +12,T000074,0.5 | ||
1014 | +12,T000073,5 | ||
1015 | +12,T000072,0.5 | ||
1016 | +12,T000071,5 | ||
1017 | +12,T000070,0.5 | ||
1018 | +12,T000069,5 | ||
1019 | +12,T000064,5 | ||
1020 | +12,T000065,0.5 | ||
1021 | +12,T000066,0.5 | ||
1022 | +12,T000067,5 | ||
1023 | +12,T000068,0.5 | ||
1024 | +12,T001630,0.5 | ||
1025 | +12,T001629,0.5 | ||
1026 | +12,T001628,0.5 | ||
1027 | +12,T000312,0.5 | ||
1028 | +12,T000311,0.5 | ||
1029 | +12,T000310,5 | ||
1030 | +12,T000308,0.5 | ||
1031 | +12,T000307,0.5 | ||
1032 | +12,T000306,0.5 | ||
1033 | +12,T000305,0.5 | ||
1034 | +12,T000304,0.5 | ||
1035 | +12,T000303,5 | ||
1036 | +12,T000301,0.5 | ||
1037 | +12,T000299,5 | ||
1038 | +12,T000298,0.5 | ||
1039 | +12,T000297,0.5 | ||
1040 | +12,T000295,0.5 | ||
1041 | +12,T000294,5 | ||
1042 | +12,T000293,5 | ||
1043 | +12,T000292,0.5 | ||
1044 | +12,T000289,0.5 | ||
1045 | +12,T000288,5 | ||
1046 | +12,T000286,5 | ||
1047 | +12,T000212,0.5 | ||
1048 | +12,T001617,0.5 | ||
1049 | +12,T001615,5 | ||
1050 | +12,T001614,0.5 | ||
1051 | +12,T001613,5 | ||
1052 | +12,T001612,5 | ||
1053 | +12,T001611,5 | ||
1054 | +12,T001610,5 | ||
1055 | +12,T001609,0.5 | ||
1056 | +12,T001608,0.5 | ||
1057 | +12,T001607,0.5 | ||
1058 | +12,T001606,0.5 | ||
1059 | +12,T001604,0.5 | ||
1060 | +12,T001603,0.5 | ||
1061 | +12,T001602,0.5 | ||
1062 | +12,T001601,5 | ||
1063 | +12,T001600,0.5 | ||
1064 | +12,T000287,0.5 | ||
1065 | +12,T001921,5 | ||
1066 | +12,T001920,5 | ||
1067 | +12,T001919,0.5 | ||
1068 | +12,T001917,5 | ||
1069 | +12,T001916,0.5 | ||
1070 | +12,T001915,5 | ||
1071 | +12,T001914,0.5 | ||
1072 | +12,T001911,0.5 | ||
1073 | +12,T001942,0.5 | ||
1074 | +12,T001943,0.5 | ||
1075 | +12,T001944,0.5 | ||
1076 | +12,T001945,5 | ||
1077 | +12,T001946,0.5 | ||
1078 | +12,T001956,5 | ||
1079 | +13,T000036,0.5 | ||
1080 | +13,T000040,0.5 | ||
1081 | +13,T000041,0.5 | ||
1082 | +13,T000042,0.5 | ||
1083 | +13,T000044,5 | ||
1084 | +13,T000045,5 | ||
1085 | +13,T000046,0.5 | ||
1086 | +13,T000047,5 | ||
1087 | +13,T000048,5 | ||
1088 | +13,T000049,5 | ||
1089 | +13,T000050,5 | ||
1090 | +13,T000051,0.5 | ||
1091 | +13,T000053,0.5 | ||
1092 | +13,T000054,0.5 | ||
1093 | +13,T000055,0.5 | ||
1094 | +13,T000056,5 | ||
1095 | +13,T000057,5 | ||
1096 | +13,T000058,5 | ||
1097 | +13,T000060,5 | ||
1098 | +13,T000061,5 | ||
1099 | +13,T000062,5 | ||
1100 | +13,T000078,5 | ||
1101 | +13,T000077,5 | ||
1102 | +13,T000076,5 | ||
1103 | +13,T000075,5 | ||
1104 | +13,T000074,5 | ||
1105 | +13,T000073,5 | ||
1106 | +13,T000072,5 | ||
1107 | +13,T000070,5 | ||
1108 | +13,T000069,5 | ||
1109 | +13,T000063,5 | ||
1110 | +13,T000064,5 | ||
1111 | +13,T000065,5 | ||
1112 | +13,T000066,5 | ||
1113 | +13,T000067,5 | ||
1114 | +13,T000068,0.5 | ||
1115 | +13,T001631,5 | ||
1116 | +13,T001630,5 | ||
1117 | +13,T001629,0.5 | ||
1118 | +13,T000312,5 | ||
1119 | +13,T000311,5 | ||
1120 | +13,T000310,5 | ||
1121 | +13,T000309,5 | ||
1122 | +13,T000308,5 | ||
1123 | +13,T000306,0.5 | ||
1124 | +13,T000305,5 | ||
1125 | +13,T000304,5 | ||
1126 | +13,T000303,5 | ||
1127 | +13,T000302,5 | ||
1128 | +13,T000301,5 | ||
1129 | +13,T000298,0.5 | ||
1130 | +13,T000297,0.5 | ||
1131 | +13,T000295,0.5 | ||
1132 | +13,T000294,5 | ||
1133 | +13,T000293,5 | ||
1134 | +13,T000292,5 | ||
1135 | +13,T000289,5 | ||
1136 | +13,T000288,5 | ||
1137 | +13,T000286,5 | ||
1138 | +13,T000212,0.5 | ||
1139 | +13,T001617,5 | ||
1140 | +13,T001616,5 | ||
1141 | +13,T001615,5 | ||
1142 | +13,T001613,5 | ||
1143 | +13,T001612,0.5 | ||
1144 | +13,T001611,0.5 | ||
1145 | +13,T001610,5 | ||
1146 | +13,T001609,0.5 | ||
1147 | +13,T001608,0.5 | ||
1148 | +13,T001607,5 | ||
1149 | +13,T001606,0.5 | ||
1150 | +13,T001603,5 | ||
1151 | +13,T001602,0.5 | ||
1152 | +13,T001601,0.5 | ||
1153 | +13,T001600,0.5 | ||
1154 | +13,T000287,0.5 | ||
1155 | +13,T001920,0.5 | ||
1156 | +13,T001919,0.5 | ||
1157 | +13,T001918,0.5 | ||
1158 | +13,T001917,0.5 | ||
1159 | +13,T001916,0.5 | ||
1160 | +13,T001915,0.5 | ||
1161 | +13,T001914,0.5 | ||
1162 | +13,T001942,0.5 | ||
1163 | +13,T001947,0.5 | ||
1164 | +13,T001943,5 | ||
1165 | +13,T001944,0.5 | ||
1166 | +13,T001945,5 | ||
1167 | +13,T001946,5 | ||
1168 | +13,T001956,0.5 |
코드_데이터/dataset_test.csv
0 → 100644
1 | +userid,contentsid,rating | ||
2 | +1,T000043,5 | ||
3 | +1,T000055,0.5 | ||
4 | +1,T000072,0.5 | ||
5 | +1,T000064,5 | ||
6 | +1,T001630,0.5 | ||
7 | +1,T000308,0.5 | ||
8 | +1,T000293,0.5 | ||
9 | +1,T001616,0.5 | ||
10 | +1,T001613,5 | ||
11 | +1,T001601,0.5 | ||
12 | +1,T001919,0.5 | ||
13 | +1,T001946,5 | ||
14 | +2,T000046,5 | ||
15 | +2,T000051,0.5 | ||
16 | +2,T000074,0.5 | ||
17 | +2,T000308,0.5 | ||
18 | +2,T000307,5 | ||
19 | +2,T000299,0.5 | ||
20 | +2,T000291,0.5 | ||
21 | +2,T001613,5 | ||
22 | +2,T001607,0.5 | ||
23 | +2,T001920,5 | ||
24 | +2,T001916,0.5 | ||
25 | +2,T001943,0.5 | ||
26 | +3,T000036,5 | ||
27 | +3,T000049,0.5 | ||
28 | +3,T000053,0.5 | ||
29 | +3,T000061,5 | ||
30 | +3,T000073,0.5 | ||
31 | +3,T001628,5 | ||
32 | +3,T000302,5 | ||
33 | +3,T000212,0.5 | ||
34 | +3,T001616,0.5 | ||
35 | +3,T001606,0.5 | ||
36 | +3,T001920,0.5 | ||
37 | +3,T001915,0.5 | ||
38 | +4,T001947,0.5 | ||
39 | +4,T001921,0.5 | ||
40 | +4,T001617,0.5 | ||
41 | +4,T001606,0.5 | ||
42 | +4,T000040,0.5 | ||
43 | +4,T000045,0.5 | ||
44 | +4,T000060,5 | ||
45 | +4,T000077,0.5 | ||
46 | +4,T000068,0.5 | ||
47 | +4,T000302,0.5 | ||
48 | +4,T000293,0.5 | ||
49 | +4,T000288,0.5 | ||
50 | +5,T001956,5 | ||
51 | +5,T001915,0.5 | ||
52 | +5,T001611,5 | ||
53 | +5,T001604,5 | ||
54 | +5,T000046,0.5 | ||
55 | +5,T000056,5 | ||
56 | +5,T000073,0.5 | ||
57 | +5,T000065,0.5 | ||
58 | +5,T001630,0.5 | ||
59 | +5,T000309,0.5 | ||
60 | +5,T000299,5 | ||
61 | +5,T000294,0.5 | ||
62 | +6,T001943,5 | ||
63 | +6,T001911,0.5 | ||
64 | +6,T000036,0.5 | ||
65 | +6,T000050,5 | ||
66 | +6,T000056,0.5 | ||
67 | +6,T000059,5 | ||
68 | +6,T000074,5 | ||
69 | +6,T000072,0.5 | ||
70 | +6,T000071,0.5 | ||
71 | +6,T000293,0.5 | ||
72 | +6,T000292,5 | ||
73 | +6,T000212,0.5 | ||
74 | +7,T000053,5 | ||
75 | +7,T000054,5 | ||
76 | +7,T000060,0.5 | ||
77 | +7,T000078,0.5 | ||
78 | +7,T000071,0.5 | ||
79 | +7,T000298,0.5 | ||
80 | +7,T000288,0.5 | ||
81 | +7,T001608,0.5 | ||
82 | +7,T001606,5 | ||
83 | +7,T001917,0.5 | ||
84 | +7,T001915,0.5 | ||
85 | +7,T001914,5 | ||
86 | +8,T000040,0.5 | ||
87 | +8,T000044,0.5 | ||
88 | +8,T000053,0.5 | ||
89 | +8,T000059,5 | ||
90 | +8,T000061,5 | ||
91 | +8,T000072,0.5 | ||
92 | +8,T001631,0.5 | ||
93 | +8,T000301,0.5 | ||
94 | +8,T000295,5 | ||
95 | +8,T000294,5 | ||
96 | +8,T001616,5 | ||
97 | +8,T001944,5 | ||
98 | +9,T000049,5 | ||
99 | +9,T000051,0.5 | ||
100 | +9,T000054,5 | ||
101 | +9,T000055,5 | ||
102 | +9,T000056,5 | ||
103 | +9,T000311,5 | ||
104 | +9,T000309,5 | ||
105 | +9,T000297,5 | ||
106 | +9,T000289,5 | ||
107 | +9,T001614,0.5 | ||
108 | +9,T001612,0.5 | ||
109 | +9,T001956,0.5 | ||
110 | +10,T000045,0.5 | ||
111 | +10,T000053,0.5 | ||
112 | +10,T000061,5 | ||
113 | +10,T000069,0.5 | ||
114 | +10,T000068,0.5 | ||
115 | +10,T000312,0.5 | ||
116 | +10,T000303,0.5 | ||
117 | +10,T000297,0.5 | ||
118 | +10,T000287,5 | ||
119 | +10,T001615,5 | ||
120 | +10,T001609,5 | ||
121 | +10,T001915,5 | ||
122 | +11,T000036,5 | ||
123 | +11,T000046,0.5 | ||
124 | +11,T000054,5 | ||
125 | +11,T000077,0.5 | ||
126 | +11,T000069,0.5 | ||
127 | +11,T001628,5 | ||
128 | +11,T000308,5 | ||
129 | +11,T000301,0.5 | ||
130 | +11,T000288,5 | ||
131 | +11,T001610,0.5 | ||
132 | +11,T001600,5 | ||
133 | +11,T001914,5 | ||
134 | +12,T000045,0.5 | ||
135 | +12,T000048,0.5 | ||
136 | +12,T000056,5 | ||
137 | +12,T000075,5 | ||
138 | +12,T000063,0.5 | ||
139 | +12,T001631,5 | ||
140 | +12,T000309,0.5 | ||
141 | +12,T000302,0.5 | ||
142 | +12,T000291,5 | ||
143 | +12,T001616,0.5 | ||
144 | +12,T001918,5 | ||
145 | +12,T001947,5 | ||
146 | +13,T000043,5 | ||
147 | +13,T000052,5 | ||
148 | +13,T000059,5 | ||
149 | +13,T000071,5 | ||
150 | +13,T001628,0.5 | ||
151 | +13,T000307,5 | ||
152 | +13,T000299,0.5 | ||
153 | +13,T000291,5 | ||
154 | +13,T001614,0.5 | ||
155 | +13,T001604,5 | ||
156 | +13,T001921,0.5 | ||
157 | +13,T001911,0.5 |
-
Please register or login to post a comment