Commit 284aaad9 authored by SHREYANSH JAIN's avatar SHREYANSH JAIN

added assignment2 ML folder

parent 4948f373
...
@@ -13,7 +13,7 @@ with open('logcosh.log','r') as csvfile:
-plt.plot(x,y, label='LOGCOSH')
+plt.plot(x,y,'--', label='LOGCOSH')
 plt.xlabel('epoch')
 plt.ylabel('mean_squared_loss')
...
import numpy as np
import argparse
import csv
import warnings
'''
The commented-out portions may not help much with
optimization, but they do help with visualization.
'''
def mean_squared_loss(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    err = (1 / samples) * np.sum(np.square(ydata - guess))
    return err
def mean_squared_gradient(xdata, ydata, weights):
    samples = np.shape(xdata)[0]
    guess = np.dot(xdata, weights)
    gradient = (2 / samples) * np.dot(xdata.T, (guess - ydata))
    return gradient
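# Editor's note: with L(w) = (1/n) * ||y - Xw||^2, the derivative is
# dL/dw = (2/n) * X^T (Xw - y), which is exactly the expression computed above.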
def mean_absolute_loss(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    err = (1 / samples) * np.sum(np.absolute(ydata - guess))
    return err
def mean_absolute_gradient(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    signInfo = np.sign(guess - ydata)
    gradient = np.dot(xdata.T, signInfo) / samples
    return gradient
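# Editor's note: MAE is not differentiable at zero residual; np.sign supplies a
# subgradient, giving (1/n) * X^T sign(Xw - y) as computed above.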
def mean_log_cosh_loss(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    # Promote overflow warnings from np.cosh to errors so the fallback triggers.
    warnings.filterwarnings("error")
    try:
        err = np.sum(np.log(np.cosh(guess - ydata))) / samples
    except Exception:
        # For large |x|, log(cosh(x)) ~ |x| - log(2); the original '+' here was a sign error.
        err = np.sum(np.absolute(guess - ydata) - np.log(2)) / samples
    return err
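# Editor's note: the fallback relies on log(cosh(x)) -> |x| - log(2) as |x| grows;
# e.g. np.log(np.cosh(20.0)) and 20.0 - np.log(2) both evaluate to ~19.3069.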
def mean_log_cosh_gradient(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    gradient = np.dot(xdata.T, np.tanh(guess - ydata)) / samples
    return gradient
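# Editor's note: d/dx log(cosh(x)) = tanh(x), so the log-cosh gradient is
# (1/n) * X^T tanh(Xw - y), mirroring the MSE/MAE gradients above.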
def root_mean_squared_loss(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    err = np.sqrt(np.sum(np.square(ydata - guess)) / samples)
    return err
def root_mean_squared_gradient(xdata, ydata, weights):
    # Chain rule: d/dw sqrt(MSE) = (d/dw MSE) / (2 * sqrt(MSE)) = grad_MSE / (2 * RMSE)
    gradient = mean_squared_gradient(xdata, ydata, weights) / (2 * root_mean_squared_loss(xdata, ydata, weights))
    return gradient
class LinearRegressor:
    def __init__(self, dims):
        self.dims = dims
        self.W = np.random.rand(dims)
        # self.W = np.random.uniform(low=0.0, high=1.0, size=dims)
    def train(self, xtrain, ytrain, loss_function, gradient_function, epoch=100, lr=1):
        errlog = []
        for _ in range(epoch):
            self.W = self.W - lr * gradient_function(xtrain, ytrain, self.W)
            errlog.append(loss_function(xtrain, ytrain, self.W))
            # errlog.append(mean_squared_loss(xtrain, ytrain, self.W))
        return errlog
    def predict(self, xtest):
        return np.dot(xtest, self.W)
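# Minimal usage sketch (editor's note; synthetic data, not from the assignment):
#   X = np.hstack([np.ones((100, 1)), np.random.rand(100, 3)])
#   y = X.dot(np.array([1.0, 2.0, -1.0, 0.5]))
#   reg = LinearRegressor(X.shape[1])
#   errlog = reg.train(X, y, mean_squared_loss, mean_squared_gradient, epoch=500, lr=0.1)
#   preds = reg.predict(X)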
def read_dataset(trainfile, testfile):
    xtrain = []
    ytrain = []
    xtest = []
    with open(trainfile, 'r') as f:
        reader = csv.reader(f, delimiter=',')
        next(reader, None)  # skip header row
        for row in reader:
            xtrain.append(row[:-1])
            ytrain.append(row[-1])
    with open(testfile, 'r') as f:
        reader = csv.reader(f, delimiter=',')
        next(reader, None)  # skip header row
        for row in reader:
            xtest.append(row)
    return np.array(xtrain), np.array(ytrain), np.array(xtest)
def one_hot_encoding(value_list, classes):
    res = np.eye(classes)[value_list.reshape(-1)]
    return res.reshape(list(value_list.shape) + [classes])
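# Example (editor's note): one_hot_encoding(np.array([0, 2, 1]), 3) yields
# [[1,0,0], [0,0,1], [0,1,0]] -- each value indexes a row of np.eye(classes).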
norm_dict = {}
dictionary_of_classes_for_features = {
    2: 5,
    3: 25,
    5: 8,
    7: 5
}
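# Editor's note: keys are raw-data column indices, values the number of one-hot
# slots for that column; column 5 (weekday) presumably gets 8 slots because the
# day codes below run 1-7, leaving slot 0 unused.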
dictionary_of_days = {
    'Monday': 1,
    'Tuesday': 2,
    'Wednesday': 3,
    'Thursday': 4,
    'Friday': 5,
    'Saturday': 6,
    'Sunday': 7
}
def slicer(arr, beg, end):
    return np.array([i[beg:end] for i in arr]).reshape(-1, 1)
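# Example (editor's note): for date strings like '2011-03-14', slicer(col, 5, 7)
# extracts the month ('03') and slicer(col, 8, 10) the day of month ('14').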
"""
#for normalization of parametes 'wind speed' and 'humidity' uncoment
def normalize(arr):
arr = arr
if not norm_dict: # make dictionary once at training to be used later during test
# for i in range(arr.shape[1]):
norm_dict['init'] = [np.min(arr), np.max(arr)]
#norm_dict['init'] = [np.mean(arr), np.std(arr)]
# for i in range(arr.shape[1]):
arr = np.array([(x - norm_dict['init'][0])/(norm_dict['init'][1] - norm_dict['init'][0]) for x in arr]) # min-max
#arr = np.array([(x - norm_dict['init'][0])/(norm_dict['init'][1]) for x in arr]) # standardization
return arr
"""
def preprocess_dataset(xdata, ydata=None):
    """
    # For normalization of the parameters 'wind speed' and 'humidity', uncomment
    # (the original second line reused column 10; column 11 is clearly intended):
    xdata[:, 10] = normalize(xdata[:, 10].astype('float'))
    xdata[:, 11] = normalize(xdata[:, 11].astype('float'))
    """
    # converting weekdays to numeric for one_hot_encoding
    xdata[:, 5] = [dictionary_of_days[i] for i in xdata[:, 5]]
    cat_cols = [2, 3, 5, 7]
    for i in cat_cols:
        # dropping 2 columns for C-1 encoding and removing the extra all-zero column
        t = one_hot_encoding(xdata[:, i].astype('int'), dictionary_of_classes_for_features[i])[:, 2:]
        xdata = np.concatenate((xdata, t), axis=1)
    xdata = np.delete(xdata, cat_cols, 1)  # removing the original categorical columns
    xdata = np.delete(xdata, 6, 1)
    xdata = np.delete(xdata, 8, 1)
    # extracting features from the date column
    month = slicer(xdata[:, 1], 5, 7)
    t = one_hot_encoding(month[:, 0].astype('int'), 13)[:, 2:]
    xdata = np.concatenate((xdata, t), axis=1)
    date = slicer(xdata[:, 1], 8, 10)
    week = np.ceil(date.astype('int') / 7)  # week of the month
    t = one_hot_encoding(week[:, 0].astype('int'), 6)[:, 2:]
    xdata = np.concatenate((xdata, t), axis=1)
    xdata = xdata[:, 2:]  # dropping the first 2 unneeded columns
    xdata = xdata.astype('float32')
    bias = np.ones((np.shape(xdata)[0], 1))  # adding a bias column to the feature matrix
    xdata = np.concatenate((bias, xdata), axis=1)
    if ydata is None:
        return xdata
    ydata = ydata.astype('float32')
    return xdata, ydata
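# Editor's note: with the bias column prepended, predict() computes X.dot(w)
# where w[0] plays the role of the intercept; no separate bias term is needed.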
dictionary_of_losses = {
    'mse': (mean_squared_loss, mean_squared_gradient),
    'mae': (mean_absolute_loss, mean_absolute_gradient),
    'rmse': (root_mean_squared_loss, root_mean_squared_gradient),
    'logcosh': (mean_log_cosh_loss, mean_log_cosh_gradient),
}
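# Editor's note: each entry pairs a loss with its matching gradient so that
# main() can fetch both in one lookup:
#   loss_fn, loss_grad = dictionary_of_losses['mse']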
"""
#For outliers removal from wind speed column uncomment
def out(x, std, mean):
if ((x < mean + 2 * std)and (x > mean - 2 * std)):
return 0
else:
return 1
def outlier(xtrain, ytrain, std, mean):
a =[]
for i in xtrain[:, 11].astype('float32'):
a.append(out(i,std, mean))
a = np.array(a)
xdata = np.concatenate((xtrain, a.reshape(-1, 1)), axis=1)
ytrain = np.delete(ytrain, np.argwhere(xdata[:, -1].astype('int') > 0), 0)
xdata = np.delete(xdata, np.argwhere(xdata[:, -1].astype('int') > 0), 0)
xdata = np.delete(xdata, -1, 1)
return (xdata, ytrain)"""
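# Editor's note: out() flags rows outside mean +/- 2*std (the two-sigma rule)
# and outlier() drops the flagged rows from xtrain and ytrain together.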
def main():
    # You are free to modify the main function as per your requirements.
    xtrain, ytrain, xtest = read_dataset(args.train_file, args.test_file)
    """
    # For outlier removal from the wind speed column, uncomment:
    std = np.std(xtrain[:, 11].astype('float32'))
    mean = np.mean(xtrain[:, 11].astype('float32'))
    xtrain, ytrain = outlier(xtrain, ytrain, std, mean)
    """
    xtrainprocessed, ytrainprocessed = preprocess_dataset(xtrain, ytrain)
    xtestprocessed = preprocess_dataset(xtest)
    model = LinearRegressor(np.shape(xtrainprocessed)[1])
    # The loss function is selected by command-line argument
    loss_fn, loss_grad = dictionary_of_losses[args.loss]
    errlog = model.train(xtrainprocessed, ytrainprocessed, loss_fn, loss_grad, args.epoch, args.lr)
    ytest = model.predict(xtestprocessed)
    ytest = ytest.astype('int')
    ytest[ytest < 0] = 0  # counts cannot be negative
    print(ytest)
    output = [(i, ytest[i]) for i in range(len(ytest))]
    np.savetxt("prediction.csv", output, delimiter=',', fmt="%d", header="instance (id),count", comments='')
    # np.savetxt("error.log", errlog, delimiter='\n', fmt="%f")
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--loss', default='mse', choices=['mse', 'mae', 'rmse', 'logcosh'], help='loss function')
    parser.add_argument('--lr', default=1.0, type=float, help='learning rate')
    parser.add_argument('--epoch', default=100, type=int, help='number of epochs')
    parser.add_argument('--train_file', type=str, help='location of the training file')
    parser.add_argument('--test_file', type=str, help='location of the test file')
    args = parser.parse_args()
    main()
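A typical invocation (editor's sketch; the script and data file names are assumptions, not recorded in this commit):

    python assignment.py --train_file train.csv --test_file test.csv --loss logcosh --lr 0.01 --epoch 1000

With the defaults (--loss mse --lr 1.0 --epoch 100) the script reads the two CSVs, trains the regressor, and writes prediction.csv in the format diffed below.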
[prediction.csv diff, old vs new: all 3,129 rows (instances 0-3128) were regenerated by this commit. The "instance (id),count" header and the instance ids are unchanged; only the predicted counts differ. First and last rows, old -> new: 0,280 -> 0,452; 1,93 -> 1,65; 2,136 -> 2,254; ...; 3128,155 -> 3128,167. Full per-row diff omitted.]
3129,275 3129,461
3130,234 3130,274
3131,138 3131,190
3132,154 3132,156
3133,258 3133,329
3134,204 3134,227
3135,44 3135,35
3136,144 3136,294
3137,216 3137,351
3138,113 3138,91
3139,89 3139,84
3140,207 3140,232
3141,220 3141,389
3142,124 3142,112
3143,133 3143,132
3144,259 3144,322
3145,116 3145,127
3146,0 3146,0
3147,106 3147,112
3148,144 3148,288
3149,183 3149,206
3150,80 3150,78
3151,225 3151,276
3152,79 3152,63
3153,226 3153,296
3154,193 3154,268
3155,0 3155,0
3156,143 3156,140
3157,100 3157,97
3158,0 3158,0
3159,198 3159,237
3160,141 3160,194
3161,182 3161,202
3162,217 3162,403
3163,52 3163,36
3164,229 3164,256
3165,155 3165,169
3166,91 3166,89
3167,224 3167,383
3168,148 3168,138
3169,99 3169,80
3170,249 3170,268
3171,279 3171,349
3172,109 3172,136
3173,107 3173,132
3174,113 3174,103
3175,168 3175,193
3176,66 3176,68
3177,153 3177,194
3178,192 3178,219
3179,206 3179,264
3180,179 3180,230
3181,59 3181,50
3182,101 3182,124
3183,163 3183,198
3184,171 3184,233
3185,98 3185,115
3186,112 3186,104
3187,127 3187,159
3188,191 3188,202
3189,237 3189,300
3190,92 3190,81
3191,188 3191,255
3192,206 3192,299
3193,106 3193,80
3194,97 3194,92
3195,191 3195,225
3196,149 3196,181
3197,140 3197,163
3198,94 3198,105
3199,135 3199,148
3200,87 3200,99
3201,72 3201,88
3202,139 3202,192
3203,89 3203,73
3204,150 3204,201
3205,20 3205,0
3206,210 3206,278
3207,81 3207,64
3208,226 3208,304
3209,85 3209,65
3210,282 3210,455
3211,133 3211,132
3212,211 3212,261
3213,8 3213,0
3214,211 3214,261
3215,198 3215,259
3216,184 3216,231
3217,246 3217,300
3218,134 3218,139
3219,63 3219,32
3220,153 3220,195
3221,88 3221,63
3222,141 3222,179
3223,249 3223,316
3224,136 3224,182
3225,24 3225,13
3226,178 3226,229
3227,68 3227,32
3228,221 3228,274
3229,227 3229,257
3230,161 3230,202
3231,197 3231,195
3232,241 3232,326
3233,271 3233,442
3234,182 3234,184
3235,232 3235,265
3236,145 3236,185
3237,149 3237,167
3238,106 3238,73
3239,18 3239,0
3240,138 3240,102
3241,122 3241,169
3242,107 3242,84
3243,42 3243,26
3244,85 3244,66
3245,97 3245,94
3246,187 3246,202
3247,196 3247,234
3248,140 3248,181
3249,21 3249,0
3250,247 3250,247
3251,89 3251,92
3252,118 3252,115
3253,128 3253,140
3254,182 3254,245
3255,1 3255,0
3256,199 3256,242
3257,249 3257,403
3258,152 3258,168
3259,178 3259,222
3260,273 3260,370
3261,136 3261,151
3262,87 3262,75
3263,0 3263,0
3264,0 3264,0
3265,224 3265,383
3266,116 3266,127
3267,251 3267,311
3268,142 3268,161
3269,84 3269,71
3270,112 3270,102
3271,135 3271,142
3272,185 3272,256
3273,266 3273,456
3274,217 3274,387
3275,170 3275,219
3276,184 3276,215
3277,119 3277,78
3278,0 3278,0
3279,250 3279,266
3280,196 3280,270
3281,188 3281,230
3282,241 3282,307
3283,142 3283,153
3284,35 3284,0
3285,219 3285,244
3286,0 3286,0
3287,136 3287,126
3288,224 3288,317
3289,119 3289,123
3290,243 3290,291
3291,191 3291,244
3292,192 3292,214
3293,210 3293,392
3294,123 3294,159
3295,213 3295,253
3296,119 3296,127
3297,77 3297,74
3298,156 3298,195
3299,239 3299,277
3300,173 3300,213
3301,51 3301,58
3302,305 3302,486
3303,29 3303,0
3304,43 3304,38
3305,84 3305,79
3306,34 3306,7
3307,257 3307,315
3308,88 3308,74
3309,226 3309,256
3310,158 3310,174
3311,90 3311,108
3312,88 3312,125
3313,146 3313,144
3314,154 3314,204
3315,212 3315,287
3316,56 3316,51
3317,231 3317,283
3318,252 3318,283
3319,211 3319,231
3320,102 3320,215
3321,212 3321,261
3322,257 3322,440
3323,87 3323,87
3324,82 3324,71
3325,51 3325,33
3326,163 3326,212
3327,117 3327,141
3328,93 3328,96
3329,234 3329,280
3330,142 3330,164
3331,230 3331,237
3332,75 3332,59
3333,112 3333,161
3334,0 3334,0
3335,93 3335,102
3336,105 3336,143
3337,225 3337,378
3338,198 3338,208
3339,78 3339,72
3340,170 3340,216
3341,186 3341,258
3342,199 3342,246
3343,206 3343,252
3344,106 3344,135
3345,124 3345,139
3346,222 3346,321
3347,88 3347,87
3348,121 3348,232
3349,87 3349,84
3350,179 3350,245
3351,145 3351,150
3352,259 3352,309
3353,59 3353,37
3354,230 3354,309
3355,43 3355,35
3356,99 3356,112
3357,100 3357,142
3358,100 3358,78
3359,88 3359,89
3360,4 3360,0
3361,147 3361,227
3362,96 3362,121
3363,278 3363,437
3364,196 3364,279
3365,159 3365,190
3366,103 3366,64
3367,267 3367,317
3368,229 3368,250
3369,58 3369,59
3370,150 3370,167
3371,113 3371,94
3372,66 3372,51
3373,206 3373,355
3374,47 3374,31
3375,78 3375,81
3376,144 3376,177
3377,198 3377,229
3378,98 3378,98
3379,75 3379,64
3380,147 3380,178
3381,194 3381,262
3382,206 3382,216
3383,0 3383,0
3384,220 3384,409
3385,266 3385,344
3386,245 3386,290
3387,206 3387,247
3388,218 3388,308
3389,113 3389,132
3390,254 3390,309
3391,143 3391,136
3392,116 3392,168
3393,169 3393,204
3394,6 3394,0
3395,165 3395,178
3396,184 3396,215
3397,228 3397,269
3398,162 3398,195
3399,83 3399,93
3400,103 3400,69
3401,230 3401,267
3402,112 3402,126
3403,170 3403,239
3404,153 3404,191
3405,202 3405,284
3406,114 3406,128
3407,122 3407,121
3408,104 3408,86
3409,287 3409,357
3410,213 3410,259
3411,231 3411,312
3412,249 3412,336
3413,195 3413,348
3414,154 3414,183
3415,57 3415,49
3416,221 3416,272
3417,160 3417,163
3418,154 3418,178
3419,178 3419,180
3420,171 3420,317
3421,247 3421,284
3422,103 3422,120
3423,293 3423,471
3424,180 3424,255
3425,97 3425,110
3426,130 3426,131
3427,128 3427,163
3428,156 3428,176
3429,232 3429,297
3430,156 3430,193
3431,104 3431,86
3432,69 3432,50
3433,55 3433,51
3434,189 3434,245
3435,0 3435,0
3436,261 3436,355
3437,236 3437,260
3438,36 3438,13
3439,92 3439,127
3440,174 3440,224
3441,279 3441,422
3442,101 3442,118
3443,32 3443,0
3444,122 3444,150
3445,100 3445,123
3446,263 3446,453
3447,78 3447,79
3448,202 3448,268
3449,218 3449,266
3450,219 3450,406
3451,229 3451,280
3452,100 3452,103
3453,69 3453,59
3454,178 3454,236
3455,130 3455,128
3456,220 3456,265
3457,97 3457,122
3458,130 3458,117
3459,136 3459,148
3460,152 3460,148
3461,192 3461,261
3462,101 3462,105
3463,212 3463,275
3464,188 3464,345
3465,154 3465,169
3466,124 3466,142
3467,103 3467,98
3468,282 3468,327
3469,111 3469,95
3470,98 3470,85
3471,196 3471,214
3472,43 3472,2
3473,79 3473,81
3474,77 3474,64
3475,174 3475,199
3476,23 3476,4
3477,173 3477,241
3478,0 3478,0
3479,210 3479,202
3480,235 3480,276
3481,16 3481,0
3482,226 3482,302
3483,99 3483,112
3484,273 3484,472
3485,13 3485,0
3486,30 3486,0
3487,116 3487,119
3488,266 3488,351
3489,142 3489,167
3490,177 3490,202
3491,131 3491,114
3492,176 3492,215
3493,216 3493,294
3494,250 3494,333
3495,63 3495,70
3496,59 3496,46
3497,274 3497,349
3498,68 3498,39
3499,183 3499,215
3500,211 3500,212
3501,159 3501,156
3502,266 3502,352
3503,100 3503,94
3504,117 3504,141
3505,125 3505,165
3506,185 3506,339
3507,131 3507,129
3508,104 3508,115
3509,135 3509,177
3510,173 3510,205
3511,257 3511,309
3512,189 3512,348
3513,92 3513,84
import csv
import pickle
import numpy as np
from main import *
import nn
pathToTestCase = "testcases/testcase_01.pkl"
load_test_case = pickle.load(open(pathToTestCase, 'rb'))
task_detail = {
1: [4, 'Forward Pass'],
2: [6, 'Forward + Backward Pass'],
3: [2, 'Update weights'],
4: [1, 'Check Relu'],
5: [1, 'Check Relu Gradient'],
6: [3, 'Check Softmax'],
7: [7, 'Check Softmax Gradient'],
8: [3, 'Check Cross Entropy Loss'],
9: [3, 'Check Cross Entropy Loss Gradient']
}
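# Each entry maps task number -> [marks awarded on success, task description]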
def check_forward(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['forward_input'])
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(2,1,'relu'))
nn1.addLayer(nn.FullyConnectedLayer(1,2,'softmax'))
output_X = input_X
ind = 0
weights = load_test_case['forward_weights']
biases = load_test_case['forward_biases']
layers = nn1.layers
for l in layers:
l.weights = weights[ind]
l.biases = biases[ind]
ind+=1
for l in nn1.layers:
output_X = l.forwardpass(output_X)
studentAnswer = output_X
teacherAnswer = load_test_case['forward_output']
teacherAnswer = np.round(teacherAnswer, 5)
studentAnswer = np.round(studentAnswer, 5)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_backward(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['backward_input'])
input_delta = np.asarray(load_test_case['backward_input_delta'])
nn1 = nn.NeuralNetwork(0.0, .1, 1)
nn1.addLayer(nn.FullyConnectedLayer(2,5,'relu'))
nn1.addLayer(nn.FullyConnectedLayer(5,2,'softmax'))
ind = 0
weights = load_test_case['backward_weights']
biases = load_test_case['backward_biases']
layers = nn1.layers
for l in layers:
l.weights = weights[ind]
l.biases = biases[ind]
ind+=1
activations = [input_X]
for l in layers:
activations.append(l.forwardpass(activations[-1]))
# activations = load_test_case['backward_input_activations']
weightsGrad = list()
biasesGrad = list()
delta = input_delta
for i in range(len(layers)-1, -1, -1):
delta = layers[i].backwardpass(activations[i], delta)
weightsGrad.append(layers[i].weightsGrad)
biasesGrad.append(layers[i].biasesGrad)
studentAnswerdelta = delta
studentAnswerweightsGrad = weightsGrad
studentAnswerbiasesGrad = biasesGrad
teacherAnswerdelta = load_test_case['backward_output']
teacherAnswerweightsGrad = load_test_case['backward_weightsGrad']
teacherAnswerbiasesGrad = load_test_case['backward_biasesGrad']
teacherAnswerdelta = np.asarray(teacherAnswerdelta)
teacherAnswerweightsGrad = np.asarray(teacherAnswerweightsGrad)
teacherAnswerbiasesGrad = np.asarray(teacherAnswerbiasesGrad)
studentAnswerdelta = np.round(studentAnswerdelta, 6)
teacherAnswerdelta = np.round(teacherAnswerdelta, 6)
studentAnswerweightsGrad = [np.round(x, 6) for x in studentAnswerweightsGrad]
teacherAnswerweightsGrad = [np.round(x, 6) for x in teacherAnswerweightsGrad]
studentAnswerbiasesGrad = [np.round(x, 6) for x in studentAnswerbiasesGrad]
teacherAnswerbiasesGrad = [np.round(x, 6) for x in teacherAnswerbiasesGrad]
print('Student Answer deltas', studentAnswerdelta)
print('Correct Answer deltas', teacherAnswerdelta)
print('Student Answer weights Gradient', studentAnswerweightsGrad)
print('Correct Answer weights Gradient', teacherAnswerweightsGrad)
print('Student Answer biases Gradient', studentAnswerbiasesGrad)
print('Correct Answer biases Gradient', teacherAnswerbiasesGrad)
all_correct = (np.array_equal(studentAnswerdelta, teacherAnswerdelta) and
    np.all([np.array_equal(x, y) for x, y in zip(studentAnswerweightsGrad, teacherAnswerweightsGrad)]) and
    np.all([np.array_equal(x, y) for x, y in zip(studentAnswerbiasesGrad, teacherAnswerbiasesGrad)]))
print('Correct', all_correct)
return all_correct
def check_updateweights(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(2,1,'relu'))
weights = load_test_case['updateweights_weights']
biases = load_test_case['updateweights_biases']
weightsGrad = load_test_case['updateweights_weightsGrad']
biasesGrad = load_test_case['updateweights_biasesGrad']
layer = nn1.layers[0]
layer.weights = weights
layer.biases = biases
layer.weightsGrad = weightsGrad
layer.biasesGrad = biasesGrad
layer.updateWeights(0.01)
studentAnswer = [layer.weights, layer.biases]
teacherAnswer = load_test_case['updateweights_output']
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
teacherAnswer_weight = np.round(teacherAnswer[0], 6)
studentAnswer_weight = np.round(studentAnswer[0], 6)
teacherAnswer_bias = np.round(teacherAnswer[1], 6)
studentAnswer_bias = np.round(studentAnswer[1], 6)
# Compare the rounded values so tiny floating-point differences do not fail the check
is_correct = np.array_equal(studentAnswer_weight, teacherAnswer_weight) and np.array_equal(studentAnswer_bias, teacherAnswer_bias)
print('Correct', is_correct)
return is_correct
def check_relu(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['relu_input']).reshape(1,4)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(4,4,'relu'))
output_X = input_X
output_X = nn1.layers[0].relu_of_X(output_X)
studentAnswer = output_X
teacherAnswer = load_test_case['relu_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_gradient_relu(task_number):
# Note: the 'gardient_*' keys below preserve the test-case file's original spelling
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['gardient_relu_input']).reshape(1,4)
input_delta = np.asarray(load_test_case['gardient_relu_input_delta']).reshape(1,4)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(4,4,'relu'))
output_X = input_X
output_X = nn1.layers[0].gradient_relu_of_X(output_X, input_delta)
studentAnswer = output_X
teacherAnswer = load_test_case['gardient_relu_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_softmax(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['softmax_input']).reshape(1,4)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(4,4,'softmax'))
output_X = input_X
output_X = nn1.layers[0].softmax_of_X(output_X)
studentAnswer = output_X
teacherAnswer = load_test_case['softmax_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_gradient_softmax(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['gardient_softmax_input']).reshape(1,4)
input_delta = np.asarray(load_test_case['gardient_softmax_input_delta']).reshape(1,4)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(4,4,'softmax'))
output_X = input_X
output_X = nn1.layers[0].gradient_softmax_of_X(output_X, input_delta)
studentAnswer = output_X
teacherAnswer = load_test_case['gardient_softmax_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_crossEntropyLoss(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_Y = np.asarray(load_test_case['crossEntropyLoss_input_Y']).reshape(2, 10)
input_Y_pred = np.asarray(load_test_case['crossEntropyLoss_input_Y_pred']).reshape(2, 10)
nn1 = nn.NeuralNetwork(0.0, 4, 1)
output_Y = nn1.crossEntropyLoss(input_Y, input_Y_pred)
studentAnswer = output_Y
teacherAnswer = load_test_case['crossEntropyLoss_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_crossEntropyDelta(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_Y = np.asarray(load_test_case['crossEntropyDelta_input_Y']).reshape(2, 10)
input_Y_pred = np.asarray(load_test_case['crossEntropyDelta_input_Y_pred']).reshape(2, 10)
nn1 = nn.NeuralNetwork(0.0, 4, 1)
output_Y = nn1.crossEntropyDelta(input_Y, input_Y_pred)
studentAnswer = output_Y
teacherAnswer = load_test_case['crossEntropyDelta_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
if __name__ == "__main__":
np.random.seed(42)
print()
correct_status = False
total_marks = 0
# Map each task number to its check function; marks per task come from task_detail
checks = {
    1: check_forward,
    2: check_backward,
    3: check_updateweights,
    4: check_relu,
    5: check_gradient_relu,
    6: check_softmax,
    7: check_gradient_softmax,
    8: check_crossEntropyLoss,
    9: check_crossEntropyDelta
}
for task_number, check in checks.items():
    try:
        correct_status = check(task_number)
        total_marks += correct_status * task_detail[task_number][0]
    except Exception as e:
        print("Error " + str(e) + " occurred in task", task_number)
        print("Correct False")
print('='*20 + ' TASK Finish ' + '='*20)
full_marks = sum(marks for marks, _ in task_detail.values())
print(' You got', total_marks, 'Marks Out of', full_marks, 'for', pathToTestCase.split('/')[1].split('.')[0])
print('='*53)
print()
import numpy as np
import nn
import csv
import pickle
def taskXor():
XTrain, YTrain, XVal, YVal, XTest, YTest = loadXor()
# Create a NeuralNetwork object 'nn1' as follows with optimal parameters. For parameter definition, refer to nn.py file.
# nn1 = nn.NeuralNetwork(lr, batchSize, epochs)
# Add layers to neural network corresponding to inputs and outputs of given data
# Eg. nn1.addLayer(FullyConnectedLayer(x,y))
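# An illustrative sketch only: 2 inputs and 2 one-hot outputs follow the XOR data,
# while the hidden width, lr, batchSize and epochs below are assumptions, not the graded answer.
# nn1 = nn.NeuralNetwork(0.1, 32, 30)
# nn1.addLayer(nn.FullyConnectedLayer(2, 4, 'relu'))
# nn1.addLayer(nn.FullyConnectedLayer(4, 2, 'softmax'))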
###############################################
# TASK 3a (Marks 7) - YOUR CODE HERE
raise NotImplementedError
###############################################
nn1.train(XTrain, YTrain, XVal, YVal)
pred, acc = nn1.validate(XTest, YTest)
with open("predictionsXor.csv", 'w') as file:
writer = csv.writer(file)
writer.writerow(["id", "prediction"])
for i, p in enumerate(pred):
writer.writerow([i, p])
print('Test Accuracy',acc)
return nn1
def preprocessMnist(X):
# Perform any data preprocessing that you wish to do here
# Input: A 2-d numpy array containing an entire train, val or test split | Shape: n x 28*28
# Output: A 2-d numpy array of the same shape as the input (If the size is changed, you will get downstream errors)
###############################################
# TASK 3c (Marks 0) - YOUR CODE HERE
raise NotImplementedError
###############################################
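# A common preprocessing choice (an assumption; any shape-preserving transform works):
# return X / 255.0  # scale raw pixel values to [0, 1]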
def taskMnist():
XTrain, YTrain, XVal, YVal, XTest, _ = loadMnist()
# Create a NeuralNetwork object 'nn1' as follows with optimal parameters. For parameter definition, refer to nn.py file.
# nn1 = nn.NeuralNetwork(lr, batchSize, epochs)
# Add layers to neural network corresponding to inputs and outputs of given data
# Eg. nn1.addLayer(FullyConnectedLayer(x,y))
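# Illustrative sketch only: 784 = 28*28 inputs and 10 classes are fixed by the data,
# but the hidden width and hyperparameters below are assumptions.
# nn1 = nn.NeuralNetwork(0.1, 100, 10)
# nn1.addLayer(nn.FullyConnectedLayer(784, 64, 'relu'))
# nn1.addLayer(nn.FullyConnectedLayer(64, 10, 'softmax'))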
###############################################
# TASK 3b (Marks 13) - YOUR CODE HERE
raise NotImplementedError
###############################################
nn1.train(XTrain, YTrain, XVal, YVal)
pred, _ = nn1.validate(XTest, None)
with open("predictionsMnist.csv", 'w') as file:
writer = csv.writer(file)
writer.writerow(["id", "prediction"])
for i, p in enumerate(pred):
writer.writerow([i, p])
return nn1
################################# UTILITY FUNCTIONS ############################################
def oneHotEncodeY(Y, nb_classes):
# Calculates one-hot encoding for a given list of labels
# Input :- Y : An integer or a list of labels
# Output :- Corresponding one-hot encoded vector or the list of one-hot encoded vectors
return (np.eye(nb_classes)[Y]).astype(int)
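# Example: oneHotEncodeY([0, 2], 3) returns [[1, 0, 0], [0, 0, 1]]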
def loadXor():
# This is a toy dataset with 10k points and 2 labels.
# The output can be represented as the XOR of the input as described in the problem statement
# There are 7k training points, 1k validation points and 2k test points
train = pickle.load(open("data/xor/train.pkl", 'rb'))
test = pickle.load(open("data/xor/test.pkl", 'rb'))
testX, testY = np.array(test[0]), np.array(oneHotEncodeY(test[1],2))
trainX, trainY = np.array(train[0][:7000]), np.array(oneHotEncodeY(train[1][:7000],2))
valX, valY = np.array(train[0][7000:]), np.array(oneHotEncodeY(train[1][7000:],2))
return trainX, trainY, valX, valY, testX, testY
def loadMnist():
# MNIST dataset has 50k train, 10k val, 10k test
# The test labels have not been provided for this task
train = pickle.load(open("data/mnist/train.pkl", 'rb'))
test = pickle.load(open("data/mnist/test.pkl", 'rb'))
testX = preprocessMnist(np.array(test[0]))
testY = None # For MNIST the test labels have not been provided
trainX, trainY = preprocessMnist(np.array(train[0][:50000])), np.array(oneHotEncodeY(train[1][:50000],10))
valX, valY = preprocessMnist(np.array(train[0][50000:])), np.array(oneHotEncodeY(train[1][50000:],10))
return trainX, trainY, valX, valY, testX, testY
#################################################################################################
if __name__ == "__main__":
np.random.seed(7)
taskXor()
taskMnist()
import numpy as np
class NeuralNetwork:
def __init__(self, lr, batchSize, epochs):
# Method to initialize a Neural Network Object
# Parameters
# lr - learning rate
# batchSize - Mini batch size
# epochs - Number of epochs for training
self.lr = lr
self.batchSize = batchSize
self.epochs = epochs
self.layers = []
def addLayer(self, layer):
# Method to add layers to the Neural Network
self.layers.append(layer)
def train(self, trainX, trainY, validX=None, validY=None):
# Method for training the Neural Network
# Input
# trainX - A list of training input data to the neural network
# trainY - Corresponding list of training data labels
# validX - A list of validation input data to the neural network
# validY - Corresponding list of validation data labels
# This method trains the weights and biases using the training data (trainX, trainY)
# Feel free to print accuracy at different points using the validate() or computeAccuracy() functions of this class
###############################################
# TASK 2c (Marks 0) - YOUR CODE HERE
raise NotImplementedError
###############################################
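# A minimal sketch of one possible mini-batch loop (assumed; it mirrors how the autograder
# drives forwardpass/backwardpass, with activations[j] being the input to layer j):
# for epoch in range(self.epochs):
#     for i in range(0, trainX.shape[0], self.batchSize):
#         batchX = trainX[i:i + self.batchSize]
#         batchY = trainY[i:i + self.batchSize]
#         activations = [batchX]
#         for layer in self.layers:
#             activations.append(layer.forwardpass(activations[-1]))
#         delta = self.crossEntropyDelta(batchY, activations[-1])
#         for j in range(len(self.layers) - 1, -1, -1):
#             delta = self.layers[j].backwardpass(activations[j], delta)
#         for layer in self.layers:
#             layer.updateWeights(self.lr)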
def crossEntropyLoss(self, Y, predictions):
# Input
# Y : Ground truth labels (encoded as 1-hot vectors) | shape = batchSize x number of output labels
# predictions : Predictions of the model | shape = batchSize x number of output labels
# Returns the cross-entropy loss between the predictions and the ground truth labels | shape = scalar
###############################################
# TASK 2a (Marks 3) - YOUR CODE HERE
raise NotImplementedError
###############################################
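# A minimal sketch (assumed form, averaged over the batch; the epsilon guards log(0)):
# return -np.sum(Y * np.log(predictions + 1e-12)) / Y.shape[0]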
def crossEntropyDelta(self, Y, predictions):
# Input
# Y : Ground truth labels (encoded as 1-hot vectors) | shape = batchSize x number of output labels
# predictions : Predictions of the model | shape = batchSize x number of output labels
# Returns the derivative of the loss with respect to the last layer outputs, i.e. dL/dp_i where p_i is the ith
# output of the last layer of the network | shape = batchSize x number of output labels
###############################################
# TASK 2b (Marks 3) - YOUR CODE HERE
raise NotImplementedError
###############################################
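# A minimal sketch, assuming the batch-averaged loss above:
# return -(Y / (predictions + 1e-12)) / Y.shape[0]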
def computeAccuracy(self, Y, predictions):
# Returns the accuracy given the true labels Y and final output of the model
correct = 0
for i in range(len(Y)):
if np.argmax(Y[i]) == np.argmax(predictions[i]):
correct += 1
accuracy = (float(correct) / len(Y)) * 100
return accuracy
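# Example: Y = [[0, 1], [1, 0]], predictions = [[0.2, 0.8], [0.4, 0.6]]
# -> the argmax matches on the first row only, so the accuracy is 50.0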
def validate(self, validX, validY):
# Input
# validX : Validation Input Data
# validY : Validation Labels
# Returns the predictions and validation accuracy evaluated over the current neural network model
valActivations = self.predict(validX)
pred = np.argmax(valActivations, axis=1)
if validY is not None:
valAcc = self.computeAccuracy(validY, valActivations)
return pred, valAcc
else:
return pred, None
def predict(self, X):
# Input
# X : Current Batch of Input Data as an nparray
# Output
# Returns the predictions made by the model (which are the activations output by the last layer)
# Note: Activations at the first layer (input layer) are X itself
activations = X
for l in self.layers:
activations = l.forwardpass(activations)
return activations
class FullyConnectedLayer:
def __init__(self, in_nodes, out_nodes, activation):
# Method to initialize a Fully Connected Layer
# Parameters
# in_nodes - number of input nodes of this layer
# out_nodes - number of output nodes of this layer
self.in_nodes = in_nodes
self.out_nodes = out_nodes
self.activation = activation
# Stores a quantity that is computed in the forward pass but actually used in the backward pass. Try to identify
# this quantity to avoid recomputing it in the backward pass and hence, speed up computation
self.data = None
# Create np arrays of appropriate sizes for weights and biases and initialise them as you see fit
###############################################
# TASK 1a (Marks 0) - YOUR CODE HERE
raise NotImplementedError
self.weights = None
self.biases = None
###############################################
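# One common initialisation (an assumption; shapes follow forwardpass computing X @ weights + biases):
# self.weights = 0.01 * np.random.randn(in_nodes, out_nodes)
# self.biases = np.zeros((1, out_nodes))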
# NOTE: You must NOT change the above code but you can add extra variables if necessary
# Store the gradients with respect to the weights and biases in these variables during the backward pass
self.weightsGrad = None
self.biasesGrad = None
def relu_of_X(self, X):
# Input
# data : Output from current layer/input for Activation | shape: batchSize x self.out_nodes
# Returns: Activations after one forward pass through this relu layer | shape: batchSize x self.out_nodes
# This will only be called for layers with activation relu
###############################################
# TASK 1b (Marks 1) - YOUR CODE HERE
raise NotImplementedError
###############################################
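# A one-line sketch (standard elementwise ReLU): return np.maximum(0, X)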
def gradient_relu_of_X(self, X, delta):
# Input
# data : Output from next layer/input | shape: batchSize x self.out_nodes
# delta : del_Error/ del_activation_curr | shape: batchSize x self.out_nodes
# Returns: Current del_Error to pass to current layer in backward pass through relu layer | shape: batchSize x self.out_nodes
# This will only be called for layers with activation relu and during backwardpass
###############################################
# TASK 1e (Marks 1) - YOUR CODE HERE
raise NotImplementedError
###############################################
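# A minimal sketch (the ReLU gradient is 1 where the activation is positive, else 0):
# return delta * (X > 0)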
def softmax_of_X(self, X):
# Input
# data : Output from current layer/input for Activation | shape: batchSize x self.out_nodes
# Returns: Activations after one forward pass through this softmax layer | shape: batchSize x self.out_nodes
# This will only be called for layers with activation softmax
###############################################
# TASK 1c (Marks 3) - YOUR CODE HERE
raise NotImplementedError
###############################################
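# A minimal sketch (subtracting the row max keeps np.exp from overflowing):
# exps = np.exp(X - np.max(X, axis=1, keepdims=True))
# return exps / np.sum(exps, axis=1, keepdims=True)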
def gradient_softmax_of_X(self, X, delta):
# Input
# data : Output from next layer/input | shape: batchSize x self.out_nodes
# delta : del_Error/ del_activation_curr | shape: batchSize x self.out_nodes
# Returns: Current del_Error to pass to current layer in backward pass through softmax layer | shape: batchSize x self.out_nodes
# This will only be called for layers with activation softmax and during backwardpass
# Hint: You might need to compute Jacobian first
###############################################
# TASK 1f (Marks 7) - YOUR CODE HERE
raise NotImplementedError
###############################################
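# One possible per-sample sketch via the Jacobian J = diag(s) - s s^T, where s = softmax(x):
# s = self.softmax_of_X(X)
# out = np.zeros_like(delta)
# for k in range(X.shape[0]):
#     J = np.diag(s[k]) - np.outer(s[k], s[k])
#     out[k] = np.dot(delta[k], J)
# return out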
def forwardpass(self, X):
# Input
# activations : Activations from previous layer/input | shape: batchSize x self.in_nodes
# Returns: Activations after one forward pass through this layer | shape: batchSize x self.out_nodes
# You may need to write different code for different activation layers
###############################################
# TASK 1d (Marks 4) - YOUR CODE HERE
if self.activation == 'relu':
raise NotImplementedError
elif self.activation == 'softmax':
raise NotImplementedError
else:
print("ERROR: Incorrect activation specified: " + self.activation)
exit()
###############################################
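# A minimal sketch (assumed; caching the activation in self.data matches its use in backwardpass):
# z = np.dot(X, self.weights) + self.biases
# self.data = self.relu_of_X(z) if self.activation == 'relu' else self.softmax_of_X(z)
# return self.data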
def backwardpass(self, activation_prev, delta):
# Input
# activation_prev : Output from the previous layer / the input | shape: batchSize x self.in_nodes
# delta : del_Error/ del_activation_curr | shape: batchSize x self.out_nodes
# Output
# new_delta : del_Error/ del_activation_prev | shape: batchSize x self.in_nodes
# You may need to write different code for different activation layers
# Just compute and store the gradients here - do not make the actual updates
###############################################
# TASK 1g (Marks 6) - YOUR CODE HERE
if self.activation == 'relu':
inp_delta = self.gradient_relu_of_X(self.data, delta)
elif self.activation == 'softmax':
inp_delta = self.gradient_softmax_of_X(self.data, delta)
else:
print("ERROR: Incorrect activation specified: " + self.activation)
exit()
###############################################
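# A minimal sketch of the remaining steps (assumed; whether gradients are summed or
# averaged over the batch depends on the grader's convention):
# self.weightsGrad = np.dot(activation_prev.T, inp_delta)
# self.biasesGrad = np.sum(inp_delta, axis=0)
# return np.dot(inp_delta, self.weights.T)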
def updateWeights(self, lr):
# Input
# lr: Learning rate being used
# Output: None
# This function should actually update the weights using the gradients computed in the backwardpass
###############################################
# TASK 1h (Marks 2) - YOUR CODE HERE
raise NotImplementedError
###############################################
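# A minimal sketch (plain gradient-descent step):
# self.weights = self.weights - lr * self.weightsGrad
# self.biases = self.biases - lr * self.biasesGrad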
\ No newline at end of file
instance (id),count
0,8
1,466
2,176
3,58
4,288
5,795
6,1
7,292
8,427
9,73
10,216
11,11
12,171
13,238