Commit 284aaad9 authored by SHREYANSH JAIN's avatar SHREYANSH JAIN

added assignment2 ML folder

parent 4948f373
@@ -13,7 +13,7 @@ with open('logcosh.log','r') as csvfile:
-plt.plot(x,y, label='LOGCOSH')
+plt.plot(x,y,'--', label='LOGCOSH')
 plt.xlabel('epoch')
 plt.ylabel('mean_squared_loss')
import numpy as np
import argparse
import csv
import warnings
'''
The commented-out portions may not help much with
optimization, but they do help with visualization.
'''
def mean_squared_loss(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    err = (1 / samples) * np.sum(np.square(ydata - guess))
    return err
def mean_squared_gradient(xdata, ydata, weights):
    samples = np.shape(xdata)[0]
    guess = np.dot(xdata, weights)
    gradient = (2 / samples) * np.dot(xdata.T, (guess - ydata))
    return gradient
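# A minimal finite-difference check of the pair above (a sketch added for
# clarity, not part of the assignment): for L(w) = (1/n)*||Xw - y||^2 the
# gradient is (2/n)*X^T(Xw - y), so the numerical slope along each coordinate
# should match mean_squared_gradient.
def _check_mse_gradient(xdata, ydata, weights, eps=1e-6):
    analytic = mean_squared_gradient(xdata, ydata, weights)
    numeric = np.zeros_like(weights, dtype=float)
    for j in range(len(weights)):
        bumped = weights.astype(float).copy()
        bumped[j] += eps  # bump one coordinate at a time
        numeric[j] = (mean_squared_loss(xdata, ydata, bumped)
                      - mean_squared_loss(xdata, ydata, weights)) / eps
    return np.allclose(analytic, numeric, atol=1e-3)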
def mean_absolute_loss(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    err = (1 / samples) * np.sum(np.absolute(ydata - guess))
    return err
def mean_absolute_gradient(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    signInfo = np.sign(guess - ydata)
    gradient = np.dot(xdata.T, signInfo) / samples
    return gradient
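# Note: np.sign is the subgradient of |.| (zero exactly at the kinks), so the
# pair above implements L(w) = (1/n)*sum|Xw - y| with subgradient
# (1/n)*X^T sign(Xw - y); plain gradient descent still works in practice
# despite the non-smooth points.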
def mean_log_cosh_loss(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    warnings.filterwarnings("error")  # promote overflow warnings to exceptions so the fallback triggers
    try:
        err = np.sum(np.log(np.cosh(guess - ydata))) / samples
    except Exception:
        # large-|x| approximation: log(cosh(x)) ~ |x| - log(2)
        err = np.sum(np.absolute(guess - ydata) - np.log(2)) / samples
    return err
def mean_log_cosh_gradient(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    gradient = np.dot(xdata.T, np.tanh(guess - ydata)) / samples
    return gradient
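# Why tanh: d/dx log(cosh(x)) = tanh(x), so the gradient of
# (1/n)*sum log(cosh(Xw - y)) is (1/n)*X^T tanh(Xw - y). For large |x|,
# log(cosh(x)) approaches |x| - log(2), which is the overflow fallback used
# in mean_log_cosh_loss above.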
def root_mean_squared_loss(xdata, ydata, weights):
    guess = np.dot(xdata, weights)
    samples = np.shape(guess)[0]
    err = np.sqrt(np.sum(np.square(ydata - guess)) / samples)
    return err
def root_mean_squared_gradient(xdata, ydata, weights):
    gradient = mean_squared_gradient(xdata, ydata, weights) / (2 * root_mean_squared_loss(xdata, ydata, weights))
    return gradient
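# Chain rule behind the RMSE pair: with L(w) = MSE(w),
# d sqrt(L)/dw = L'(w) / (2*sqrt(L(w))), i.e. the MSE gradient rescaled by
# 1/(2*RMSE). Note this reuses mean_squared_gradient and
# root_mean_squared_loss, so each RMSE step costs two extra passes over the data.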
class LinearRegressor:
    def __init__(self, dims):
        self.dims = dims
        self.W = np.random.rand(dims)
        # self.W = np.random.uniform(low=0.0, high=1.0, size=dims)

    def train(self, xtrain, ytrain, loss_function, gradient_function, epoch=100, lr=1):
        errlog = []
        for iterations in range(epoch):
            self.W = self.W - lr * gradient_function(xtrain, ytrain, self.W)
            errlog.append(loss_function(xtrain, ytrain, self.W))
            # errlog.append(mean_squared_loss(xtrain, ytrain, self.W))
        return errlog

    def predict(self, xtest):
        return np.dot(xtest, self.W)
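# A toy usage sketch of LinearRegressor (illustration only; the synthetic data
# below is made up and is not part of the assignment's command-line flow):
def _demo_linear_regressor():
    rng = np.random.RandomState(0)
    X = np.hstack([np.ones((200, 1)), rng.rand(200, 3)])  # bias + 3 features
    w_true = np.array([1.0, 2.0, -3.0, 0.5])
    y = X.dot(w_true)  # noiseless targets
    model = LinearRegressor(4)
    errlog = model.train(X, y, mean_squared_loss, mean_squared_gradient,
                         epoch=500, lr=0.1)
    return errlog[-1]  # errlog should be decreasing, approaching 0 here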
def read_dataset(trainfile, testfile):
    xtrain = []
    ytrain = []
    xtest = []
    with open(trainfile, 'r') as f:
        reader = csv.reader(f, delimiter=',')
        next(reader, None)  # skip the header row
        for row in reader:
            xtrain.append(row[:-1])
            ytrain.append(row[-1])
    with open(testfile, 'r') as f:
        reader = csv.reader(f, delimiter=',')
        next(reader, None)  # skip the header row
        for row in reader:
            xtest.append(row)
    return np.array(xtrain), np.array(ytrain), np.array(xtest)
def one_hot_encoding(value_list, classes):
    res = np.eye(classes)[value_list.reshape(-1)]
    return res.reshape(list(value_list.shape) + [classes])
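# Example of the helper above: one_hot_encoding(np.array([0, 2]), 3) returns
# [[1., 0., 0.], [0., 0., 1.]] — row i is the one-hot vector for
# value_list[i], and a trailing axis of length `classes` is appended to the
# input's shape.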
norm_dict = {}
dictionary_of_classes_for_features = {
    2: 5,
    3: 25,
    5: 8,
    7: 5
}
dictionary_of_days = {
    'Monday': 1,
    'Tuesday': 2,
    'Wednesday': 3,
    'Thursday': 4,
    'Friday': 5,
    'Saturday': 6,
    'Sunday': 7
}
def slicer(arr, beg, end):
    return np.array([i[beg:end] for i in arr]).reshape(-1, 1)
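# Example: slicer(np.array(['2011-01-20', '2011-02-03']), 5, 7) returns
# array([['01'], ['02']]) — here, the month substring of each 'YYYY-MM-DD'
# date string, as a column vector (this is how the date column is sliced in
# preprocess_dataset below).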
"""
#for normalization of parametes 'wind speed' and 'humidity' uncoment
def normalize(arr):
arr = arr
if not norm_dict: # make dictionary once at training to be used later during test
# for i in range(arr.shape[1]):
norm_dict['init'] = [np.min(arr), np.max(arr)]
#norm_dict['init'] = [np.mean(arr), np.std(arr)]
# for i in range(arr.shape[1]):
arr = np.array([(x - norm_dict['init'][0])/(norm_dict['init'][1] - norm_dict['init'][0]) for x in arr]) # min-max
#arr = np.array([(x - norm_dict['init'][0])/(norm_dict['init'][1]) for x in arr]) # standardization
return arr
"""
def preprocess_dataset(xdata, ydata=None):
    """
    # To normalize the parameters 'wind speed' and 'humidity', uncomment:
    xdata[:, 10] = normalize(xdata[:, 10].astype('float'))
    xdata[:, 11] = normalize(xdata[:, 11].astype('float'))"""
    # convert weekdays to numeric values for one_hot_encoding
    xdata[:, 5] = [dictionary_of_days[i] for i in xdata[:, 5]]
    cat_cols = [2, 3, 5, 7]
    for i in cat_cols:
        # drop 2 columns: column 0 (always zero, since values start at 1) and
        # column 1 (the reference class for C-1 encoding)
        t = one_hot_encoding(xdata[:, i].astype('int'), dictionary_of_classes_for_features[i])[:, 2:]
        xdata = np.concatenate((xdata, t), axis=1)
    xdata = np.delete(xdata, cat_cols, 1)  # remove the original categorical columns
    xdata = np.delete(xdata, 6, 1)
    xdata = np.delete(xdata, 8, 1)
    # extract features from the date
    month = slicer(xdata[:, 1], 5, 7)
    t = one_hot_encoding(month[:, 0].astype('int'), 13)[:, 2:]
    xdata = np.concatenate((xdata, t), axis=1)
    date = slicer(xdata[:, 1], 8, 10)
    week = np.ceil(date.astype('int') / 7)  # week of the month
    t = one_hot_encoding(week[:, 0].astype('int'), 6)[:, 2:]
    xdata = np.concatenate((xdata, t), axis=1)
    xdata = xdata[:, 2:]  # drop the first 2 unneeded columns
    xdata = xdata.astype('float32')
    bias = np.ones((np.shape(xdata)[0], 1))  # add a bias column to the feature matrix
    xdata = np.concatenate((bias, xdata), axis=1)
    if ydata is None:
        return xdata
    ydata = ydata.astype('float32')
    return xdata, ydata
dictionary_of_losses = {
    'mse': (mean_squared_loss, mean_squared_gradient),
    'mae': (mean_absolute_loss, mean_absolute_gradient),
    'rmse': (root_mean_squared_loss, root_mean_squared_gradient),
    'logcosh': (mean_log_cosh_loss, mean_log_cosh_gradient),
}
"""
#For outliers removal from wind speed column uncomment
def out(x, std, mean):
if ((x < mean + 2 * std)and (x > mean - 2 * std)):
return 0
else:
return 1
def outlier(xtrain, ytrain, std, mean):
a =[]
for i in xtrain[:, 11].astype('float32'):
a.append(out(i,std, mean))
a = np.array(a)
xdata = np.concatenate((xtrain, a.reshape(-1, 1)), axis=1)
ytrain = np.delete(ytrain, np.argwhere(xdata[:, -1].astype('int') > 0), 0)
xdata = np.delete(xdata, np.argwhere(xdata[:, -1].astype('int') > 0), 0)
xdata = np.delete(xdata, -1, 1)
return (xdata, ytrain)"""
def main():
    # You are free to modify the main function as per your requirements.
    # Uncomment the lines below and pass the appropriate values.
    xtrain, ytrain, xtest = read_dataset(args.train_file, args.test_file)
    """
    # To remove outliers from the wind speed column, uncomment:
    std = np.std(xtrain[:, 11].astype('float32'))
    mean = np.mean(xtrain[:, 11].astype('float32'))
    xtrain, ytrain = outlier(xtrain, ytrain, std, mean)"""
    xtrainprocessed, ytrainprocessed = preprocess_dataset(xtrain, ytrain)
    xtestprocessed = preprocess_dataset(xtest)
    model = LinearRegressor(np.shape(xtrainprocessed)[1])
    # The loss function is chosen by a command-line argument
    loss_fn, loss_grad = dictionary_of_losses[args.loss]
    errlog = model.train(xtrainprocessed, ytrainprocessed, loss_fn, loss_grad, args.epoch, args.lr)
    ytest = model.predict(xtestprocessed)
    ytest = ytest.astype('int')
    ytest[ytest < 0] = 0  # counts cannot be negative
    print(ytest)
    output = [(i, ytest[i]) for i in range(len(ytest))]
    np.savetxt("prediction.csv", output, delimiter=',', fmt="%d", header="instance (id),count", comments='')
    # np.savetxt("error.log", errlog, delimiter='\n', fmt="%f")
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--loss', default='mse', choices=['mse', 'mae', 'rmse', 'logcosh'], help='loss function')
    parser.add_argument('--lr', default=1.0, type=float, help='learning rate')
    parser.add_argument('--epoch', default=100, type=int, help='number of epochs')
    parser.add_argument('--train_file', type=str, help='location of the training file')
    parser.add_argument('--test_file', type=str, help='location of the test file')
    args = parser.parse_args()
    main()
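# Usage (a sketch — the script's file name is not shown in this diff, so
# "assignment.py" is a placeholder):
#   python assignment.py --train_file train.csv --test_file test.csv \
#       --loss logcosh --lr 0.01 --epoch 500
# This prints the predictions and writes them to prediction.csv in the
# "instance (id),count" format shown below.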
prediction.csv — "instance (id),count" rows with updated predicted counts for instances 0 through 3211 (the full per-instance old/new listing is omitted)
3212,211
3213,8
3214,211
3215,198
3216,184
3217,246
3218,134
3219,63
3220,153
3221,88
3222,141
3223,249
3224,136
3225,24
3226,178
3227,68
3228,221
3229,227
3230,161
3231,197
3232,241
3233,271
3234,182
3235,232
3236,145
3237,149
3238,106
3239,18
3240,138
3241,122
3242,107
3243,42
3244,85
3245,97
3246,187
3247,196
3248,140
3249,21
3159,237
3160,194
3161,202
3162,403
3163,36
3164,256
3165,169
3166,89
3167,383
3168,138
3169,80
3170,268
3171,349
3172,136
3173,132
3174,103
3175,193
3176,68
3177,194
3178,219
3179,264
3180,230
3181,50
3182,124
3183,198
3184,233
3185,115
3186,104
3187,159
3188,202
3189,300
3190,81
3191,255
3192,299
3193,80
3194,92
3195,225
3196,181
3197,163
3198,105
3199,148
3200,99
3201,88
3202,192
3203,73
3204,201
3205,0
3206,278
3207,64
3208,304
3209,65
3210,455
3211,132
3212,261
3213,0
3214,261
3215,259
3216,231
3217,300
3218,139
3219,32
3220,195
3221,63
3222,179
3223,316
3224,182
3225,13
3226,229
3227,32
3228,274
3229,257
3230,202
3231,195
3232,326
3233,442
3234,184
3235,265
3236,185
3237,167
3238,73
3239,0
3240,102
3241,169
3242,84
3243,26
3244,66
3245,94
3246,202
3247,234
3248,181
3249,0
3250,247
3251,89
3252,118
3253,128
3254,182
3255,1
3256,199
3257,249
3258,152
3259,178
3260,273
3261,136
3262,87
3251,92
3252,115
3253,140
3254,245
3255,0
3256,242
3257,403
3258,168
3259,222
3260,370
3261,151
3262,75
3263,0
3264,0
3265,224
3266,116
3267,251
3268,142
3269,84
3270,112
3271,135
3272,185
3273,266
3274,217
3275,170
3276,184
3277,119
3265,383
3266,127
3267,311
3268,161
3269,71
3270,102
3271,142
3272,256
3273,456
3274,387
3275,219
3276,215
3277,78
3278,0
3279,250
3280,196
3281,188
3282,241
3283,142
3284,35
3285,219
3279,266
3280,270
3281,230
3282,307
3283,153
3284,0
3285,244
3286,0
3287,136
3288,224
3289,119
3290,243
3291,191
3292,192
3293,210
3294,123
3295,213
3296,119
3297,77
3298,156
3299,239
3300,173
3301,51
3302,305
3303,29
3304,43
3305,84
3306,34
3307,257
3308,88
3309,226
3310,158
3311,90
3312,88
3313,146
3314,154
3315,212
3316,56
3317,231
3318,252
3319,211
3320,102
3321,212
3322,257
3287,126
3288,317
3289,123
3290,291
3291,244
3292,214
3293,392
3294,159
3295,253
3296,127
3297,74
3298,195
3299,277
3300,213
3301,58
3302,486
3303,0
3304,38
3305,79
3306,7
3307,315
3308,74
3309,256
3310,174
3311,108
3312,125
3313,144
3314,204
3315,287
3316,51
3317,283
3318,283
3319,231
3320,215
3321,261
3322,440
3323,87
3324,82
3325,51
3326,163
3327,117
3328,93
3329,234
3330,142
3331,230
3332,75
3333,112
3324,71
3325,33
3326,212
3327,141
3328,96
3329,280
3330,164
3331,237
3332,59
3333,161
3334,0
3335,93
3336,105
3337,225
3338,198
3339,78
3340,170
3341,186
3342,199
3343,206
3344,106
3345,124
3346,222
3347,88
3348,121
3349,87
3350,179
3351,145
3352,259
3353,59
3354,230
3355,43
3356,99
3357,100
3358,100
3359,88
3360,4
3361,147
3362,96
3363,278
3364,196
3365,159
3366,103
3367,267
3368,229
3369,58
3370,150
3371,113
3372,66
3373,206
3374,47
3375,78
3376,144
3377,198
3335,102
3336,143
3337,378
3338,208
3339,72
3340,216
3341,258
3342,246
3343,252
3344,135
3345,139
3346,321
3347,87
3348,232
3349,84
3350,245
3351,150
3352,309
3353,37
3354,309
3355,35
3356,112
3357,142
3358,78
3359,89
3360,0
3361,227
3362,121
3363,437
3364,279
3365,190
3366,64
3367,317
3368,250
3369,59
3370,167
3371,94
3372,51
3373,355
3374,31
3375,81
3376,177
3377,229
3378,98
3379,75
3380,147
3381,194
3382,206
3379,64
3380,178
3381,262
3382,216
3383,0
3384,220
3385,266
3386,245
3387,206
3388,218
3389,113
3390,254
3391,143
3392,116
3393,169
3394,6
3395,165
3396,184
3397,228
3398,162
3399,83
3400,103
3401,230
3402,112
3403,170
3404,153
3405,202
3406,114
3407,122
3408,104
3409,287
3410,213
3411,231
3412,249
3413,195
3414,154
3415,57
3416,221
3417,160
3418,154
3419,178
3420,171
3421,247
3422,103
3423,293
3424,180
3425,97
3426,130
3427,128
3428,156
3429,232
3430,156
3431,104
3432,69
3433,55
3434,189
3384,409
3385,344
3386,290
3387,247
3388,308
3389,132
3390,309
3391,136
3392,168
3393,204
3394,0
3395,178
3396,215
3397,269
3398,195
3399,93
3400,69
3401,267
3402,126
3403,239
3404,191
3405,284
3406,128
3407,121
3408,86
3409,357
3410,259
3411,312
3412,336
3413,348
3414,183
3415,49
3416,272
3417,163
3418,178
3419,180
3420,317
3421,284
3422,120
3423,471
3424,255
3425,110
3426,131
3427,163
3428,176
3429,297
3430,193
3431,86
3432,50
3433,51
3434,245
3435,0
3436,261
3437,236
3438,36
3439,92
3440,174
3441,279
3442,101
3443,32
3444,122
3445,100
3446,263
3447,78
3448,202
3449,218
3450,219
3451,229
3452,100
3453,69
3454,178
3455,130
3456,220
3457,97
3458,130
3459,136
3460,152
3461,192
3462,101
3463,212
3464,188
3465,154
3466,124
3467,103
3468,282
3469,111
3470,98
3471,196
3472,43
3473,79
3474,77
3475,174
3476,23
3477,173
3436,355
3437,260
3438,13
3439,127
3440,224
3441,422
3442,118
3443,0
3444,150
3445,123
3446,453
3447,79
3448,268
3449,266
3450,406
3451,280
3452,103
3453,59
3454,236
3455,128
3456,265
3457,122
3458,117
3459,148
3460,148
3461,261
3462,105
3463,275
3464,345
3465,169
3466,142
3467,98
3468,327
3469,95
3470,85
3471,214
3472,2
3473,81
3474,64
3475,199
3476,4
3477,241
3478,0
3479,210
3480,235
3481,16
3482,226
3483,99
3484,273
3485,13
3486,30
3487,116
3488,266
3489,142
3490,177
3491,131
3492,176
3493,216
3494,250
3495,63
3496,59
3497,274
3498,68
3499,183
3500,211
3501,159
3502,266
3503,100
3504,117
3505,125
3506,185
3507,131
3508,104
3509,135
3510,173
3511,257
3512,189
3513,92
3479,202
3480,276
3481,0
3482,302
3483,112
3484,472
3485,0
3486,0
3487,119
3488,351
3489,167
3490,202
3491,114
3492,215
3493,294
3494,333
3495,70
3496,46
3497,349
3498,39
3499,215
3500,212
3501,156
3502,352
3503,94
3504,141
3505,165
3506,339
3507,129
3508,115
3509,177
3510,205
3511,309
3512,348
3513,84
import csv
import pickle
import numpy as np
from main import *
import nn
pathToTestCase = "testcases/testcase_01.pkl"
load_test_case = pickle.load(open(pathToTestCase, 'rb'))
task_detail = {
1: [4, 'Forward Pass'],
2: [6, 'Forward + Backward Pass'],
3: [2, 'Update weights'],
4: [1, 'Check Relu'],
5: [1, 'Check Relu Gradient'],
6: [3, 'Check Softmax'],
7: [7, 'Check Softmax Gradient'],
8: [3, 'Check Cross Entropy Loss'],
9: [3, 'Check Cross Entropy Loss Gradient']
}
def check_forward(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['forward_input'])
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(2,1,'relu'))
nn1.addLayer(nn.FullyConnectedLayer(1,2,'softmax'))
output_X = input_X
ind = 0
weights = load_test_case['forward_weights']
biases = load_test_case['forward_biases']
layers = nn1.layers
for l in layers:
l.weights = weights[ind]
l.biases = biases[ind]
ind+=1
for l in nn1.layers:
output_X = l.forwardpass(output_X)
studentAnswer = output_X
teacherAnswer = load_test_case['forward_output']
teacherAnswer = np.round(teacherAnswer, 5)
studentAnswer = np.round(studentAnswer, 5)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_backward(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['backward_input'])
input_delta = np.asarray(load_test_case['backward_input_delta'])
nn1 = nn.NeuralNetwork(0.0, .1, 1)
nn1.addLayer(nn.FullyConnectedLayer(2,5,'relu'))
nn1.addLayer(nn.FullyConnectedLayer(5,2,'softmax'))
ind = 0
weights = load_test_case['backward_weights']
biases = load_test_case['backward_biases']
layers = nn1.layers
for l in layers:
l.weights = weights[ind]
l.biases = biases[ind]
ind+=1
activations = [input_X]
for l in layers:
activations.append(l.forwardpass(activations[-1]))
# activations = load_test_case['backward_input_activations']
weightsGrad = list()
biasesGrad = list()
delta = input_delta
for i in range(len(layers)-1, -1, -1):
delta = layers[i].backwardpass(activations[i], delta)
weightsGrad.append(layers[i].weightsGrad)
biasesGrad.append(layers[i].biasesGrad)
studentAnswerdelta = delta
studentAnswerweightsGrad = weightsGrad
studentAnswerbiasesGrad = biasesGrad
teacherAnswerdelta = load_test_case['backward_output']
teacherAnswerweightsGrad = load_test_case['backward_weightsGrad']
teacherAnswerbiasesGrad = load_test_case['backward_biasesGrad']
teacherAnswerdelta = np.asarray(teacherAnswerdelta)
teacherAnswerweightsGrad = np.asarray(teacherAnswerweightsGrad)
teacherAnswerbiasesGrad = np.asarray(teacherAnswerbiasesGrad)
studentAnswerdelta = np.round(studentAnswerdelta, 6)
teacherAnswerdelta = np.round(teacherAnswerdelta, 6)
studentAnswerweightsGrad = [np.round(x, 6) for x in studentAnswerweightsGrad]
teacherAnswerweightsGrad = [np.round(x, 6) for x in teacherAnswerweightsGrad]
studentAnswerbiasesGrad = [np.round(x, 6) for x in studentAnswerbiasesGrad]
teacherAnswerbiasesGrad = [np.round(x, 6) for x in teacherAnswerbiasesGrad]
print('Student Answer deltas', studentAnswerdelta)
print('Correct Answer deltas', teacherAnswerdelta)
print('Student Answer weights Gradient', studentAnswerweightsGrad)
print('Correct Answer weights Gradient', teacherAnswerweightsGrad)
print('Student Answer biases Gradient', studentAnswerbiasesGrad)
print('Correct Answer biases Gradient', teacherAnswerbiasesGrad)
print('Correct', np.array_equal(studentAnswerdelta, teacherAnswerdelta) and
np.all([np.array_equal(x, y) for x, y in zip(studentAnswerweightsGrad, teacherAnswerweightsGrad)]) and
np.all([np.array_equal(x, y) for x, y in zip(studentAnswerbiasesGrad, teacherAnswerbiasesGrad)]))
return (np.array_equal(studentAnswerdelta, teacherAnswerdelta) and
np.all([np.array_equal(x, y) for x, y in zip(studentAnswerweightsGrad, teacherAnswerweightsGrad)]) and
np.all([np.array_equal(x, y) for x, y in zip(studentAnswerbiasesGrad, teacherAnswerbiasesGrad)]))
def check_updateweights(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(2,1,'relu'))
weights = load_test_case['updateweights_weights']
biases = load_test_case['updateweights_biases']
weightsGrad = load_test_case['updateweights_weightsGrad']
biasesGrad = load_test_case['updateweights_biasesGrad']
layer = nn1.layers[0]
layer.weights = weights
layer.biases = biases
layer.weightsGrad = weightsGrad
layer.biasesGrad = biasesGrad
layer.updateWeights(0.01)
studentAnswer = [layer.weights, layer.biases]
teacherAnswer = load_test_case['updateweights_output']
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
teacherAnswer_weight = np.round(teacherAnswer[0], 6)
studentAnswer_weight = np.round(studentAnswer[0], 6)
teacherAnswer_bias = np.round(teacherAnswer[1], 6)
studentAnswer_bias = np.round(studentAnswer[1], 6)
print('Correct', np.array_equal(studentAnswer_weight, teacherAnswer_weight) and np.array_equal(studentAnswer_bias, teacherAnswer_bias))
return np.array_equal(studentAnswer_weight, teacherAnswer_weight) and np.array_equal(studentAnswer_bias, teacherAnswer_bias)
def check_relu(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['relu_input']).reshape(1,4)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(4,4,'relu'))
output_X = input_X
output_X = nn1.layers[0].relu_of_X(output_X)
studentAnswer = output_X
teacherAnswer = load_test_case['relu_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_gardient_relu(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['gardient_relu_input']).reshape(1,4)
input_delta = np.asarray(load_test_case['gardient_relu_input_delta']).reshape(1,4)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(4,4,'relu'))
output_X = input_X
output_X = nn1.layers[0].gradient_relu_of_X(output_X, input_delta)
studentAnswer = output_X
teacherAnswer = load_test_case['gardient_relu_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_softmax(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['softmax_input']).reshape(1,4)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(4,4,'softmax'))
output_X = input_X
output_X = nn1.layers[0].softmax_of_X(output_X)
studentAnswer = output_X
teacherAnswer = load_test_case['softmax_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_gardient_softmax(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_X = np.asarray(load_test_case['gardient_softmax_input']).reshape(1,4)
input_delta = np.asarray(load_test_case['gardient_softmax_input_delta']).reshape(1,4)
nn1 = nn.NeuralNetwork(0.0, 1, 1)
nn1.addLayer(nn.FullyConnectedLayer(4,4,'softmax'))
output_X = input_X
output_X = nn1.layers[0].gradient_softmax_of_X(output_X, input_delta)
studentAnswer = output_X
teacherAnswer = load_test_case['gardient_softmax_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_crossEntropyLoss(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_Y = np.asarray(load_test_case['crossEntropyLoss_input_Y']).reshape(2, 10)
input_Y_pred = np.asarray(load_test_case['crossEntropyLoss_input_Y_pred']).reshape(2, 10)
nn1 = nn.NeuralNetwork(0.0, 4, 1)
output_Y = nn1.crossEntropyLoss(input_Y, input_Y_pred)
studentAnswer = output_Y
teacherAnswer = load_test_case['crossEntropyLoss_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
def check_crossEntropyDelta(task_number):
print('='*20 + ' TASK '+str(task_number)+' '+str(task_detail[task_number][1])+' '+ '='*20)
input_Y = np.asarray(load_test_case['crossEntropyDelta_input_Y']).reshape(2, 10)
input_Y_pred = np.asarray(load_test_case['crossEntropyDelta_input_Y_pred']).reshape(2, 10)
nn1 = nn.NeuralNetwork(0.0, 4, 1)
output_Y = nn1.crossEntropyDelta(input_Y, input_Y_pred)
studentAnswer = output_Y
teacherAnswer = load_test_case['crossEntropyDelta_output']
teacherAnswer = np.round(teacherAnswer, 6)
studentAnswer = np.round(studentAnswer, 6)
print('Student Answer', studentAnswer)
print('Correct Answer', teacherAnswer)
print('Correct', np.array_equal(studentAnswer, teacherAnswer))
return np.array_equal(studentAnswer, teacherAnswer)
if __name__ == "__main__":
np.random.seed(42)
print()
correct_status = False
total_marks = 0
checks = {
    1: check_forward,
    2: check_backward,
    3: check_updateweights,
    4: check_relu,
    5: check_gardient_relu,
    6: check_softmax,
    7: check_gardient_softmax,
    8: check_crossEntropyLoss,
    9: check_crossEntropyDelta,
}
for task_number, check in checks.items():
    try:
        correct_status = check(task_number)
        total_marks += correct_status * task_detail[task_number][0]
    except Exception as e:
        print("Error " + str(e) + " occurred in task ", task_number)
        print("Correct False")
print('='*20 + ' TASK Finish ' + '='*20)
full_marks = sum(marks for marks, _ in task_detail.values())
print(' You got', total_marks, 'Marks Out of', full_marks, 'for', pathToTestCase.split('/')[1].split('.')[0])
print('='*53)
print()
import numpy as np
import nn
import csv
import pickle
def taskXor():
XTrain, YTrain, XVal, YVal, XTest, YTest = loadXor()
# Create a NeuralNetwork object 'nn1' as follows with optimal parameters. For parameter definition, refer to nn.py file.
# nn1 = nn.NeuralNetwork(lr, batchSize, epochs)
# Add layers to neural network corresponding to inputs and outputs of given data
# Eg. nn1.addLayer(FullyConnectedLayer(x,y))
###############################################
# TASK 3a (Marks 7) - YOUR CODE HERE
raise NotImplementedError
###############################################
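# A hedged sketch of what 3a might look like (hyperparameters here are
# illustrative guesses, not the graded solution): the XOR inputs are
# 2-dimensional and the labels are one-hot over 2 classes, so one relu
# hidden layer feeding a softmax output layer is enough, e.g.
#   nn1 = nn.NeuralNetwork(0.1, 32, 30)            # lr, batchSize, epochs
#   nn1.addLayer(nn.FullyConnectedLayer(2, 4, 'relu'))
#   nn1.addLayer(nn.FullyConnectedLayer(4, 2, 'softmax'))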
nn1.train(XTrain, YTrain, XVal, YVal)
pred, acc = nn1.validate(XTest, YTest)
with open("predictionsXor.csv", 'w') as file:
writer = csv.writer(file)
writer.writerow(["id", "prediction"])
for i, p in enumerate(pred):
writer.writerow([i, p])
print('Test Accuracy',acc)
return nn1
def preprocessMnist(X):
# Perform any data preprocessing that you wish to do here
# Input: A 2-d numpy array containing an entire train, val or test split | Shape: n x 28*28
# Output: A 2-d numpy array of the same shape as the input (If the size is changed, you will get downstream errors)
###############################################
# TASK 3c (Marks 0) - YOUR CODE HERE
raise NotImplementedError
###############################################
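# A hedged sketch, assuming the raw MNIST pixels arrive in [0, 255]:
# scaling to [0, 1] preserves the n x 28*28 shape and usually helps training.
#   return X / 255.0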
def taskMnist():
XTrain, YTrain, XVal, YVal, XTest, _ = loadMnist()
# Create a NeuralNetwork object 'nn1' as follows with optimal parameters. For parameter definition, refer to nn.py file.
# nn1 = nn.NeuralNetwork(lr, batchSize, epochs)
# Add layers to neural network corresponding to inputs and outputs of given data
# Eg. nn1.addLayer(FullyConnectedLayer(x,y))
###############################################
# TASK 3b (Marks 13) - YOUR CODE HERE
raise NotImplementedError
###############################################
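# A hedged sketch (illustrative hyperparameters only): MNIST inputs are
# 28*28 = 784 pixels and there are 10 digit classes, e.g.
#   nn1 = nn.NeuralNetwork(0.1, 64, 20)            # lr, batchSize, epochs
#   nn1.addLayer(nn.FullyConnectedLayer(784, 64, 'relu'))
#   nn1.addLayer(nn.FullyConnectedLayer(64, 10, 'softmax'))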
nn1.train(XTrain, YTrain, XVal, YVal)
pred, _ = nn1.validate(XTest, None)
with open("predictionsMnist.csv", 'w') as file:
writer = csv.writer(file)
writer.writerow(["id", "prediction"])
for i, p in enumerate(pred):
writer.writerow([i, p])
return nn1
################################# UTILITY FUNCTIONS ############################################
def oneHotEncodeY(Y, nb_classes):
# Calculates one-hot encoding for a given list of labels
# Input :- Y : An integer or a list of labels
# Output :- Corresponding one-hot encoded vector or the list of one-hot encoded vectors
return (np.eye(nb_classes)[Y]).astype(int)
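# For illustration: oneHotEncodeY([0, 2], 3) returns [[1, 0, 0], [0, 0, 1]]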
def loadXor():
# This is a toy dataset with 10k points and 2 labels.
# The output can be represented as the XOR of the input, as described in the problem statement
# There are 7k training points, 1k validation points and 2k test points
train = pickle.load(open("data/xor/train.pkl", 'rb'))
test = pickle.load(open("data/xor/test.pkl", 'rb'))
testX, testY = np.array(test[0]), np.array(oneHotEncodeY(test[1],2))
trainX, trainY = np.array(train[0][:7000]), np.array(oneHotEncodeY(train[1][:7000],2))
valX, valY = np.array(train[0][7000:]), np.array(oneHotEncodeY(train[1][7000:],2))
return trainX, trainY, valX, valY, testX, testY
def loadMnist():
# MNIST dataset has 50k train, 10k val, 10k test
# The test labels have not been provided for this task
train = pickle.load(open("data/mnist/train.pkl", 'rb'))
test = pickle.load(open("data/mnist/test.pkl", 'rb'))
testX = preprocessMnist(np.array(test[0]))
testY = None # For MNIST the test labels have not been provided
trainX, trainY = preprocessMnist(np.array(train[0][:50000])), np.array(oneHotEncodeY(train[1][:50000],10))
valX, valY = preprocessMnist(np.array(train[0][50000:])), np.array(oneHotEncodeY(train[1][50000:],10))
return trainX, trainY, valX, valY, testX, testY
#################################################################################################
if __name__ == "__main__":
np.random.seed(7)
taskXor()
taskMnist()
import numpy as np
class NeuralNetwork:
def __init__(self, lr, batchSize, epochs):
# Method to initialize a Neural Network Object
# Parameters
# lr - learning rate
# batchSize - Mini batch size
# epochs - Number of epochs for training
self.lr = lr
self.batchSize = batchSize
self.epochs = epochs
self.layers = []
def addLayer(self, layer):
# Method to add layers to the Neural Network
self.layers.append(layer)
def train(self, trainX, trainY, validX=None, validY=None):
# Method for training the Neural Network
# Input
# trainX - A list of training input data to the neural network
# trainY - Corresponding list of training data labels
# validX - A list of validation input data to the neural network
# validY - Corresponding list of validation data labels
# This method trains the weights and biases using the training data (trainX, trainY)
# Feel free to print accuracy at different points using the validate() or computeAccuracy() functions of this class
###############################################
# TASK 2c (Marks 0) - YOUR CODE HERE
raise NotImplementedError
###############################################
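# A hedged sketch of one possible mini-batch loop (assuming trainX/trainY
# are numpy arrays, as produced by loadXor/loadMnist): shuffle each epoch,
# forward through all layers, backpropagate, update per layer.
#   for epoch in range(self.epochs):
#       perm = np.random.permutation(len(trainX))
#       trainX, trainY = trainX[perm], trainY[perm]
#       for i in range(0, len(trainX), self.batchSize):
#           X, Y = trainX[i:i + self.batchSize], trainY[i:i + self.batchSize]
#           activations = [X]
#           for l in self.layers:
#               activations.append(l.forwardpass(activations[-1]))
#           delta = self.crossEntropyDelta(Y, activations[-1])
#           for j in range(len(self.layers) - 1, -1, -1):
#               delta = self.layers[j].backwardpass(activations[j], delta)
#               self.layers[j].updateWeights(self.lr)
#       if validX is not None and validY is not None:
#           print('epoch', epoch, 'val accuracy', self.validate(validX, validY)[1])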
def crossEntropyLoss(self, Y, predictions):
# Input
# Y : Ground truth labels (encoded as 1-hot vectors) | shape = batchSize x number of output labels
# predictions : Predictions of the model | shape = batchSize x number of output labels
# Returns the cross-entropy loss between the predictions and the ground truth labels | shape = scalar
###############################################
# TASK 2a (Marks 3) - YOUR CODE HERE
raise NotImplementedError
###############################################
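# A hedged sketch, assuming the loss is averaged over the batch and an
# epsilon guards against log(0) (the convention must match the test cases):
#   eps = 1e-12
#   return -np.mean(np.sum(Y * np.log(predictions + eps), axis=1))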
def crossEntropyDelta(self, Y, predictions):
# Input
# Y : Ground truth labels (encoded as 1-hot vectors) | shape = batchSize x number of output labels
# predictions : Predictions of the model | shape = batchSize x number of output labels
# Returns the derivative of the loss with respect to the last layer outputs, ie dL/dp_i where p_i is the ith
# output of the last layer of the network | shape = batchSize x number of output labels
###############################################
# TASK 2b (Marks 3) - YOUR CODE HERE
raise NotImplementedError
###############################################
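# A hedged sketch, consistent with the batch-averaged loss assumed above,
# since dL/dp_i = -y_i / (N * p_i) under that convention:
#   eps = 1e-12
#   return -Y / (predictions + eps) / Y.shape[0]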
def computeAccuracy(self, Y, predictions):
# Returns the accuracy given the true labels Y and final output of the model
correct = 0
for i in range(len(Y)):
if np.argmax(Y[i]) == np.argmax(predictions[i]):
correct += 1
accuracy = (float(correct) / len(Y)) * 100
return accuracy
def validate(self, validX, validY):
# Input
# validX : Validation Input Data
# validY : Validation Labels
# Returns the predictions and validation accuracy evaluated over the current neural network model
valActivations = self.predict(validX)
pred = np.argmax(valActivations, axis=1)
if validY is not None:
valAcc = self.computeAccuracy(validY, valActivations)
return pred, valAcc
else:
return pred, None
def predict(self, X):
# Input
# X : Current Batch of Input Data as an nparray
# Output
# Returns the predictions made by the model (which are the activations output by the last layer)
# Note: Activations at the first layer (the input layer) are X itself
activations = X
for l in self.layers:
activations = l.forwardpass(activations)
return activations
class FullyConnectedLayer:
def __init__(self, in_nodes, out_nodes, activation):
# Method to initialize a Fully Connected Layer
# Parameters
# in_nodes - number of input nodes of this layer
# out_nodes - number of output nodes of this layer
# activation - activation applied by this layer: 'relu' or 'softmax'
self.in_nodes = in_nodes
self.out_nodes = out_nodes
self.activation = activation
# Stores a quantity that is computed in the forward pass but actually used in the backward pass. Try to identify
# this quantity to avoid recomputing it in the backward pass and hence, speed up computation
self.data = None
# Create np arrays of appropriate sizes for weights and biases and initialise them as you see fit
###############################################
# TASK 1a (Marks 0) - YOUR CODE HERE
raise NotImplementedError
self.weights = None
self.biases = None
###############################################
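# A hedged sketch of one common initialisation, assuming a forward pass of
# the form X.dot(weights) + biases:
#   self.weights = 0.1 * np.random.randn(in_nodes, out_nodes)
#   self.biases = np.zeros(out_nodes)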
# NOTE: You must NOT change the above code but you can add extra variables if necessary
# Store the gradients with respect to the weights and biases in these variables during the backward pass
self.weightsGrad = None
self.biasesGrad = None
def relu_of_X(self, X):
# Input
# data : Output from current layer/input for Activation | shape: batchSize x self.out_nodes
# Returns: Activations after one forward pass through this relu layer | shape: batchSize x self.out_nodes
# This will only be called for layers with activation relu
###############################################
# TASK 1b (Marks 1) - YOUR CODE HERE
raise NotImplementedError
###############################################
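# A hedged sketch: relu is an elementwise max with zero.
#   return np.maximum(X, 0)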
def gradient_relu_of_X(self, X, delta):
# Input
# data : Output from next layer/input | shape: batchSize x self.out_nodes
# delta : del_Error/ del_activation_curr | shape: batchSize x self.out_nodes
# Returns: Current del_Error to pass to current layer in backward pass through relu layer | shape: batchSize x self.out_nodes
# This will only be called for layers with activation relu and during the backward pass
###############################################
# TASK 1e (Marks 1) - YOUR CODE HERE
raise NotImplementedError
###############################################
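# A hedged sketch: the relu derivative is 1 where the input is positive and
# 0 elsewhere, so the incoming delta is masked elementwise.
#   return delta * (X > 0)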
def softmax_of_X(self, X):
# Input
# data : Output from current layer/input for Activation | shape: batchSize x self.out_nodes
# Returns: Activations after one forward pass through this softmax layer | shape: batchSize x self.out_nodes
# This will only be called for layers with activation softmax
###############################################
# TASK 1c (Marks 3) - YOUR CODE HERE
raise NotImplementedError
###############################################
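# A hedged sketch with the usual max-subtraction for numerical stability:
#   e = np.exp(X - np.max(X, axis=1, keepdims=True))
#   return e / np.sum(e, axis=1, keepdims=True)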
def gradient_softmax_of_X(self, X, delta):
# Input
# data : Output from next layer/input | shape: batchSize x self.out_nodes
# delta : del_Error/ del_activation_curr | shape: batchSize x self.out_nodes
# Returns: Current del_Error to pass to current layer in backward pass through softmax layer | shape: batchSize x self.out_nodes
# This will only be called for layers with activation softmax and during the backward pass
# Hint: You might need to compute Jacobian first
###############################################
# TASK 1f (Marks 7) - YOUR CODE HERE
raise NotImplementedError
###############################################
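# A hedged sketch, assuming X is the softmax input and writing s for the
# softmax output: the per-sample Jacobian is diag(s) - s s^T, and the
# row-vector product delta . J collapses to the expression below.
#   s = self.softmax_of_X(X)                        # batchSize x out_nodes
#   dot = np.sum(delta * s, axis=1, keepdims=True)  # (delta . s) per sample
#   return s * (delta - dot)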
def forwardpass(self, X):
# Input
# activations : Activations from previous layer/input | shape: batchSize x self.in_nodes
# Returns: Activations after one forward pass through this layer | shape: batchSize x self.out_nodes
# You may need to write different code for different activation layers
###############################################
# TASK 1d (Marks 4) - YOUR CODE HERE
if self.activation == 'relu':
raise NotImplementedError
elif self.activation == 'softmax':
raise NotImplementedError
else:
print("ERROR: Incorrect activation specified: " + self.activation)
exit()
###############################################
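# A hedged sketch, caching the pre-activation in self.data for reuse in the
# backward pass (one natural choice for the quantity hinted at in __init__):
#   self.data = np.dot(X, self.weights) + self.biases
#   if self.activation == 'relu':
#       return self.relu_of_X(self.data)
#   elif self.activation == 'softmax':
#       return self.softmax_of_X(self.data)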
def backwardpass(self, activation_prev, delta):
# Input
# activation_prev : Activations from the previous layer (the input to this layer) | shape: batchSize x self.in_nodes
# delta : del_Error/ del_activation_curr | shape: batchSize x self.out_nodes
# Output
# new_delta : del_Error/ del_activation_prev | shape: batchSize x self.in_nodes
# You may need to write different code for different activation layers
# Just compute and store the gradients here - do not make the actual updates
###############################################
# TASK 1g (Marks 6) - YOUR CODE HERE
if self.activation == 'relu':
inp_delta = self.gradient_relu_of_X(self.data, delta)
elif self.activation == 'softmax':
inp_delta = self.gradient_softmax_of_X(self.data, delta)
else:
print("ERROR: Incorrect activation specified: " + self.activation)
exit()
###############################################
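# A hedged sketch of the remaining bookkeeping, assuming self.data caches the
# pre-activation so that inp_delta above is dE/d(pre-activation) (whether to
# average over the batch is a convention the test cases fix):
#   self.weightsGrad = np.dot(activation_prev.T, inp_delta)
#   self.biasesGrad = np.sum(inp_delta, axis=0)
#   return np.dot(inp_delta, self.weights.T)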
def updateWeights(self, lr):
# Input
# lr: Learning rate being used
# Output: None
# This function should actually update the weights using the gradients computed in the backwardpass
###############################################
# TASK 1h (Marks 2) - YOUR CODE HERE
raise NotImplementedError
###############################################
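# A hedged sketch: plain gradient descent on the gradients stored during the
# backward pass.
#   self.weights = self.weights - lr * self.weightsGrad
#   self.biases = self.biases - lr * self.biasesGrad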
instance (id),count
0,8
1,466
2,176
3,58
4,288
5,795
6,1
7,292
8,427
9,73
10,216
11,11
12,171
13,238