Commit cb9fd563 authored by SHREYANSH JAIN

final submission XOR 99.3 mnist 96.8

parent 12d020ad
@@ -13,16 +13,16 @@ def taskXor():
 # TASK 3a (Marks 7) - YOUR CODE HERE
 # raise NotImplementedError
 ###############################################
-YTrain , YVal, YTest = np.array([int(i[1]==1) for i in YTrain]).reshape((-1,1)), np.array([int(i[1]==1) for i in YVal]).reshape((-1,1)), np.array([int(i[1]==1) for i in YTest]).reshape((-1,1))
-lr,batchSize,epochs = 0.01,50,10
+# YTrain , YVal, YTest = np.array([int(i[1]==1) for i in YTrain]).reshape((-1,1)), np.array([int(i[1]==1) for i in YVal]).reshape((-1,1)), np.array([int(i[1]==1) for i in YTest]).reshape((-1,1))
+lr,batchSize,epochs = -0.8,25,3500
 nn1 = nn.NeuralNetwork(lr, batchSize, epochs)
 # Add layers to neural network corresponding to inputs and outputs of given data
 input_layer = XTrain.shape[1]
-hidden_layer = 2
-output_layer = 1
+hidden_layer = 20
+output_layer = 2
 # activation_fn = 'softmax' # 'relu'
 nn1.addLayer(nn.FullyConnectedLayer(input_layer,hidden_layer,'relu'))
-nn1.addLayer(nn.FullyConnectedLayer(hidden_layer,output_layer,'relu'))
+nn1.addLayer(nn.FullyConnectedLayer(hidden_layer,output_layer,'softmax'))
 nn1.train(XTrain, YTrain, XVal, YVal)
 pred, acc = nn1.validate(XTest, YTest)
@@ -48,7 +48,7 @@ def preprocessMnist(X):
 def taskMnist():
 XTrain, YTrain, XVal, YVal, XTest, _ = loadMnist()
 # Create a NeuralNetwork object 'nn1' as follows with optimal parameters. For parameter definition, refer to py file.
-lr,batchSize,epochs = 0.005,256,10000
+lr,batchSize,epochs = -0.005,256,10000
 nn1 = nn.NeuralNetwork(lr, batchSize, epochs)
 # Add layers to neural network corresponding to inputs and outputs of given data
 input_layer = XTrain.shape[1]
...
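The MNIST hunk only retunes the hyperparameters passed to nn.NeuralNetwork (learning rate, batch size 256, epoch count). As a rough illustration of how those three values usually interact in a mini-batch loop, here is a generic SGD skeleton; it is not the repo's train() method, and train_sgd, step, and the synthetic least-squares problem are all hypothetical stand-ins.

import numpy as np

def train_sgd(X, Y, lr, batch_size, epochs, step):
    """Generic mini-batch loop: shuffle each epoch, then apply `step` once per batch."""
    n = X.shape[0]
    rng = np.random.default_rng(0)
    for _ in range(epochs):
        order = rng.permutation(n)
        for start in range(0, n, batch_size):
            idx = order[start:start + batch_size]
            step(X[idx], Y[idx], lr)          # one parameter update per mini-batch

# Tiny runnable example: least-squares regression with a single weight vector.
w = np.zeros(5)
def step(xb, yb, lr):
    global w
    grad = 2 * xb.T @ (xb @ w - yb) / len(xb)  # gradient of mean squared error
    w -= lr * grad                             # descend, matching the -= update in the last hunk

X = np.random.default_rng(1).normal(size=(1000, 5))
Y = X @ np.array([1.0, -2.0, 0.5, 0.0, 3.0])
train_sgd(X, Y, lr=0.005, batch_size=256, epochs=500, step=step)
print(w.round(2))   # w should approach [1, -2, 0.5, 0, 3]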
@@ -260,8 +260,8 @@ class FullyConnectedLayer:
 # This function should actually update the weights using the gradients computed in the backward pass
 ###############################################
 # TASK 1h (Marks 2) - YOUR CODE HERE
-self.weights += lr*(self.weightsGrad)
-self.biases += lr*(self.biasesGrad)
+self.weights -= lr*(self.weightsGrad)
+self.biases -= lr*(self.biasesGrad)
 # raise NotImplementedError
 ###############################################