Commit 1332f328 authored by SHREYANSH JAIN

final logcosh leaderboard

parent 1b4341db
import matplotlib.pyplot as plt
import csv

# Plot the per-epoch training error recorded for each loss function.
x = []
y = []
i = 1
with open('logcosh.log', 'r') as csvfile:
    for row in csvfile:
        x.append(i)
        i += 1
        y.append(float(row[:-1]))  # one error value per line; drop the trailing newline
plt.plot(x, y, label='LOGCOSH')
plt.xlabel('epoch')
plt.ylabel('Error')

x = []
y = []
i = 1
with open('mae.log', 'r') as csvfile:
    for row in csvfile:
        x.append(i)
        i += 1
        y.append(float(row[:-1]))
plt.plot(x, y, label='MAE')

x = []
y = []
i = 1
with open('rmse.log', 'r') as csvfile:
    for row in csvfile:
        x.append(i)
        i += 1
        y.append(float(row[:-1]))
plt.plot(x, y, label='RMSE')

plt.legend()
plt.show()
\ No newline at end of file
python3 main.py --train_file train.csv --test_file test.csv --epoch 8000 --lr 0.4 --loss rmse
python3 main.py --train_file train.csv --test_file test.csv --epoch 8000 --lr 0.3 --loss mse
python3 main.py --train_file train.csv --test_file test.csv --epoch 8000 --lr 0.4 --loss logcosh
@@ -5,15 +5,49 @@ x = []
 y = []
 i=1
-with open('error.log','r') as csvfile:
+with open('logcosh.log','r') as csvfile:
     for row in csvfile:
         x.append(i)
         i+=1
         y.append(float(row[:-1]))
-plt.plot(x,y, label='Loaded from file!')
-plt.xlabel('x')
-plt.ylabel('y')
-plt.title('Interesting Graph\nCheck it out')
+plt.plot(x,y, label='LOGCOSH')
+plt.xlabel('epoch')
+plt.ylabel('Error')
+x = []
+y = []
+i=1
+with open('mae.log','r') as csvfile:
+    for row in csvfile:
+        x.append(i)
+        i+=1
+        y.append(float(row[:-1]))
+plt.plot(x,y, label='MAE')
+x = []
+y = []
+i=1
+with open('rmse.log','r') as csvfile:
+    for row in csvfile:
+        x.append(i)
+        i+=1
+        y.append(float(row[:-1]))
+plt.plot(x,y, label='RMSE')
+plt.plot(x,y, label='')
 plt.legend()
 plt.show()
\ No newline at end of file
 import numpy as np
 import argparse
 import csv
+import warnings
 # import matplotlib.pyplot as plt
 '''
 You are only required to fill the following functions
@@ -24,7 +25,7 @@ def mean_squared_loss(xdata, ydata, weights):
     guess = np.dot(xdata,weights)
     samples = np.shape(guess)[0]
-    err = (0.5/samples)*np.sum(np.square(ydata-guess))
+    err = (1/samples)*np.sum(np.square(ydata-guess))
     return err
     raise NotImplementedError
@@ -32,7 +33,7 @@ def mean_squared_gradient(xdata, ydata, weights):
     samples = np.shape(xdata)[0]
     guess = np.dot(xdata,weights)
-    gradient = (1/samples)*np.dot(xdata.T,(guess-ydata))
+    gradient = (2/samples)*np.dot(xdata.T,(guess-ydata))
     return gradient
     raise NotImplementedError
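The two changes above keep the loss and its gradient consistent with each other: for the mean squared loss (1/n)*sum((Xw - y)^2), the gradient with respect to w is (2/n)*X^T(Xw - y). A minimal standalone finite-difference check (sketch only, with hypothetical helper names, not part of this commit):

import numpy as np

def mse_loss(X, y, w):
    # (1/n) * sum((y - Xw)^2), matching the updated mean_squared_loss
    return np.sum(np.square(y - X.dot(w))) / X.shape[0]

def mse_gradient(X, y, w):
    # (2/n) * X^T (Xw - y), matching the updated mean_squared_gradient
    return (2 / X.shape[0]) * X.T.dot(X.dot(w) - y)

rng = np.random.default_rng(0)
X, y, w = rng.normal(size=(5, 4)), rng.normal(size=5), rng.normal(size=4)
eps = 1e-6
numeric = np.array([(mse_loss(X, y, w + eps * e) - mse_loss(X, y, w - eps * e)) / (2 * eps)
                    for e in np.eye(4)])
print(np.allclose(numeric, mse_gradient(X, y, w)))  # expected: True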
@@ -60,7 +61,11 @@ def mean_log_cosh_loss(xdata, ydata, weights):
     guess = np.dot(xdata,weights)
     samples = np.shape(guess)[0]
-    err = (1/samples)*np.sum(np.square(ydata-guess))
+    warnings.filterwarnings("error")
+    try:
+        err = np.sum(np.log(np.cosh(guess-ydata)))/samples
+    except Exception as e:
+        err = np.sum(np.absolute(guess-ydata)+np.log(2))/samples
    return err
    raise NotImplementedError
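Some context on the fallback above (a note, not from the commit itself): np.cosh overflows in float64 once |guess - ydata| exceeds roughly 710, and warnings.filterwarnings("error") promotes numpy's overflow RuntimeWarning into an exception so the except branch runs. For large |z|, log(cosh(z)) approaches |z| - log(2); the committed fallback adds log(2) instead, which shifts the reported loss by a constant per sample but leaves the tanh-based gradient function untouched. A small illustrative sketch:

import numpy as np

z = np.array([1.0, 5.0, 20.0, 800.0])
with np.errstate(over='ignore'):
    exact = np.log(np.cosh(z))    # cosh overflows to inf once z is large enough
approx = np.abs(z) - np.log(2)    # stable closed-form limit for large |z|
print(exact)                      # approx. [0.4338, 4.3069, 19.3069, inf]
print(approx)                     # approx. [0.3069, 4.3069, 19.3069, 799.3069]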
@@ -68,7 +73,7 @@ def mean_log_cosh_gradient(xdata, ydata, weights):
     guess = np.dot(xdata,weights)
     samples = np.shape(guess)[0]
-    gradient = np.dot(xdata.T,np.tanh(guess-ydata))
+    gradient = np.dot(xdata.T,np.tanh(guess-ydata))/samples
     return gradient
     raise NotImplementedError
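For the record (sketch, not part of the commit): the derivative of log(cosh(z)) is tanh(z), so dividing by samples above turns X^T tanh(guess - ydata) into the gradient of the mean log-cosh loss rather than the summed one. A quick numeric confirmation:

import numpy as np

z = np.linspace(-3.0, 3.0, 7)
eps = 1e-6
numeric = (np.log(np.cosh(z + eps)) - np.log(np.cosh(z - eps))) / (2 * eps)
print(np.allclose(numeric, np.tanh(z)))  # expected: True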
@@ -84,7 +89,8 @@ def root_mean_squared_loss(xdata, ydata, weights):
 def root_mean_squared_gradient(xdata, ydata, weights):
     samples = np.shape(xdata)[0]
-    gradient = -weights.T/np.sqrt(samples)
+    guess = np.dot(xdata,weights)
+    gradient = mean_squared_gradient(xdata, ydata, weights)/(2*root_mean_squared_loss(xdata, ydata, weights))
     return gradient
     raise NotImplementedError
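The rewritten gradient above is the chain rule applied to RMSE = sqrt(MSE): d/dw sqrt(MSE(w)) = MSE'(w) / (2 * sqrt(MSE(w))), which is why it reuses mean_squared_gradient and root_mean_squared_loss. A self-contained numeric check (hypothetical names, sketch only, not part of this commit):

import numpy as np

def mse(X, y, w):
    return np.mean(np.square(X.dot(w) - y))

def rmse(X, y, w):
    return np.sqrt(mse(X, y, w))

def rmse_gradient(X, y, w):
    grad_mse = (2 / X.shape[0]) * X.T.dot(X.dot(w) - y)
    return grad_mse / (2 * rmse(X, y, w))   # chain rule, as in the diff above

rng = np.random.default_rng(1)
X, y, w = rng.normal(size=(6, 3)), rng.normal(size=6), rng.normal(size=3)
eps = 1e-6
numeric = np.array([(rmse(X, y, w + eps * e) - rmse(X, y, w - eps * e)) / (2 * eps)
                    for e in np.eye(3)])
print(np.allclose(numeric, rmse_gradient(X, y, w), atol=1e-6))  # expected: True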
@@ -277,7 +283,15 @@ def main():
     output = [(i,np.absolute(ytest[i])) for i in range(len(ytest))]
     np.savetxt("output.csv",output,delimiter=',',fmt="%d",header="instance (id),count",comments='')
     np.savetxt("error.log",errlog,delimiter='\n',fmt="%f")
 x,y,z = np.array([[ 1, 0, 2, -3], [ 1, -1, 0, -3], [-2, -5, 1, -3], [ 0, -5, 3, -3], [ 0, -4, 3, -2]]),np.array( [-2, 1, 1, 2, 0]),np.array( [ 1, 0, -2, -1])
+print(mean_absolute_loss(x,y,z))
+print(mean_absolute_gradient(x,y,z))
+print(mean_squared_loss(x,y,z))
+print(mean_squared_gradient(x,y,z))
+print(root_mean_squared_loss(x,y,z))
+print(root_mean_squared_gradient(x,y,z))
+print(mean_log_cosh_loss(x,y,z))
+print(mean_log_cosh_gradient(x,y,z))
 if __name__ == '__main__':