SHREYANSH JAIN / ML725
Commit 8ae28015, authored Sep 09, 2019 by SHREYANSH JAIN
added other err fn
parent 225f736a
Showing 2 changed files with 50 additions and 3 deletions (+50 −3)
Assignment1/demo.py    +19 −0
Assignment1/main.py    +31 −3
Assignment1/demo.py  (new file, 0 → 100644, view file @ 8ae28015)
import matplotlib.pyplot as plt
import csv

x = []
y = []
i = 1

with open('error.log', 'r') as csvfile:
    for row in csvfile:
        x.append(i)
        i += 1
        y.append(float(row[:-1]))

plt.plot(x, y, label='Loaded from file!')
plt.xlabel('x')
plt.ylabel('y')
plt.title('Interesting Graph\nCheck it out')
plt.legend()
plt.show()
\ No newline at end of file
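Note: demo.py reads one numeric value per line from error.log (row[:-1] strips the trailing newline before the float conversion) and plots the values against their 1-based line index; presumably this visualises the per-iteration error logged by the training script, though that is an inference, not something stated in the commit. The csv import is unused.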
Assignment1/main.py  (view file @ 8ae28015)
 import numpy as np
 import argparse
 import csv
-import matplotlib.pyplot as plt
+# import matplotlib.pyplot as plt
 '''
 You are only required to fill the following functions
 mean_squared_loss
 ...
@@ -24,7 +24,7 @@ def mean_squared_loss(xdata, ydata, weights):
     guess = np.dot(xdata, weights)
     samples = np.shape(guess)[0]
-    err = 0.5 * samples * np.sum(np.square(ydata.T - guess))
+    err = (0.5 / samples) * np.sum(np.square(ydata - guess))
     return err
     raise NotImplementedError
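Note: the old line multiplied the summed squared error by the sample count; the new one divides by it, giving the usual halved mean squared error, (1/2n) * Σ(y − Xw)². A minimal equivalent sketch (hypothetical helper, not the committed code), assuming xdata is (n, d), ydata is (n,), weights is (d,):

import numpy as np

def mean_squared_loss_sketch(xdata, ydata, weights):
    # halved MSE: 0.5 * mean((y - Xw)^2), same value as the new err line above
    guess = np.dot(xdata, weights)
    return 0.5 * np.mean(np.square(ydata - guess))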
...
@@ -32,33 +32,61 @@ def mean_squared_gradient(xdata, ydata, weights):
     samples = np.shape(xdata)[0]
     guess = np.dot(xdata, weights)
-    gradient = (1 / samples) * np.dot(xdata.T, (guess - ydata.T))
+    gradient = (1 / samples) * np.dot(xdata.T, (guess - ydata))
     return gradient
     raise NotImplementedError
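Note: dropping the .T on ydata makes the residual shape consistent with the loss above, and the expression is the gradient of that halved MSE, (1/n) * Xᵀ(Xw − y). A sketch under the same shape assumptions (hypothetical name, not in the commit):

import numpy as np

def mean_squared_gradient_sketch(xdata, ydata, weights):
    # gradient of 0.5 * mean((y - Xw)^2) with respect to w
    guess = np.dot(xdata, weights)
    return np.dot(xdata.T, guess - ydata) / np.shape(xdata)[0]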
 def mean_absolute_loss(xdata, ydata, weights):
+    guess = np.dot(xdata, weights)
+    samples = np.shape(guess)[0]
+    err = (1 / samples) * np.sum(np.absolute(ydata - guess))
+    return err
     raise NotImplementedError
 def mean_absolute_gradient(xdata, ydata, weights):
+    guess = np.dot(xdata, weights)
+    if np.sum(ydata - guess) < 0:
+        gradient = np.random.randint(0, 10, np.shape(weights)[0])
+    else:
+        gradient = np.random.randint(-10, 0, np.shape(weights)[0])
+    return gradient
     raise NotImplementedError
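Note: the added mean_absolute_gradient returns random integers whose sign depends on the summed residual, which reads as a placeholder rather than the analytic result. For the mean absolute loss added just above, a standard subgradient would be −(1/n) * Xᵀ sign(y − Xw); a sketch (hypothetical name, not part of this commit):

import numpy as np

def mean_absolute_gradient_sketch(xdata, ydata, weights):
    # subgradient of (1/n) * sum(|y - Xw|) with respect to w
    guess = np.dot(xdata, weights)
    return -np.dot(xdata.T, np.sign(ydata - guess)) / np.shape(xdata)[0]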
 def mean_log_cosh_loss(xdata, ydata, weights):
+    guess = np.dot(xdata, weights)
+    samples = np.shape(guess)[0]
+    err = (1 / samples) * np.sum(np.square(ydata - guess))
+    return err
     raise NotImplementedError
 def mean_log_cosh_gradient(xdata, ydata, weights):
+    guess = np.dot(xdata, weights)
+    samples = np.shape(guess)[0]
+    gradient = np.dot(xdata.T, np.tanh(guess - ydata))
+    return gradient
     raise NotImplementedError
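Note: as committed, mean_log_cosh_loss actually computes a mean squared error rather than log-cosh, while mean_log_cosh_gradient already uses tanh (the derivative of log cosh) but computes samples without dividing by it. If the intent is the usual log-cosh objective, a matching pair would look roughly like this (sketch only, hypothetical names, same shape assumptions as above):

import numpy as np

def mean_log_cosh_loss_sketch(xdata, ydata, weights):
    # (1/n) * sum(log(cosh(Xw - y)))
    guess = np.dot(xdata, weights)
    return np.sum(np.log(np.cosh(guess - ydata))) / np.shape(xdata)[0]

def mean_log_cosh_gradient_sketch(xdata, ydata, weights):
    # d/dw of the loss above: (1/n) * X^T tanh(Xw - y)
    guess = np.dot(xdata, weights)
    return np.dot(xdata.T, np.tanh(guess - ydata)) / np.shape(xdata)[0]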
 def root_mean_squared_loss(xdata, ydata, weights):
+    guess = np.dot(xdata, weights)
+    samples = np.shape(guess)[0]
+    err = np.sqrt(np.divide(np.sum(np.square(ydata.T - guess)), samples))
+    return err
     raise NotImplementedError
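Note: this is √((1/n) Σ(y − Xw)²), but it still subtracts guess from ydata.T, the pattern the mean_squared_loss hunk above just moved away from; if ydata were ever a column vector the broadcast would silently produce an (n, n) matrix. An equivalent sketch without the transpose (hypothetical name):

import numpy as np

def root_mean_squared_loss_sketch(xdata, ydata, weights):
    # square root of the mean squared residual
    guess = np.dot(xdata, weights)
    return np.sqrt(np.mean(np.square(ydata - guess)))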
 def root_mean_squared_gradient(xdata, ydata, weights):
+    samples = np.shape(xdata)[0]
+    gradient = -weights.T / np.sqrt(samples)
+    return gradient
     raise NotImplementedError
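Note: the added root_mean_squared_gradient, −wᵀ/√n, does not depend on the residual at all, so like the absolute-loss gradient it looks like a placeholder. By the chain rule the gradient of the RMSE above is Xᵀ(Xw − y) / (n · RMSE); a sketch (hypothetical name, undefined when the residual is exactly zero):

import numpy as np

def root_mean_squared_gradient_sketch(xdata, ydata, weights):
    # d/dw sqrt(mean((y - Xw)^2)) = X^T (Xw - y) / (n * rmse)
    guess = np.dot(xdata, weights)
    n = np.shape(xdata)[0]
    rmse = np.sqrt(np.mean(np.square(ydata - guess)))
    return np.dot(xdata.T, guess - ydata) / (n * rmse)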
 class LinearRegressor:
 ...