Commit c90c56f0 authored by Sushant Mahajan's avatar Sushant Mahajan

completed gradient descent

parent 85b39497
Pipeline #284 skipped
......@@ -53,20 +53,24 @@ def regularization(cost, w1, w2, lamb, m):
return cost+reg
def gradient(del1, del2, w1, w2, lamb, m):
    """Turn accumulated backprop deltas into one flat gradient vector.

    Parameters
    ----------
    del1, del2 : np.ndarray
        Delta sums accumulated over the m training examples, with the
        same shapes as w1 and w2 (column 0 is the bias column).
    w1, w2 : np.ndarray
        Current weight matrices.  NOTE: both are modified in place --
        their non-bias columns are overwritten with the gradients.
    lamb : float
        L2 regularization strength.
    m : int
        Number of training examples.

    Returns
    -------
    np.ndarray
        1-D vector of w1.size + w2.size entries (w1 flattened, then w2).
    """
    # Average the accumulated deltas over the training set.
    del1, del2 = del1 / m, del2 / m
    # Regularize every column except the bias column (column 0).
    tgrad1 = del1[:, 1:] + lamb * w1[:, 1:] / m
    tgrad2 = del2[:, 1:] + lamb * w2[:, 1:] / m
    # Overwrite the non-bias columns of w1/w2 with the gradients and
    # flatten.  NOTE(review): column 0 of the returned vector still
    # holds the incoming weight values, not del[:, 0] -- confirm this
    # bias handling is intended and not a leftover bug.
    w1[:, 1:] = np.copy(tgrad1)
    w2[:, 1:] = np.copy(tgrad2)
    return np.append(w1.reshape(w1.size), w2.reshape(w2.size))
def cost(li, lh, lo, weights, X, y, lamb):
w1t = weights[:(li+1)*lh] #28x58
w2t = weights[(li+1)*lh:] #1x29
w1 = weights[:(li+1)*lh] #28x58
w2 = weights[(li+1)*lh:] #1x29
#28x58, 1x29
tdel1,tdel2 = np.zeros((lh,li+1),dtype=float), np.zeros((lo,lh+1),dtype=float)
#w1,w2 = getMatrix(lh, li+1, w1t), getMatrix(lo, lh+1, w2t)
w1 = np.array(w1t)
w2 = np.array(w2t)
# w1 = np.array(w1t)
# w2 = np.array(w2t)
#cost
m = len(X)
J = 0.0
......@@ -100,6 +104,16 @@ def cost(li, lh, lo, weights, X, y, lamb):
return J,grad
def fit(X, y, li, lh, lo, weights, lamb, eta, passes=10000, verbose=True):
    """Train the network with batch gradient descent.

    Each pass evaluates cost() for the scalar cost J and the flat
    gradient dw, then steps weights in place: weights -= eta * dw.

    Parameters
    ----------
    X, y        : training inputs and targets, forwarded to cost().
    li, lh, lo  : input / hidden / output layer sizes.
    weights     : flat np.ndarray of all weights; updated IN PLACE.
    lamb        : L2 regularization strength.
    eta         : learning rate.
    passes      : number of gradient-descent iterations.
    verbose     : when True, show a progress counter and print J
                  every 1000 passes.

    Returns
    -------
    The (in-place updated) weights array, for caller convenience.
    """
    for i in range(1, passes + 1):
        J, dw = cost(li, lh, lo, weights, X, y, lamb)
        weights += -eta * dw  # in-place gradient step
        if verbose:
            # Fix: progress output used to print on every pass even when
            # verbose was False; it is now gated on the flag.
            print(i, "\r", end='')
            if i % 1000 == 0:
                print()
                print(J)
    return weights
def predict(x, w1, w2):
x=[1]+x #58x1
x = np.array(x)
......@@ -115,7 +129,9 @@ if __name__ == "__main__":
# print(len(X), len(X[0]), len(y), X[0])
# print(len(tX), len(ty), tX[0])
li,lh,lo = tuple(params["layers"])
weights = [random() for _ in range(lh*(li+1)+lo*(lh+1))]
J,grad = cost(li, lh, lo, weights, X, y, 0.1)
print(J,grad)
weights = np.array([random() for _ in range(lh*(li+1)+lo*(lh+1))])
lamb,eta = 0.1,0.1
fit(X, y, li, lh, lo, weights, lamb, eta)
# J,grad = cost(li, lh, lo, weights, X, y, 0.1)
# print(J,grad)
#print(len(w1), len(w1[0]), len(w2), len(w2[0]))
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment