
Commit 503e3e0

committed
Gradient descent implemented using numpy
1 parent d64a0b1 commit 503e3e0

File tree

1 file changed (+40 -0)

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
#!/usr/bin/env python3

import numpy as np

# batch gradient descent for linear regression
def gradDescent(x, y, theta, alpha, m, nIter):
    loss = 0
    for i in range(0, nIter):
        h = np.dot(x, theta)                 # predictions for the current theta
        loss = h - y                         # residuals
        gradient = np.dot(x.transpose(), loss) / m
        theta = theta - alpha * gradient

    cost = np.sum(loss ** 2) / (2 * m)       # least-squares cost after the last update
    print("After %d iterations, cost is %f" % (nIter, cost))
    return theta

y = np.loadtxt('data/ex2y.dat')
x = np.loadtxt('data/ex2x.dat')

on = np.ones(np.shape(x))
# append ones for the offset (intercept) term
x = np.column_stack((on, x))

m, n = np.shape(x)
numIter = 10000

# starting values
theta = np.array([5.0, 5.0])
# learning rate
alpha = 0.05

# gradient descent
theta = gradDescent(x, y, theta, alpha, m, numIter)
print("theta =", theta)

# analytical solution via the normal equation: (x^T x)^-1 x^T y
t1 = np.linalg.inv(np.dot(x.transpose(), x))
theta2 = np.dot(np.dot(t1, x.transpose()), y)
print("theta2 =", theta2)
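The data files data/ex2y.dat and data/ex2x.dat are not part of this commit, so the script cannot be run from the diff alone. The sketch below is a minimal sanity check, not part of the commit: it calls gradDescent on synthetic single-feature data and compares the result against NumPy's least-squares solver. The seed, sample size, and true coefficients [2, 3] are made up for illustration, and gradDescent is assumed to be importable from the script above.

# sanity check for gradDescent (assumes gradDescent from the script above is defined)
import numpy as np

rng = np.random.default_rng(0)
x1 = rng.uniform(0, 1, size=200)                 # single feature, stand-in for ex2x.dat
y = 2.0 + 3.0 * x1 + rng.normal(0, 0.05, size=200)

X = np.column_stack((np.ones_like(x1), x1))      # prepend intercept column, as in the script
theta = gradDescent(X, y, np.array([5.0, 5.0]), alpha=0.05, m=len(y), nIter=10000)

theta_ls, *_ = np.linalg.lstsq(X, y, rcond=None)
print("gradient descent:", theta)                # should be close to [2, 3]
print("least squares:   ", theta_ls)

If the learning rate and iteration count are large enough for convergence, the gradient-descent estimate and the least-squares (normal equation) estimate should agree closely.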

0 commit comments
