Skip to content

Commit 15b937d

Browse files
committed
Merge pull request #1 from olest/master
Merge olest solution in R and Python
2 parents 3a003b3 + 0b0c6c1 commit 15b937d

File tree

2 files changed

+53
-0
lines changed

2 files changed

+53
-0
lines changed
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
# Simple linear regression of the Coursera ML Ex.2 data:
# fit weight (y) against age (x) and save a scatter plot with the fit line.

# Each data file holds a single column of observations.
x <- read.table("data/ex2x.dat")
y <- read.table("data/ex2y.dat")

# Ordinary least-squares fit of the first column of y on the first column of x.
ft <- lm(y[, 1] ~ x[, 1])

# Auto-print the fitted coefficients and the ANOVA table at top level.
ft
anova(ft)

# Scatter plot of the raw data with the fitted regression line, written to PNG.
png(file = "age_vs_weight.png", height = 600, width = 600)
plot(x[, 1], y[, 1])
abline(ft)
dev.off()
12+
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
#!/usr/bin/python
2+
3+
import numpy as np
4+
5+
# Implementation based on Ex. 1 in https://www.coursera.org/course/ml
def gradDescent(x, y, theta, alpha, m, nIter):
    """Fit linear-regression parameters by batch gradient descent.

    Parameters
    ----------
    x : ndarray, shape (m, n)
        Design matrix; first column is expected to be ones so that
        ``theta[0]`` acts as the intercept (set up by the caller).
    y : ndarray, shape (m,)
        Target values.
    theta : ndarray, shape (n,)
        Starting parameter vector.
    alpha : float
        Learning rate.
    m : int
        Number of training examples (rows of ``x``).
    nIter : int
        Number of full-batch update steps.

    Returns
    -------
    ndarray
        The parameter vector after ``nIter`` updates.
    """
    for _ in range(nIter):
        # Residuals of the current hypothesis h = x . theta.
        loss = np.dot(x, theta) - y
        # Average gradient of the squared-error cost over all m examples.
        gradient = np.dot(x.T, loss) / m
        theta = theta - alpha * gradient

    # Recompute the residuals at the FINAL theta so the reported cost is
    # correct even when nIter == 0 (the original printed a bogus 0 then,
    # and otherwise reported the cost of the second-to-last theta).
    loss = np.dot(x, theta) - y
    cost = np.sum(loss ** 2) / (2 * m)
    print("After %d iterations, cost is %f" % (nIter, cost))
    return theta
17+
18+
# ---- Script: fit the Ex.2 data both iteratively and analytically ----------

# One observation per row: y is the response, x the single predictor.
y = np.loadtxt('data/ex2y.dat')
x = np.loadtxt('data/ex2x.dat')

on = np.ones(np.shape(x))
# append ones for offset (intercept) term
x = np.column_stack((on, x))

m, n = np.shape(x)
numIter = 10000

# starting values
theta = np.array([5, 5])
# learning rate
alpha = 0.05

# gradient descent
theta = gradDescent(x, y, theta, alpha, m, numIter)
# print() calls for Python 3 compatibility and consistency with gradDescent
# (the original used Python 2 print statements).
print("theta=", theta)

# analytical solution via the normal equations: theta2 = (X^T X)^-1 X^T y
t1 = np.linalg.inv(np.dot(x.transpose(), x))
theta2 = np.dot(np.dot(t1, x.transpose()), y)
print("theta2=", theta2)
41+

0 commit comments

Comments
 (0)