Skip to content

Commit 28febf8

Browse files
committed
# Conflicts: # .gitignore # .idea/workspace.xml
2 parents 4db45cc + bc35c63 commit 28febf8

10 files changed

Lines changed: 733 additions & 19 deletions

File tree

.gitignore

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,4 +13,6 @@ html
1313
*.pyc
1414
*~
1515
*.swp
16-
/.idea/workspace.xml
16+
/.idea/*.xml
17+
/.idea/.name
18+
/.idea/DeepLearningTutorials-Peng.iml

.idea/.name

Lines changed: 0 additions & 1 deletion
This file was deleted.

.idea/DeepLearningTutorials-Peng.iml

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.idea/misc.xml

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

.idea/modules.xml

Lines changed: 0 additions & 8 deletions
This file was deleted.

.idea/vcs.xml

Lines changed: 0 additions & 6 deletions
This file was deleted.

.idea/workspace.xml

Lines changed: 621 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

mycode/run_MLP.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,9 @@
22
import numpy as np
33
from theano import tensor as T, function, shared
44

5+
rng = np.random.RandomState(1234)
56

67
def init_theta():
7-
rng = np.random.RandomState(1234)
88
szAll = [(784, 512), (512, 256), (256, 10)]
99
tmp = []
1010
for sz in szAll:

mycode/run_conv.py

Lines changed: 63 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,63 @@
1+
import dataset, models, loss, optim
2+
import numpy as np
3+
from theano import tensor as T, function, shared
4+
5+
rng = np.random.RandomState(1234)
6+
7+
8+
def init_theta_fc():
    """Create the fully-connected layer parameters as Theano shared variables.

    Two layers, 256->128 and 128->10. Each weight matrix is drawn from the
    Glorot uniform range +/- sqrt(6/(n_in+n_out)); each bias starts at zero.
    Returns a flat list [W1, b1, W2, b2] of float32 shared variables.
    """
    layer_shapes = [(256, 128), (128, 10)]
    params = []
    for n_in, n_out in layer_shapes:
        bound = np.sqrt(6. / (n_in + n_out))
        w = np.asarray(
            rng.uniform(low=-bound, high=bound, size=(n_in, n_out)),
            dtype='float32'
        )
        b = np.zeros((n_out,), 'float32')
        params.extend([w, b])
    return [shared(p, borrow=True) for p in params]
27+
28+
29+
def init_theta():
    """Create the convolution filter parameters as Theano shared variables.

    Two filter banks: (16, 1, 5, 5) and (16, 16, 5, 5), i.e.
    (n_out, n_in, kh, kw). Filters use Glorot uniform init; biases start
    at zero, one per output feature map.
    Returns a flat list [W1, b1, W2, b2] of float32 shared variables.

    Bug fixes vs. the original:
      * the loop iterated `szAll`, which is undefined in this function
        (NameError at first call) — it now iterates the local `sz_flt`;
      * fan-in/fan-out are computed for 4-D filter tensors instead of
        treating sz[0]/sz[1] as layer widths;
      * bias shape is (n_out,) = sz[0]; the original used sz[1], the
        *input* channel count.
    """
    sz_flt = [(16, 1, 5, 5), (16, 16, 5, 5)]
    tmp = []
    for sz in sz_flt:
        # For a conv filter (n_out, n_in, kh, kw):
        # fan_in = n_in*kh*kw, fan_out = n_out*kh*kw
        # (Glorot & Bengio 2010; same convention as the Theano LeNet tutorial).
        fan_in = np.prod(sz[1:])
        fan_out = sz[0] * np.prod(sz[2:])
        bound = np.sqrt(6. / (fan_in + fan_out))
        tmp.append(
            np.asarray(
                rng.uniform(low=-bound, high=bound, size=sz),
                dtype='float32'
            )
        )
        # One bias per output feature map.
        tmp.append(np.zeros((sz[0],), 'float32'))
    sh_theta = []
    for each in tmp:
        sh_theta.append(shared(each, borrow=True))
    return sh_theta
48+
49+
50+
if __name__ == '__main__':
    # Training configuration.
    itMax = 195*2    # total number of training iterations
    szBatch = 256    # minibatch size
    lr = 0.1         # SGD learning rate
    vaFreq = 20      # run validation every `vaFreq` iterations

    import tr_va_te
    # NOTE(review): this script is run_conv.py and `init_theta` builds conv
    # filters, yet the model factory passed in is `models.create_mlp` — confirm
    # whether a conv-specific creator in `models` was intended here.
    tr_va_te.run(itMax=itMax,
                 szBatch=szBatch,
                 lr=lr,
                 vaFreq=vaFreq,
                 init_theta=init_theta,
                 mo_create=models.create_mlp)

mycode/tmp_square.py

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
# Scratch script: builds a tiny Theano graph ell = mean(p + (p*I)**2),
# differentiates it w.r.t. the shared parameter p, and compiles a function
# that both evaluates the graph and applies one SGD step on p per call.
from theano import tensor as T, shared, function
import numpy as np

rng = np.random.RandomState(1234)

# fprop
#p = T.matrix(name='p', dtype='float32')
# p is a *shared* variable (stateful parameter), not a symbolic input,
# so T.grad can differentiate w.r.t. it and `updates` can modify it in place.
p = shared(
    np.asarray(
        rng.uniform(low=-1, high=1, size=(3, 4)),
        dtype='float32'
    ),
    borrow=True
)
I = T.matrix(name='I', dtype='float32')
z = p * I          # elementwise product
u = z**2
v = p + u
ell = T.mean(v)    # scalar loss
# bprop
dp = T.grad(ell, wrt=p)
# the graph: each call returns (ell, v, dp) and then updates p <- p - 0.1*dp
fg = function(
    inputs=[I],
    outputs=[ell, v, dp],
    updates=[(p, p - np.float32(0.1)*dp)]
)

# fire
# NOTE(review): pval is never used below; it does, however, advance the RNG
# stream, so deleting it would change the values subsequently drawn for Ival.
pval = np.asarray(
    rng.uniform(low=-1, high=1, size=(3, 4)),
    dtype='float32'
)
Ival = np.asarray(
    rng.uniform(low=-1, high=1, size=(3, 4)),
    dtype='float32'
)
[ellval, vval, dpval] = fg(Ival)

pass  # no-op: convenient breakpoint anchor for inspecting values in a debugger
41+
42+
43+

0 commit comments

Comments
 (0)