From c387c4fd0d2cfbce13626e8825373ab2a17de759 Mon Sep 17 00:00:00 2001
From: Paul Cheuk
Date: Sun, 23 Aug 2015 17:20:38 +0800
Subject: [PATCH] add my code

---
 mine_code/logistic_regression.py |  1 +
 mine_code/theano_intro.py        | 13 ++++++++
 mine_code/theano_intro_2.py      | 42 ++++++++++++++++++++++++++
 3 files changed, 56 insertions(+)
 create mode 100644 mine_code/logistic_regression.py
 create mode 100644 mine_code/theano_intro.py
 create mode 100644 mine_code/theano_intro_2.py

diff --git a/mine_code/logistic_regression.py b/mine_code/logistic_regression.py
new file mode 100644
index 00000000..1e352899
--- /dev/null
+++ b/mine_code/logistic_regression.py
@@ -0,0 +1 @@
+logistic_regression.py
\ No newline at end of file
diff --git a/mine_code/theano_intro.py b/mine_code/theano_intro.py
new file mode 100644
index 00000000..311451bd
--- /dev/null
+++ b/mine_code/theano_intro.py
@@ -0,0 +1,13 @@
+# Minimal Theano example: build a symbolic scalar product and compile it.
+import theano
+from theano import tensor as T
+
+a = T.scalar()
+b = T.scalar()
+y = a * b
+
+# theano.function compiles the symbolic graph into a callable.
+multiply = theano.function(inputs=[a, b], outputs=y)
+
+print(multiply(3, 4))
+print(multiply(4, 33))
diff --git a/mine_code/theano_intro_2.py b/mine_code/theano_intro_2.py
new file mode 100644
index 00000000..8db47031
--- /dev/null
+++ b/mine_code/theano_intro_2.py
@@ -0,0 +1,42 @@
+# Linear regression with Theano: fit y = w*x to noisy data via SGD.
+import theano
+from theano import tensor as T
+import numpy as np
+
+# Training data: y = 2x plus additive gaussian noise.
+# NOTE: linspace must span (-1, 1); linspace(1, 1, 101) would give 101
+# identical inputs, and the noise must be added (not multiplied) so the
+# expected slope stays 2.
+trX = np.linspace(-1, 1, 101)
+trY = 2 * trX + np.random.randn(*trX.shape) * 0.33
+
+X = T.scalar()
+Y = T.scalar()
+
+
+def model(X, w):
+    # Linear model with a single learnable weight (no bias term).
+    return X * w
+
+
+# Shared variable: the weight, initialised to 0 in Theano's float dtype.
+w = theano.shared(np.asarray(0., dtype=theano.config.floatX))
+
+y = model(X, w)
+
+# Mean squared error between prediction and target.
+cost = T.mean(T.sqr(y - Y))
+
+# Symbolic gradient of the cost w.r.t. the weight.
+gradient = T.grad(cost=cost, wrt=w)
+
+# Plain gradient descent with learning rate 0.01.
+updates = [[w, w - gradient * 0.01]]
+
+train = theano.function(inputs=[X, Y], outputs=cost,
+                        updates=updates, allow_input_downcast=True)
+
+# 100 epochs of one-sample-at-a-time SGD over the training set.
+for i in range(100):
+    for x_obs, y_obs in zip(trX, trY):
+        train(x_obs, y_obs)
+
+# get_value() extracts the learned numeric weight from the shared variable.
+print(w.get_value())  # should be close to 2