Skip to content

Commit 19040c3

Browse files
committed
add name to theano function and shared variable.
1 parent cb848c9 commit 19040c3

1 file changed

Lines changed: 10 additions & 6 deletions

File tree

code/logistic_cg.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,8 @@ def __init__(self, input, n_in, n_out):
7575
# initialize theta = (W,b) with 0s; W gets the shape (n_in, n_out),
7676
# while b is a vector of n_out elements, making theta a vector of
7777
# n_in*n_out + n_out elements
78-
self.theta = theano.shared( value = numpy.zeros(n_in*n_out+n_out, dtype = theano.config.floatX) )
78+
self.theta = theano.shared(value=numpy.zeros(n_in*n_out+n_out, dtype=theano.config.floatX),
79+
name='theta')
7980
# W is represented by the first n_in*n_out elements of theta
8081
self.W = self.theta[0:n_in*n_out].reshape((n_in,n_out))
8182
# b is the rest (last n_out elements)
@@ -225,27 +226,30 @@ def shared_dataset(data_xy):
225226
test_model = theano.function([minibatch_offset], classifier.errors(y),
226227
givens={
227228
x:test_set_x[minibatch_offset:minibatch_offset+batch_size],
228-
y:test_set_y[minibatch_offset:minibatch_offset+batch_size]})
229+
y:test_set_y[minibatch_offset:minibatch_offset+batch_size]},
230+
name="test")
229231

230232
validate_model = theano.function([minibatch_offset],classifier.errors(y),
231233
givens={
232234
x:valid_set_x[minibatch_offset:minibatch_offset+batch_size],
233-
y:valid_set_y[minibatch_offset:minibatch_offset+batch_size]})
235+
y:valid_set_y[minibatch_offset:minibatch_offset+batch_size]},
236+
name="validate")
234237

235238
# compile a theano function that returns the cost of a minibatch
236239
batch_cost = theano.function([minibatch_offset], cost,
237240
givens= {
238241
x : train_set_x[minibatch_offset:minibatch_offset+batch_size],
239-
y : train_set_y[minibatch_offset:minibatch_offset+batch_size]})
240-
242+
y : train_set_y[minibatch_offset:minibatch_offset+batch_size]},
243+
name="batch_cost")
241244

242245

243246
# compile a theano function that returns the gradient of the minibatch
244247
# with respect to theta
245248
batch_grad = theano.function([minibatch_offset], T.grad(cost,classifier.theta),
246249
givens= {
247250
x : train_set_x[minibatch_offset:minibatch_offset+batch_size],
248-
y : train_set_y[minibatch_offset:minibatch_offset+batch_size]})
251+
y : train_set_y[minibatch_offset:minibatch_offset+batch_size]},
252+
name="batch_grad")
249253

250254

251255
# creates a function that computes the average cost on the training set

0 commit comments

Comments
 (0)