Skip to content

Commit 2734849

Browse files
committed
Remove .value in DBN
1 parent 1fb52f0 commit 2734849

2 files changed

Lines changed: 7 additions & 7 deletions

File tree

code/DBN.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,7 @@ def pretraining_functions(self, train_set_x, batch_size,k):
144144
learning_rate = T.scalar('lr') # learning rate to use
145145

146146
# number of batches
147-
n_batches = train_set_x.value.shape[0] / batch_size
147+
n_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
148148
# beginning of a batch, given `index`
149149
batch_begin = index * batch_size
150150
# ending of a batch given `index`
@@ -190,8 +190,8 @@ def build_finetune_functions(self, datasets, batch_size, learning_rate):
190190
(test_set_x , test_set_y ) = datasets[2]
191191

192192
# compute number of minibatches for training, validation and testing
193-
n_valid_batches = valid_set_x.value.shape[0] / batch_size
194-
n_test_batches = test_set_x.value.shape[0] / batch_size
193+
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size
194+
n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size
195195

196196
index = T.lscalar('index') # index to a [mini]batch
197197

@@ -263,7 +263,7 @@ def test_DBN( finetune_lr = 0.1, pretraining_epochs = 100, \
263263
test_set_x , test_set_y = datasets[2]
264264

265265
# compute number of minibatches for training, validation and testing
266-
n_train_batches = train_set_x.value.shape[0] / batch_size
266+
n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
267267

268268
# numpy random generator
269269
numpy_rng = numpy.random.RandomState(123)

doc/DBN.txt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -294,7 +294,7 @@ Theano variable to it that has a default value.
294294
learning_rate = T.scalar('lr') # learning rate to use
295295

296296
# number of batches
297-
n_batches = train_set_x.value.shape[0] / batch_size
297+
n_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
298298
# beginning of a batch, given `index`
299299
batch_begin = index * batch_size
300300
# ending of a batch given `index`
@@ -358,8 +358,8 @@ and a ``test_model`` function).
358358
(test_set_x , test_set_y ) = datasets[2]
359359

360360
# compute number of minibatches for training, validation and testing
361-
n_valid_batches = valid_set_x.value.shape[0] / batch_size
362-
n_test_batches = test_set_x.value.shape[0] / batch_size
361+
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size
362+
n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size
363363

364364
index = T.lscalar('index') # index to a [mini]batch
365365

0 commit comments

Comments
 (0)