@@ -144,7 +144,7 @@ def pretraining_functions(self, train_set_x, batch_size,k):
144 144     learning_rate = T.scalar('lr')  # learning rate to use
145 145
146 146     # number of batches
147   -     n_batches = train_set_x.value.shape[0] / batch_size
147   +     n_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
148 148     # begining of a batch, given `index`
149 149     batch_begin = index * batch_size
150 150     # ending of a batch given `index`
@@ -190,8 +190,8 @@ def build_finetune_functions(self, datasets, batch_size, learning_rate):
190 190     (test_set_x, test_set_y) = datasets[2]
191 191
192 192     # compute number of minibatches for training, validation and testing
193   -     n_valid_batches = valid_set_x.value.shape[0] / batch_size
194   -     n_test_batches = test_set_x.value.shape[0] / batch_size
193   +     n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] / batch_size
194   +     n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size
195 195
196 196     index = T.lscalar('index')  # index to a [mini]batch
197 197
@@ -263,7 +263,7 @@ def test_DBN( finetune_lr = 0.1, pretraining_epochs = 100, \
263 263     test_set_x, test_set_y = datasets[2]
264 264
265 265     # compute number of minibatches for training, validation and testing
266   -     n_train_batches = train_set_x.value.shape[0] / batch_size
266   +     n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
267 267
268 268     # numpy random generator
269 269     numpy_rng = numpy.random.RandomState(123)
0 commit comments