@@ -240,15 +240,17 @@ def build_finetune_functions(self, datasets, batch_size, learning_rate,
240240 self .y : train_set_y [index * batch_size :
241241 (index + 1 ) * batch_size ]})
242242
243- test_score_i = theano .function ([index ], self .errors ,
244- givens = {self .x : test_set_x [index * batch_size :
245- (index + 1 ) * batch_size ],
246- self .y : test_set_y [index * batch_size :
247- (index + 1 ) * batch_size ]})
248-
249- test_classify = theano .function ([index ], self .labels ,
250- givens = {self .x : test_set_x [index * batchsize :
251- (index + 1 )* batch_size ]},)
243+ if test_result_avail :
244+ test_score_i = theano .function ([index ], self .errors ,
245+ givens = {self .x : test_set_x [index * batch_size :
246+ (index + 1 ) * batch_size ],
247+ self .y : test_set_y [index * batch_size :
248+ (index + 1 ) * batch_size ]})
249+
250+ else :
251+ test_classify = theano .function ([index ], self .labels ,
252+ givens = {self .x : test_set_x [index * batch_size :
253+ (index + 1 )* batch_size ]},)
252254
253255 valid_score_i = theano .function ([index ], self .errors ,
254256 givens = {self .x : valid_set_x [index * batch_size :
@@ -344,7 +346,8 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,
344346 print '... getting the finetuning functions'
345347 train_fn , validate_model , test_model = dbn .build_finetune_functions (
346348 datasets = datasets , batch_size = batch_size ,
347- learning_rate = finetune_lr )
349+ learning_rate = finetune_lr ,
350+ test_result_avail = False )
348351
349352 print '... finetunning the model'
350353 # early-stopping parameters
0 commit comments