22"""
33import os
44
5- import numpy , time , cPickle , gzip
5+ import numpy , time , cPickle , gzip , os , sys
66
77import theano
88import theano .tensor as T
@@ -141,6 +141,7 @@ def pretraining_functions(self, train_set_x, batch_size):
         # index to a [mini]batch
         index = T.lscalar('index')          # index to a minibatch
         learning_rate = T.scalar('lr')      # learning rate to use
+        k = T.lscalar('k')                  # number of Gibbs steps in CD-k

         # number of batches
         n_batches = train_set_x.value.shape[0] / batch_size
@@ -154,11 +155,12 @@ def pretraining_functions(self, train_set_x, batch_size):

             # get the cost and the updates list
             # TODO: change cost function to reconstruction error
-            cost, updates = rbm.cd(learning_rate, persistent=None)
+            cost, updates = rbm.get_cost_updates(learning_rate, persistent=None, k=k)

             # compile the theano function
             fn = theano.function(inputs=[index,
-                                         theano.Param(learning_rate, default=0.1)],
+                                         theano.Param(learning_rate, default=0.1),
+                                         theano.Param(k, default=1)],
                                  outputs=cost,
                                  updates=updates,
                                  givens={self.x: train_set_x[batch_begin:batch_end]})
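These two hunks make the number of Gibbs steps a call-time argument: `k` is a symbolic scalar, so the same compiled pretraining function can run CD-1 or CD-15 depending on the `k=` keyword it receives. A minimal sketch of how `k` could drive the chain inside `get_cost_updates` (an assumption modeled on the companion RBM tutorial, not code shown in this commit; `gibbs_hvh` and `chain_start` are that tutorial's helpers):

    # Hedged sketch: run k Gibbs steps with theano.scan. gibbs_hvh performs
    # one hidden -> visible -> hidden transition; chain_start seeds the chain.
    [pre_sigmoid_nvs, nv_means, nv_samples,
     pre_sigmoid_nhs, nh_means, nh_samples], updates = theano.scan(
            self.gibbs_hvh,
            outputs_info=[None, None, None, None, None, chain_start],
            n_steps=k)    # symbolic n_steps: the step count is fixed per call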
@@ -229,13 +231,10 @@ def test_score():
         return train_fn, valid_score, test_score


-
-
-
-
 def test_DBN(finetune_lr=0.1, pretraining_epochs=10, \
-             pretrain_lr=0.1, training_epochs=1000, \
-             dataset='mnist.pkl.gz'):
+             pretrain_lr=0.1, k=1, training_epochs=1000, \
+             dataset='../data/mnist.pkl.gz', batch_size=1,
+             output_folder='DBN_plots'):
     """
     Demonstrates how to train and test a Deep Belief Network.

@@ -253,18 +252,13 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=10, \
     :param dataset: path to the pickled dataset
     """

-    print 'finetune_lr = ', finetune_lr
-    print 'pretrain_lr = ', pretrain_lr

     datasets = load_data(dataset)

     train_set_x, train_set_y = datasets[0]
     valid_set_x, valid_set_y = datasets[1]
     test_set_x, test_set_y = datasets[2]

-
-    batch_size = 20    # size of the minibatch
-
     # compute number of minibatches for training, validation and testing
     n_train_batches = train_set_x.value.shape[0] / batch_size

@@ -295,7 +289,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=10, \
             c = []
             for batch_index in xrange(n_train_batches):
                 c.append(pretraining_fns[i](index=batch_index,
-                                            lr=pretrain_lr))
+                                            lr=pretrain_lr, k=k))
             print 'Pre-training layer %i, epoch %d, cost ' % (i, epoch), numpy.mean(c)

     end_time = time.clock()
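With the widened `test_DBN` signature, the CD step count, minibatch size, dataset path, and plot folder are all caller-controlled. A usage sketch (the argument values are illustrative choices, not defaults taken from this commit):

    if __name__ == '__main__':
        # Pretrain each RBM layer with CD-1 on minibatches of 10, then
        # finetune; pretrain_lr and batch_size here are illustrative.
        test_DBN(finetune_lr=0.1, pretraining_epochs=10,
                 pretrain_lr=0.01, k=1, training_epochs=1000,
                 dataset='../data/mnist.pkl.gz', batch_size=10)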