@@ -62,7 +62,7 @@ def __init__(self, rng, input, n_in, n_out, W=None, b=None,
 
         :type activation: theano.Op or function
         :param activation: Non linearity to be applied in the hidden
-                       layer
+                           layer
         """
         self.input = input
 
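
As context for this hunk: in the tutorial's HiddenLayer, the `activation` argument wraps the affine transform of the layer input. A minimal sketch of that pattern follows; the symbolic variables here are illustrative stand-ins, not the class's shared parameters built from `rng`:

import theano.tensor as T

x = T.matrix('x')  # layer input, one example per row
W = T.matrix('W')  # weight matrix (in the class, a shared variable)
b = T.vector('b')  # bias vector (in the class, a shared variable)

lin_output = T.dot(x, W) + b  # affine transform of the input
output = T.tanh(lin_output)   # the non-linearity applied in the hidden layer
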
@@ -219,7 +219,7 @@ def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
     print '... building the model'
 
     # allocate symbolic variables for the data
-    index = T.lscalar() # index to a [mini]batch
+    index = T.lscalar()  # index to a [mini]batch
     x = T.matrix('x')  # the data is presented as rasterized images
     y = T.ivector('y')  # the labels are presented as 1D vector of
                         # [int] labels
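
For readers following along: `index` only becomes meaningful once it is used to slice a minibatch out of a shared dataset. A rough, self-contained sketch of that pattern; the toy data, `batch_size`, and function name are illustrative assumptions, not code from this commit:

import numpy
import theano
import theano.tensor as T

index = T.lscalar()  # index to a [mini]batch
x = T.matrix('x')    # rasterized images
y = T.ivector('y')   # [int] labels

batch_size = 2
# Toy shared dataset standing in for train_set_x / train_set_y.
data_x = theano.shared(numpy.random.rand(10, 4).astype(theano.config.floatX))
data_y = theano.shared(numpy.arange(10, dtype='int32'))

# `index` selects which slice of the shared data is substituted for x and y.
get_batch_sums = theano.function(
    [index], [x.sum(), y.sum()],
    givens={x: data_x[index * batch_size:(index + 1) * batch_size],
            y: data_y[index * batch_size:(index + 1) * batch_size]})
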
@@ -259,10 +259,10 @@ def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
 
     # specify how to update the parameters of the model as a dictionary
     updates = {}
-    # given two list the zip A = [ a1,a2,a3,a4] and B = [b1,b2,b3,b4] of
+    # given two list the zip A = [a1, a2, a3, a4] and B = [b1, b2, b3, b4] of
     # same length, zip generates a list C of same size, where each element
     # is a pair formed from the two lists :
-    # C = [ (a1,b1), (a2,b2), (a3,b3) , (a4,b4) ]
+    # C = [(a1, b1), (a2, b2), (a3, b3), (a4, b4)]
     for param, gparam in zip(classifier.params, gparams):
         updates[param] = param - learning_rate * gparam
 
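
Since the comment block reformatted above is really just describing Python's built-in zip, here is a standalone illustration in plain Python (no Theano needed):

a_list = [1, 2, 3, 4]
b_list = [10, 20, 30, 40]
print zip(a_list, b_list)  # [(1, 10), (2, 20), (3, 30), (4, 40)]

In the loop itself, `gparams` holds (as in the surrounding tutorial code) one symbolic gradient per entry of `classifier.params`, in the same order, so zip pairs every parameter with its own gradient for the SGD update.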