@@ -53,13 +53,13 @@ def SGD(self, training_data, epochs, mini_batch_size, eta,
5353 but slows things down substantially. If ``test`` is set, then
5454 the appropriate ``test_data`` must be supplied.
5555 """
56- if test : n_test = len (test_inputs )
56+ if test : n_test = len (test_data )
5757 n = len (training_data )
5858 for j in xrange (epochs ):
5959 random .shuffle (training_data )
6060 mini_batches = [
6161 training_data [k :k + mini_batch_size ]
62- for k in xrange (0 , len ( training_data ) , mini_batch_size )]
62+ for k in xrange (0 , n , mini_batch_size )]
6363 for mini_batch in mini_batches :
6464 self .backprop (mini_batch , n , eta , lmbda )
6565 if test :
@@ -117,9 +117,9 @@ def evaluate(self, test_data):
117117 network outputs the correct result. Note that the neural
118118 network's output is assumed to be the index of whichever
119119 neuron in the final layer has the highest activation."""
120- test_results = [np .argmax (self .feedforward (x )) for x in test_data [ 0 ]]
121- return sum ( int ( x == y )
122- for x , y in zip ( test_results , test_data [ 1 ]) )
120+ test_results = [( np .argmax (self .feedforward (x )), y )
121+ for ( x , y ) in test_data ]
122+ return sum ( int ( x == y ) for ( x , y ) in test_results )
123123
124124 def cost (self , x , y ):
125125 """Return the quadratic cost associated to the network, with
0 commit comments