@@ -22,7 +22,7 @@
 SIZE = 1000
 
 
-# Generate training sets
+print "\nGenerating training data"
 training_data, _, _ = mnist_loader.load_data_nn()
 td_1 = [(x, x) for x, _ in training_data[0:SIZE]]
 td_2 = [(x, x) for x, _ in training_data[12500:12500+SIZE]]
@@ -44,7 +44,7 @@
     for x in td_3]
 encoded_training_data = zip(encoded_td_1, encoded_td_2)
 
-print "\Finding mapping between theories"
+print "\nFinding mapping between theories"
 net = Network([30, 60, 30])
 net.SGD(encoded_training_data, 6, 10, 0.01, 0.05)
 
@@ -57,5 +57,5 @@
 test_data = zip(encoded_test_1, encoded_test_2)
 print "Mean desired output activation: %s" % (
     sum(y.mean() for _, y in test_data) / SIZE,)
-error = sum([np.sum((net.feedforward(x)-y)**2) for (x, y) in test_data])
-print "Mean square error per training image: %s" % (error / SIZE,)
+error = sum(np.linalg.norm(net.feedforward(x)-y, 1) for (x, y) in test_data)
+print "Average l1 error per training image: %s" % (error / SIZE,)

0 commit comments