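Every file below swaps time.clock() for timeit.default_timer() (lstm.py switches to time.time() instead; see the note after its diff). time.clock() measured CPU time on Unix but wall-clock time on Windows, and it was later deprecated, so the old timings were not comparable across platforms; timeit.default_timer() always reports wall-clock time from the most precise clock available. A minimal sketch of the new idiom, in the tutorials' Python 2 style, with a hypothetical summing loop standing in for a training loop:

import timeit

# timeit.default_timer() is portable wall-clock time: under Python 2 it
# resolves to time.clock() on Windows and time.time() elsewhere.
start_time = timeit.default_timer()
total = sum(i * i for i in xrange(10 ** 6))  # hypothetical stand-in workload
end_time = timeit.default_timer()
print 'ran for %.2f seconds' % (end_time - start_time)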
10 changes: 5 additions & 5 deletions code/DBN.py
@@ -2,7 +2,7 @@
 """
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -327,7 +327,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,
                                                 k=k)

     print '... pre-training the model'
-    start_time = time.clock()
+    start_time = timeit.default_timer()
     ## Pre-train layer-wise
     for i in xrange(dbn.n_layers):
         # go through pretraining epochs
@@ -340,7 +340,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,
             print 'Pre-training layer %i, epoch %d, cost ' % (i, epoch),
             print numpy.mean(c)

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     # end-snippet-2
     print >> sys.stderr, ('The pretraining code for file ' +
                           os.path.split(__file__)[1] +
@@ -372,7 +372,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,

     best_validation_loss = numpy.inf
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     done_looping = False
     epoch = 0
@@ -424,7 +424,7 @@ def test_DBN(finetune_lr=0.1, pretraining_epochs=100,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(
         (
             'Optimization complete with best validation score of %f %%, '
10 changes: 5 additions & 5 deletions code/SdA.py
@@ -31,7 +31,7 @@
 """
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -379,7 +379,7 @@ def test_SdA(finetune_lr=0.1, pretraining_epochs=15,
                                                  batch_size=batch_size)

     print '... pre-training the model'
-    start_time = time.clock()
+    start_time = timeit.default_timer()
     ## Pre-train layer-wise
     corruption_levels = [.1, .2, .3]
     for i in xrange(sda.n_layers):
@@ -394,7 +394,7 @@ def test_SdA(finetune_lr=0.1, pretraining_epochs=15,
             print 'Pre-training layer %i, epoch %d, cost ' % (i, epoch),
             print numpy.mean(c)

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     print >> sys.stderr, ('The pretraining code for file ' +
                           os.path.split(__file__)[1] +
@@ -427,7 +427,7 @@ def test_SdA(finetune_lr=0.1, pretraining_epochs=15,

     best_validation_loss = numpy.inf
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     done_looping = False
     epoch = 0
@@ -471,7 +471,7 @@ def test_SdA(finetune_lr=0.1, pretraining_epochs=15,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(
         (
             'Optimization complete with best validation score of %f %%, '
6 changes: 3 additions & 3 deletions code/cA.py
@@ -30,7 +30,7 @@
 """
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -276,7 +276,7 @@ def test_cA(learning_rate=0.01, training_epochs=20,
         }
     )

-    start_time = time.clock()
+    start_time = timeit.default_timer()

     ############
     # TRAINING #
@@ -293,7 +293,7 @@ def test_cA(learning_rate=0.01, training_epochs=20,
         print 'Training epoch %d, reconstruction cost ' % epoch, numpy.mean(
             c_array[0]), ' jacobian norm ', numpy.mean(numpy.sqrt(c_array[1]))

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     training_time = (end_time - start_time)

6 changes: 3 additions & 3 deletions code/convolutional_mlp.py
@@ -23,7 +23,7 @@
 """
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -274,7 +274,7 @@ def evaluate_lenet5(learning_rate=0.1, n_epochs=200,
     best_validation_loss = numpy.inf
     best_iter = 0
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     epoch = 0
     done_looping = False
@@ -326,7 +326,7 @@ def evaluate_lenet5(learning_rate=0.1, n_epochs=200,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print('Optimization complete.')
     print('Best validation score of %f %% obtained at iteration %i, '
           'with test performance %f %%' %
10 changes: 5 additions & 5 deletions code/dA.py
@@ -32,7 +32,7 @@

 import os
 import sys
-import time
+import timeit

 import numpy

@@ -321,7 +321,7 @@ def test_dA(learning_rate=0.1, training_epochs=15,
         }
     )

-    start_time = time.clock()
+    start_time = timeit.default_timer()

     ############
     # TRAINING #
@@ -336,7 +336,7 @@ def test_dA(learning_rate=0.1, training_epochs=15,

         print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     training_time = (end_time - start_time)

@@ -379,7 +379,7 @@ def test_dA(learning_rate=0.1, training_epochs=15,
         }
     )

-    start_time = time.clock()
+    start_time = timeit.default_timer()

     ############
     # TRAINING #
@@ -394,7 +394,7 @@ def test_dA(learning_rate=0.1, training_epochs=15,

         print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     training_time = (end_time - start_time)

6 changes: 3 additions & 3 deletions code/logistic_cg.py
@@ -38,7 +38,7 @@

 import os
 import sys
-import time
+import timeit

 import numpy

@@ -275,7 +275,7 @@ def callback(theta_value):
     # using scipy conjugate gradient optimizer
     import scipy.optimize
     print ("Optimizing using scipy.optimize.fmin_cg...")
-    start_time = time.clock()
+    start_time = timeit.default_timer()
     best_w_b = scipy.optimize.fmin_cg(
         f=train_fn,
         x0=numpy.zeros((n_in + 1) * n_out, dtype=x.dtype),
@@ -284,7 +284,7 @@ def callback(theta_value):
         disp=0,
         maxiter=n_epochs
     )
-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(
         (
             'Optimization complete with best validation score of %f %%, with '
6 changes: 3 additions & 3 deletions code/logistic_sgd.py
@@ -38,7 +38,7 @@
 import gzip
 import os
 import sys
-import time
+import timeit

 import numpy

@@ -360,7 +360,7 @@ def sgd_optimization_mnist(learning_rate=0.13, n_epochs=1000,

     best_validation_loss = numpy.inf
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     done_looping = False
     epoch = 0
@@ -419,7 +419,7 @@ def sgd_optimization_mnist(learning_rate=0.13, n_epochs=1000,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(
         (
             'Optimization complete with best validation score of %f %%,'
4 changes: 2 additions & 2 deletions code/lstm.py
@@ -543,7 +543,7 @@ def train_lstm(

     uidx = 0  # the number of update done
     estop = False  # early stop
-    start_time = time.clock()
+    start_time = time.time()
     try:
         for eidx in xrange(max_epochs):
             n_samples = 0
@@ -622,7 +622,7 @@ def train_lstm(
     except KeyboardInterrupt:
         print "Training interupted"

-    end_time = time.clock()
+    end_time = time.time()
     if best_p is not None:
         zipp(best_p, tparams)
     else:
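Note that lstm.py moves to time.time() rather than timeit.default_timer(). Both report wall-clock time, and under Python 2 on Unix timeit.default_timer is simply an alias for time.time, so the two choices behave the same there.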
6 changes: 3 additions & 3 deletions code/mlp.py
@@ -23,7 +23,7 @@

 import os
 import sys
-import time
+import timeit

 import numpy

@@ -336,7 +336,7 @@ def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
     best_validation_loss = numpy.inf
     best_iter = 0
     test_score = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     epoch = 0
     done_looping = False
@@ -391,7 +391,7 @@ def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
                 done_looping = True
                 break

-    end_time = time.clock()
+    end_time = timeit.default_timer()
     print(('Optimization complete. Best validation score of %f %% '
            'obtained at iteration %i, with test performance %f %%') %
           (best_validation_loss * 100., best_iter + 1, test_score * 100.))
10 changes: 5 additions & 5 deletions code/rbm.py
@@ -4,7 +4,7 @@
 contain hidden variables. Restricted Boltzmann Machines further restrict BMs
 to those without visible-visible and hidden-hidden connections.
 """
-import time
+import timeit

 try:
     import PIL.Image as Image
@@ -428,7 +428,7 @@ def test_rbm(learning_rate=0.1, training_epochs=15,
     )

     plotting_time = 0.
-    start_time = time.clock()
+    start_time = timeit.default_timer()

     # go through training epochs
     for epoch in xrange(training_epochs):
@@ -441,7 +441,7 @@ def test_rbm(learning_rate=0.1, training_epochs=15,
         print 'Training epoch %d, cost is ' % epoch, numpy.mean(mean_cost)

         # Plot filters after each training epoch
-        plotting_start = time.clock()
+        plotting_start = timeit.default_timer()
         # Construct image from the weight matrix
         image = Image.fromarray(
             tile_raster_images(
@@ -452,10 +452,10 @@ def test_rbm(learning_rate=0.1, training_epochs=15,
             )
         )
         image.save('filters_at_epoch_%i.png' % epoch)
-        plotting_stop = time.clock()
+        plotting_stop = timeit.default_timer()
         plotting_time += (plotting_stop - plotting_start)

-    end_time = time.clock()
+    end_time = timeit.default_timer()

     pretraining_time = (end_time - start_time) - plotting_time

6 changes: 3 additions & 3 deletions code/rnnslu.py
@@ -8,7 +8,7 @@
 import stat
 import subprocess
 import sys
-import time
+import timeit

 import numpy

@@ -318,13 +318,13 @@ def main(param=None):
         shuffle([train_lex, train_ne, train_y], param['seed'])

         param['ce'] = e
-        tic = time.time()
+        tic = timeit.default_timer()

         for i, (x, y) in enumerate(zip(train_lex, train_y)):
             rnn.train(x, y, param['win'], param['clr'])
             print '[learning] epoch %i >> %2.2f%%' % (
                 e, (i + 1) * 100. / nsentences),
-            print 'completed in %.2f (sec) <<\r' % (time.time() - tic),
+            print 'completed in %.2f (sec) <<\r' % (timeit.default_timer() - tic),
             sys.stdout.flush()

         # evaluation // back into the real world : idx -> words