Skip to content

Commit ddf9998

Browse files
author
whyboris
committed
deep dream works!
1 parent 9c9cbab commit ddf9998

File tree

1 file changed

+146
-0
lines changed

1 file changed

+146
-0
lines changed

deepdream.py

Lines changed: 146 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,146 @@
1+
# Deep-dream script: maximizes InceptionV3 layer activations over an input
# image at several scales ("octaves").
print('let us deep dream!')  # fixed typo: "ream" -> "dream"

# Project-local setup hook (e.g. backend/session tweaks) -- defined elsewhere.
from hack import hack
hack()

# aux functions

import scipy
import scipy.ndimage  # `import scipy` alone does not load the ndimage submodule

from keras.preprocessing import image
10+
11+
12+
def resize_img(img, size):
    """Return a copy of the batched image `img` resized to `size`.

    `img` is indexed as (batch, height, width, channels); `size` is the
    target (height, width).  Batch and channel axes are left untouched;
    spatial axes are rescaled with linear (order=1) interpolation.
    """
    work = np.copy(img)
    zoom_factors = (
        1,
        float(size[0]) / work.shape[1],
        float(size[1]) / work.shape[2],
        1,
    )
    return scipy.ndimage.zoom(work, zoom_factors, order=1)
19+
20+
21+
def save_img(img, fname):
    """Deprocess the batched image `img` and write it to disk at `fname`.

    NOTE(review): `scipy.misc.imsave` was deprecated and removed in modern
    SciPy (>= 1.3) -- this requires an old SciPy with Pillow installed.
    """
    scipy.misc.imsave(fname, deprocess_image(np.copy(img)))
24+
25+
26+
def preprocess_image(image_path):
    """Load the image at `image_path` as a batched array ready for InceptionV3.

    Returns a float array of shape (1, height, width, channels) run through
    `inception_v3.preprocess_input`.
    """
    loaded = image.load_img(image_path)
    arr = image.img_to_array(loaded)
    batched = np.expand_dims(arr, axis=0)
    return inception_v3.preprocess_input(batched)
32+
33+
34+
def deprocess_image(x):
    """Invert the InceptionV3 preprocessing and convert to a displayable image.

    Takes a single-image batch `x` (values roughly in [-1, 1]), drops the
    batch axis, maps values back to [0, 255], and returns a uint8 array of
    shape (height, width, 3).
    """
    channels_first = K.image_data_format() == 'channels_first'
    if channels_first:
        # assumes x is (1, 3, h, w) -- TODO confirm; move channels last
        x = x.reshape((3, x.shape[2], x.shape[3]))
        x = x.transpose((1, 2, 0))
    else:
        x = x.reshape((x.shape[1], x.shape[2], 3))
    # In-place ops kept deliberately: reshape may return a view of the input.
    x /= 2.
    x += 0.5
    x *= 255.
    return np.clip(x, 0, 255).astype('uint8')
45+
46+
# the main code

from keras.applications import inception_v3
from keras import backend as K

# Freeze all training-phase behavior (dropout, etc.) -- inference only.
K.set_learning_phase(0)

# Pretrained InceptionV3 as a feature extractor; the classifier top is unused.
model = inception_v3.InceptionV3(weights='imagenet', include_top=False)

# model.summary()

# Weight of each layer's activations in the loss being maximized.
layer_contributions = {
    'mixed2': 0.2,
    'mixed3': 3.,
    'mixed4': 2.,
    'mixed5': 1.5,
}

layer_dict = dict([(layer.name, layer) for layer in model.layers])

# Loss: weighted sum of the L2 norms of the selected layers' activations,
# each normalized by the total size of its activation tensor.
loss = K.variable(0.)

for layer_name, coeff in layer_contributions.items():
    activation = layer_dict[layer_name].output

    scaling = K.prod(K.cast(K.shape(activation), 'float32'))
    # Trim a 2-pixel border to keep edge artifacts out of the loss.
    # Fix: `loss = loss + ...` instead of `loss += ...` -- in-place add on a
    # Keras variable raises "Variable += not supported" on newer backends.
    loss = loss + coeff * K.sum(K.square(activation[:, 2: -2, 2: -2, :])) / scaling

dream = model.input

# Gradient of the loss w.r.t. the input image, normalized for stable steps.
grads = K.gradients(loss, dream)[0]

grads /= K.maximum(K.mean(K.abs(grads)), 1e-7)

outputs = [loss, grads]

fetch_loss_and_grads = K.function([dream], outputs)
84+
85+
def eval_loss_and_grads(x):
    """Evaluate the compiled Keras function on image `x`.

    Returns the pair (loss_value, grad_values) produced by
    `fetch_loss_and_grads`.
    """
    loss_value, grad_values = fetch_loss_and_grads([x])
    return loss_value, grad_values
90+
91+
def gradient_ascent(x, iterations, step, max_loss=None):
    """Run up to `iterations` gradient-ascent steps on image `x`.

    Each step moves `x` by `step` times the loss gradient.  If `max_loss`
    is given, stop early once the loss exceeds it.  Returns the updated
    image.
    """
    for step_idx in range(iterations):
        loss_value, grad_values = eval_loss_and_grads(x)
        if max_loss is not None and loss_value > max_loss:
            break
        print('... loss value at ', step_idx, ':', loss_value)
        x += step * grad_values
    return x
99+
100+
import numpy as np

# Hyperparameters for the multi-scale ("octave") dreaming loop.
step = 0.01          # gradient-ascent step size
num_octave = 3       # number of scales to process
octave_scale = 1.4   # size ratio between successive scales
iterations = 20      # ascent steps per scale

max_loss = 10.       # stop ascending at a scale if the loss exceeds this

base_img_path = '../heatmap.jpg'

img = preprocess_image(base_img_path)

original_shape = img.shape[1:3]

# Shapes from smallest to largest: each octave is `octave_scale` larger.
successive_shapes = [original_shape]

for i in range(1, num_octave):
    shape = tuple([int(dim / (octave_scale ** i)) for dim in original_shape])
    successive_shapes.append(shape)

successive_shapes = successive_shapes[::-1]

original_image = np.copy(img)

shrunk_original_image = resize_img(img, successive_shapes[0])

for shape in successive_shapes:
    print('Processing shape: ', shape)
    img = resize_img(img, shape)
    img = gradient_ascent(img, iterations=iterations, step=step, max_loss=max_loss)

    # Re-inject the high-frequency detail lost by the downscale/upscale
    # round trip: compare the ORIGINAL image at this scale against the
    # upscaled shrunk copy.
    # Bug fix: the original code resized `shrunk_original_image` for both
    # terms, so `lost_detail` was always zero and no detail was restored.
    upscaled_shrunk_original_img = resize_img(shrunk_original_image, shape)

    same_original_size = resize_img(original_image, shape)

    lost_detail = same_original_size - upscaled_shrunk_original_img

    img += lost_detail

    shrunk_original_image = resize_img(original_image, shape)

    save_img(img, fname='dream_at_scale_' + str(shape) + '.png')

save_img(img, fname='final_dream.png')
146+

0 commit comments

Comments
 (0)