First, we need to pick a convolutional neural network pretrained on ImageNet: VGG16, Inception, ResNet50, and so on would all work. In practice, Inception tends to produce nicer-looking dreams, so here we use the Inception V3 model built into Keras.
# Disable eager execution
# See https://github.com/tensorflow/tensorflow/issues/33135
import tensorflow as tf

tf.compat.v1.disable_eager_execution()
# Load the pretrained Inception V3 model
from tensorflow.keras.applications import inception_v3
from tensorflow.keras import backend as K

K.set_learning_phase(0)
model = inception_v3.InceptionV3(weights='imagenet', include_top=False)
WARNING:tensorflow:From <ipython-input-3-aa9be14f7cc2>:6: set_learning_phase (from tensorflow.python.keras.backend) is deprecated and will be removed after 2020-10-11.
Instructions for updating:
Simply pass a True/False value to the `training` argument of the `__call__` method of your layer or model.
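The gradient-ascent code below maximizes a loss built from the activations of several Inception V3 layers, but the loss definition itself is not part of this excerpt. A minimal sketch of the usual setup follows; the layer names and weights in `layer_contributions` are illustrative choices, not values taken from the original code.

```python
# Sketch of the loss setup assumed by the gradient-ascent code below.
# Layer names and weights are illustrative; tune them to taste.
layer_contributions = {
    'mixed2': 0.2,
    'mixed3': 3.,
    'mixed4': 2.,
    'mixed5': 1.5,
}

layer_dict = {layer.name: layer for layer in model.layers}

loss = K.variable(0.)
for layer_name, coeff in layer_contributions.items():
    activation = layer_dict[layer_name].output
    # L2 norm of the activations (channels_last assumed),
    # excluding border pixels to avoid edge artifacts
    scaling = K.prod(K.cast(K.shape(activation), 'float32'))
    loss = loss + coeff * K.sum(K.square(activation[:, 2:-2, 2:-2, :])) / scaling
```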
def gradient_ascent(x, iterations, step, max_loss=None):
    for i in range(iterations):
        loss_value, grad_values = eval_loss_and_grads(x)
        if max_loss is not None and loss_value > max_loss:
            break
        print(f' loss value at {i}: {loss_value}')
        x += step * grad_values
    return x
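`gradient_ascent` relies on an `eval_loss_and_grads` helper that is not shown above. Assuming the loss sketched earlier, it can be implemented with a `K.function` that returns the loss and the normalized gradient of the loss with respect to the input image:

```python
# Sketch: fetch the loss and gradients for the current image,
# assuming `loss` and `model` defined above.
dream = model.input
grads = K.gradients(loss, dream)[0]
# Normalize gradients so the step size behaves consistently across images
grads /= K.maximum(K.mean(K.abs(grads)), 1e-7)
fetch_loss_and_grads = K.function([dream], [loss, grads])

def eval_loss_and_grads(x):
    outs = fetch_loss_and_grads([x])
    return outs[0], outs[1]
```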
import numpy as np

def deprocess_image(x):
    '''Convert a tensor into a valid image.'''
    if K.image_data_format() == 'channels_first':
        x = x.reshape((3, x.shape[2], x.shape[3]))
        x = x.transpose((1, 2, 0))
    else:
        x = x.reshape((x.shape[1], x.shape[2], 3))
    x /= 2.
    x += 0.5
    x *= 255.
    x = np.clip(x, 0, 255).astype('uint8')
    return x
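`deprocess_image` reverses the scaling applied by `inception_v3.preprocess_input`, which maps pixel values into [-1, 1]. The matching loading and resizing helpers are not included in this excerpt; the sketch below shows one common version (the `scipy` dependency and the helper names are assumptions):

```python
import numpy as np
import scipy.ndimage
from tensorflow.keras.preprocessing import image

def preprocess_image(image_path):
    '''Load an image and turn it into an Inception V3-compatible tensor.'''
    img = image.load_img(image_path)
    img = image.img_to_array(img)
    img = np.expand_dims(img, axis=0)
    return inception_v3.preprocess_input(img)

def resize_img(img, size):
    '''Resize a batch-of-one image tensor to the given (height, width).'''
    img = np.copy(img)
    factors = (1,
               float(size[0]) / img.shape[1],
               float(size[1]) / img.shape[2],
               1)
    return scipy.ndimage.zoom(img, factors, order=1)
```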
original_shape = img.shape[1:3]
successive_shapes = [original_shape]
for i in range(1, num_octave):
    shape = tuple([dim // (octave_scale ** i) for dim in original_shape])
    successive_shapes.append(shape)
successive_shapes = successive_shapes[::-1]
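These octave shapes drive the main DeepDream loop, which is missing from this excerpt: the image is resized to each successive shape, refined by gradient ascent, and then the detail lost during downscaling is reinjected. The sketch below builds on the helpers above; `step`, `iterations`, `max_loss`, and the output filename are illustrative settings consistent with the log that follows.

```python
# Sketch of the octave loop; hyperparameters are illustrative.
step = 0.01        # gradient ascent step size
iterations = 20    # ascent steps per octave
max_loss = 10.     # stop an octave early if the loss exceeds this

original_img = np.copy(img)
shrunk_original_img = resize_img(img, successive_shapes[0])

for shape in successive_shapes:
    print('Processing image shape', shape)
    # Scale the dream up to this octave and continue gradient ascent
    img = resize_img(img, shape)
    img = gradient_ascent(img, iterations=iterations, step=step, max_loss=max_loss)
    # Reinject the detail that was lost when downscaling the original image
    upscaled_shrunk_original_img = resize_img(shrunk_original_img, shape)
    same_size_original = resize_img(original_img, shape)
    lost_detail = same_size_original - upscaled_shrunk_original_img
    img += lost_detail
    shrunk_original_img = resize_img(original_img, shape)

image.save_img('final_dream.png', deprocess_image(np.copy(img)))
```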
Processing image shape (510.0, 510.0)
loss value at 0: 0.7559041976928711
loss value at 1: 0.8942177295684814
...
loss value at 18: 6.890047073364258
loss value at 19: 7.1789984703063965
Processing image shape (714.0, 714.0)
loss value at 0: 1.6531521081924438
loss value at 1: 2.5666146278381348
...
loss value at 18: 9.60411548614502
loss value at 19: 9.890054702758789
Processing image shape (1000, 1000)
loss value at 0: 1.6969424486160278
loss value at 1: 2.754427909851074
...
loss value at 17: 9.593267440795898
loss value at 18: 9.928627014160156