-
Notifications
You must be signed in to change notification settings - Fork 0
/
vgg16.py
100 lines (81 loc) · 3.85 KB
/
vgg16.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
#import _pickle as cpickle
import pickle as cpickle
import tensorflow as tf
import os
import tensorflow_utils as tf_utils
# noinspection PyPep8Naming
class VGG16(object):
    """VGG-16 feature extractor initialized from pretrained Caffe weights.

    Loads the pickled Caffe weight dictionary once at construction. Each
    call builds the (frozen, ``trainable=False``) convolutional stack up to
    ``conv5_3`` and returns the final ReLU activation of each requested
    stage. ``self.reuse`` flips to True after the first build so later
    calls share the same TF1 variables.
    """

    def __init__(self, name='vgg16', weight_file_path=None):
        """Load the pretrained weight dictionary.

        Args:
            name: Variable-scope name for the network.
            weight_file_path: Optional path to the pickled Caffe weight
                file. Defaults to ``models_zoo/caffe_layers_value.pickle``
                resolved against the current working directory.
        """
        self.name = name
        self.reuse = False  # becomes True after the first graph build

        if weight_file_path is None:
            weight_file_path = os.path.abspath("models_zoo/caffe_layers_value.pickle")
        # encoding='latin1' is required to unpickle numpy arrays that were
        # written under Python 2.
        with open(weight_file_path, 'rb') as f:
            self.pretrained_weights = cpickle.load(f, encoding='latin1')

    def __call__(self, x, mode=1):
        """Build the VGG-16 graph on ``x`` and return stage activations.

        Args:
            x: Input tensor; the single channel is tiled to 3 channels
                along the last axis to match VGG's RGB input.
            mode: Integer in [1, 5]; how many stages' final ReLU
                activations to return.

        Returns:
            List ``[relu1_2, relu2_2, relu3_3, relu4_3, relu5_3][:mode]``.

        Raises:
            NotImplementedError: If ``mode`` is outside [1, 5]. Checked
                before any graph construction so a bad mode cannot flip
                ``self.reuse`` or leave a half-built scope behind.
        """
        if not 1 <= mode <= 5:
            raise NotImplementedError('mode must be in [1, 5], got {}'.format(mode))

        with tf.variable_scope(self.name, reuse=self.reuse):
            # Replicate the single input channel into the 3 channels VGG expects.
            x = tf.concat([x, x, x], axis=-1, name='concat')
            tf_utils.print_activations(x)

            # conv1
            relu1_1 = self.conv_layer(x, 'conv1_1', trainable=False)
            relu1_2 = self.conv_layer(relu1_1, 'conv1_2', trainable=False)
            pool_1 = tf_utils.max_pool_2x2(relu1_2, name='max_pool_1')
            tf_utils.print_activations(pool_1)

            # conv2
            relu2_1 = self.conv_layer(pool_1, 'conv2_1', trainable=False)
            relu2_2 = self.conv_layer(relu2_1, 'conv2_2', trainable=False)
            pool_2 = tf_utils.max_pool_2x2(relu2_2, name='max_pool_2')
            tf_utils.print_activations(pool_2)

            # conv3
            relu3_1 = self.conv_layer(pool_2, 'conv3_1', trainable=False)
            relu3_2 = self.conv_layer(relu3_1, 'conv3_2', trainable=False)
            relu3_3 = self.conv_layer(relu3_2, 'conv3_3', trainable=False)
            pool_3 = tf_utils.max_pool_2x2(relu3_3, name='max_pool_3')
            tf_utils.print_activations(pool_3)

            # conv4
            relu4_1 = self.conv_layer(pool_3, 'conv4_1', trainable=False)
            relu4_2 = self.conv_layer(relu4_1, 'conv4_2', trainable=False)
            relu4_3 = self.conv_layer(relu4_2, 'conv4_3', trainable=False)
            pool_4 = tf_utils.max_pool_2x2(relu4_3, name='max_pool_4')
            tf_utils.print_activations(pool_4)

            # conv5 (no pooling; only the ReLU outputs are exposed)
            relu5_1 = self.conv_layer(pool_4, 'conv5_1', trainable=False)
            relu5_2 = self.conv_layer(relu5_1, 'conv5_2', trainable=False)
            relu5_3 = self.conv_layer(relu5_2, 'conv5_3', trainable=False)

        # Share variables on every subsequent call.
        self.reuse = True

        # Final activation of each stage, truncated to the requested depth.
        return [relu1_2, relu2_2, relu3_3, relu4_3, relu5_3][:mode]

    def conv_layer(self, bottom, name, trainable=False):
        """Convolution + bias + ReLU, initialized from the Caffe weights.

        Args:
            bottom: Input tensor.
            name: Caffe layer name; used both to look up the pretrained
                weights and as the TF variable scope.
            trainable: Whether the created variables are trainable
                (False everywhere in this network).

        Returns:
            The ReLU activation tensor.
        """
        with tf.variable_scope(name):
            w = self.get_conv_weight(name)
            b = self.get_bias(name)
            conv_weights = tf.get_variable("W", shape=w.shape,
                                           initializer=tf.constant_initializer(w),
                                           trainable=trainable)
            conv_biases = tf.get_variable("b", shape=b.shape,
                                          initializer=tf.constant_initializer(b),
                                          trainable=trainable)
            conv = tf.nn.conv2d(bottom, conv_weights, [1, 1, 1, 1], padding='SAME')
            bias = tf.nn.bias_add(conv, conv_biases)
            relu = tf.nn.relu(bias)
            tf_utils.print_activations(relu)
            return relu

    def get_conv_weight(self, name):
        """Return the conv kernel for ``name`` transposed from Caffe's
        (out, in, h, w) layout to TensorFlow's (h, w, in, out) layout."""
        f = self.get_weight(name)
        return f.transpose((2, 3, 1, 0))

    def get_weight(self, layer_name):
        """Return the raw (Caffe-ordered) weight array for ``layer_name``."""
        return self.pretrained_weights[layer_name][0]

    def get_bias(self, layer_name):
        """Return the bias vector for ``layer_name``."""
        return self.pretrained_weights[layer_name][1]