from caffe.proto import caffe_pb2
from google.protobuf import text_format


def drop_absorber_weights(model, net):
    """Absorb the inverted-dropout scale 1/(1-p) into the preceding conv layers."""
    # load the prototxt file as a protobuf message
    with open(model) as f:
        msg = caffe_pb2.NetParameter()
        text_format.Merge(f.read(), msg)

    # iterate over all layers of the network
    for i, layer in enumerate(msg.layer):
        # dropout is implemented here as a custom Python layer
        if layer.type != 'Python':
            continue

        # conv layers are always two layers behind the dropout layer
        conv_layer = msg.layer[i - 2].name

        # get some necessary sizes from the kernel blob: (num_output, channels, h, w)
        shape_of_kernel_blob = net.params[conv_layer][0].data.shape
        number_of_feature_maps = shape_of_kernel_blob[0]
        kernel_size = 1  # channels * h * w (computed here but not used below)
        for x in shape_of_kernel_blob[1:4]:
            kernel_size *= x

        # copy_double is a helper defined elsewhere in the original script;
        # it returns a deep copy of the blob data
        weight = copy_double(net.params[conv_layer][0].data)
        bias = copy_double(net.params[conv_layer][1].data)

        # get the dropout ratio p from the Python layer's param_str,
        # which is expected to be a dict literal such as "{'p': 0.5}"
        python_params = eval(layer.python_param.param_str)
        p = float(python_params['p'])
        scale = 1.0 / (1.0 - p)

        # scale the weights and biases of every feature map by 1/(1-p)
        for j in range(number_of_feature_maps):
            net.params[conv_layer][0].data[j] = weight[j] * scale
            net.params[conv_layer][1].data[j] = bias[j] * scale

    return net
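

# A minimal usage sketch (not from the original post): the prototxt and
# caffemodel file names below are placeholders, and the prototxt is assumed
# to contain the Python dropout layers in the convention expected above.
# The custom dropout Python layer module must be importable for caffe.Net
# to instantiate the network.
import caffe

net = caffe.Net('train_val.prototxt', 'weights.caffemodel', caffe.TEST)
net = drop_absorber_weights('train_val.prototxt', net)
net.save('weights_dropout_absorbed.caffemodel')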