import numpy as np
import caffe
from caffe.proto import caffe_pb2
from google.protobuf import text_format

def read_net(filename):
    net = caffe_pb2.NetParameter()
    with open(filename) as f:
        text_format.Parse(f.read(), net)
    return net
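
A minimal usage sketch for read_net, assuming a deploy-style prototxt at a hypothetical path; it parses the text-format NetParameter and lists the layers:

net = read_net('deploy.prototxt')  # placeholder path
for layer in net.layer:
    print(layer.name, layer.type)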
def generate_nbn_caffemodel(input_prototxt, input_caffemodel, output_prototxt, output_caffemodel, eps=0.00001):
    input_network = caffe.Net(input_prototxt, input_caffemodel, caffe.TEST)
    with open(input_caffemodel, 'rb') as f:
        tmp_model = caffe_pb2.NetParameter()
        tmp_model.ParseFromString(f.read())
    layers = tmp_model.layer
    output_network = caffe.Net(output_prototxt, caffe.TEST)
    # layer types with no parameters to merge or copy; skipped outright
    skip_types = ("Input", "Eltwise", "Scale", "BatchNorm", "ImageData", "ReLU",
                  "Pooling", "Split", "Concat", "Flatten", "SoftmaxWithLoss")
    for i in range(len(layers)):
        if layers[i].type in skip_types:
            continue
        elif layers[i].type == "Convolution":
            # only convolutions immediately followed by BatchNorm + Scale are folded;
            # other convolutions are left untouched here
            if not (layers[i + 2].type == "Scale" and layers[i + 1].type == "BatchNorm"):
                continue
            bn_conv = layers[i + 1].name
            scale_conv = layers[i + 2].name
            conv_w = input_network.params[layers[i].name][0].data[...]
            print(layers[i].name, layers[i + 1].name, layers[i + 2].name,
                  layers[i + 2].scale_param.bias_term,
                  layers[i].convolution_param.bias_term, conv_w.shape)
            if layers[i].convolution_param.bias_term:
                # original conv bias
                conv_b = input_network.params[layers[i].name][1].data[...]
            else:
                conv_b = np.zeros((conv_w.shape[0],), dtype=np.float32)
            # original BatchNorm blobs: mean, variance and the moving-average factor
            scale = input_network.params[bn_conv][2].data[...]
            mean = input_network.params[bn_conv][0].data[...]
            var = input_network.params[bn_conv][1].data[...]
            # original Scale blobs: per-channel gamma and beta
            scale_w = input_network.params[scale_conv][0].data[...]
            scale_b = input_network.params[scale_conv][1].data[...]
            # fold BN + Scale into the convolution; the stored mean/var must first
            # be divided by the moving-average factor (blobs[2])
            var = np.sqrt(var / scale + eps)     # per-channel standard deviation
            conv_b = conv_b - mean / scale       # subtract the running mean
            conv_b = conv_b / var                # normalize the bias
            var = scale_w / var                  # gamma / std: per-channel weight multiplier
            conv_b = scale_w * conv_b + scale_b  # apply gamma and beta
            for j in range(len(var)):
                output_network.params[layers[i].name][0].data[j] = var[j] * conv_w[j]
            output_network.params[layers[i].name][1].data[...] = conv_b
        else:
            # any other parametric layer (e.g. InnerProduct): copy weights and bias unchanged
            output_network.params[layers[i].name][0].data[...] = input_network.params[layers[i].name][0].data[...]
            output_network.params[layers[i].name][1].data[...] = input_network.params[layers[i].name][1].data[...]
    output_network.save(output_caffemodel)
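
A hedged usage sketch for generate_nbn_caffemodel; all four paths are placeholders. The output prototxt is assumed to be the input prototxt with the BatchNorm and Scale layers removed and bias_term set to true on the affected convolutions:

generate_nbn_caffemodel('resnet_deploy.prototxt', 'resnet.caffemodel',
                        'resnet_nbn_deploy.prototxt', 'resnet_nbn.caffemodel')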
# copy_double is referenced below but not defined in this snippet; a plausible
# definition (deep-copy blob data as float64) is assumed here:
def copy_double(data):
    return np.array(data, copy=True, dtype=np.double)

def bn_absorber_weights(model, weights):
    # load the prototxt file as a protobuf message
    with open(model) as f:
        msg = caffe_pb2.NetParameter()
        text_format.Merge(f.read(), msg)
    # load the net
    net = caffe.Net(model, weights, caffe.TEST)
    # iterate over all layers of the network
    for i, layer in enumerate(msg.layer):
        if not layer.type == 'BN':
            continue
        # special case: this bn layer comes right after a concat layer, so skip it
        if msg.layer[i].name == 'bn0_1':
            continue
        if msg.layer[i - 1].type == 'Deconvolution':  # do not merge into deconv layers
            continue
        bn_layer = msg.layer[i].name
        conv_layer = msg.layer[i - 1].name
        # kernel size = product of the non-output dimensions of the kernel blob
        kernel_size = 1
        shape_of_kernel_blob = net.params[conv_layer][0].data.shape
        number_of_feature_maps = shape_of_kernel_blob[0]
        for x in shape_of_kernel_blob[1:4]:
            kernel_size *= x
        weight = copy_double(net.params[conv_layer][0].data)
        bias = copy_double(net.params[conv_layer][1].data)
        # new_gamma and new_beta were already computed by the compute_bn_statistics.py script
        new_gamma = net.params[bn_layer][0].data[...]
        new_beta = net.params[bn_layer][1].data[...]
        # fold the BN parameters into the weights and biases over all feature maps:
        #   weight_new = weight * gamma_new
        #   bias_new   = bias * gamma_new + beta_new
        # for more information see https://github.com/alexgkendall/caffe-segnet/issues/109
        for j in range(number_of_feature_maps):
            net.params[conv_layer][0].data[j] = weight[j] * np.repeat(new_gamma.item(j), kernel_size).reshape(
                net.params[conv_layer][0].data[j].shape)
            net.params[conv_layer][1].data[j] = bias[j] * new_gamma.item(j) + new_beta.item(j)
        # zero out the no-longer-needed bn params
        net.params[bn_layer][0].data[:] = 0
        net.params[bn_layer][1].data[:] = 0
    return net
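
A short sketch of how bn_absorber_weights might be driven, with hypothetical file names; the returned caffe.Net is saved with the BN statistics folded into the preceding convolution weights:

net = bn_absorber_weights('segnet_deploy.prototxt', 'segnet_bn.caffemodel')
net.save('segnet_merged.caffemodel')  # placeholder output path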
def convertBN(inmodel, outmodel):
    """Subsume all the BN layers inside inmodel into normal layers in the out model."""
    # load files
    print('Loading caffemodel: {}'.format(inmodel))
    with open(inmodel, 'rb') as f:
        binary_content = f.read()
    protobuf = caffe_pb2.NetParameter()
    protobuf.ParseFromString(binary_content)
    layers = protobuf.layer
    _eps = 1e-5
    for layer in layers:
        if layer.type == 'BatchNorm':
            # the layer to be modified: its name is assumed to be the BN layer's
            # name with the first three characters (e.g. 'bn_') stripped
            layer_c = [l for l in layers if l.name == layer.name[3:]][0]
            # the parameters of the computational layer
            w = np.reshape(np.array(layer_c.blobs[0].data), layer_c.blobs[0].shape.dim)
            b = np.reshape(np.array(layer_c.blobs[1].data), layer_c.blobs[1].shape.dim)
            # load the BN parameters (blobs[2] holds the moving-average scale factor)
            factor = 0 if np.array(layer.blobs[2].data) == 0 else 1. / np.array(layer.blobs[2].data)
            mean = np.array(layer.blobs[0].data) * factor
            var = np.array(layer.blobs[1].data) * factor
            print('Modifying layer {} based on information from {}'.format(layer_c.name, layer.name))
            # update the weights
            if len(w.shape) == 4:
                w /= (_eps + np.sqrt(var)[:, np.newaxis, np.newaxis, np.newaxis])
            elif len(w.shape) == 2:
                w /= (_eps + np.sqrt(var)[:, np.newaxis])
            # update the bias
            b -= mean
            b /= (_eps + np.sqrt(var))
            # save the changes back to the model
            del layer_c.blobs[0].data[:]
            del layer_c.blobs[1].data[:]
            layer_c.blobs[0].data.extend(w.flatten().tolist())
            layer_c.blobs[1].data.extend(b.flatten().tolist())
    # save the model to the out model
    new_binary_content = protobuf.SerializeToString()
    print('Saving caffemodel: {}'.format(outmodel))
    with open(outmodel, 'wb') as f:
        f.write(new_binary_content)
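
convertBN rewrites the blobs in place, dividing the weights by (eps + sqrt(var)) and shifting the bias by the mean, so the output model runs without BatchNorm layers. A minimal invocation, with placeholder paths:

convertBN('model_with_bn.caffemodel', 'model_without_bn.caffemodel')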
def get_transformer(deploy_file, mean_file=None):
    """
    Returns an instance of caffe.io.Transformer

    Arguments:
    deploy_file -- path to a .prototxt file

    Keyword arguments:
    mean_file -- path to a .binaryproto file (optional)
    """
    network = caffe_pb2.NetParameter()
    with open(deploy_file) as infile:
        text_format.Merge(infile.read(), network)
    if network.input_shape:
        dims = network.input_shape[0].dim
    else:
        dims = network.input_dim[:4]
    t = caffe.io.Transformer(inputs={'data': dims})
    t.set_transpose('data', (2, 0, 1))  # transpose to (channels, height, width)
    if dims[1] == 3:
        # color image: swap channels from RGB to BGR
        t.set_channel_swap('data', (2, 1, 0))
    if mean_file:
        # set the mean pixel; the .binaryproto must be read in binary mode
        with open(mean_file, 'rb') as infile:
            blob = caffe_pb2.BlobProto()
            blob.MergeFromString(infile.read())
            if blob.HasField('shape'):
                blob_dims = blob.shape.dim
                assert len(blob_dims) == 4, 'Shape should have 4 dimensions - shape is "%s"' % blob.shape
            elif blob.HasField('num') and blob.HasField('channels') and \
                    blob.HasField('height') and blob.HasField('width'):
                blob_dims = (blob.num, blob.channels, blob.height, blob.width)
            else:
                raise ValueError('blob does not provide shape or 4d dimensions')
            pixel = np.reshape(blob.data, blob_dims[1:]).mean(1).mean(1)
            t.set_mean('data', pixel)
    return t
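
A usage sketch showing how the returned transformer is typically applied before a forward pass; the paths and net are placeholders, not part of the original snippet:

transformer = get_transformer('deploy.prototxt', 'mean.binaryproto')
net = caffe.Net('deploy.prototxt', 'weights.caffemodel', caffe.TEST)
image = caffe.io.load_image('example.jpg')  # HxWx3 RGB array in [0, 1]
# note: models trained on 0-255 inputs may also need
# transformer.set_raw_scale('data', 255)
net.blobs['data'].data[...] = transformer.preprocess('data', image)
output = net.forward()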
def bn_absorber_weights(model, weights):
    # variant of the absorber above: merge only when a Convolution layer
    # sits immediately before the BN layer
    # load the prototxt file as a protobuf message
    with open(model) as f:
        msg = caffe_pb2.NetParameter()
        text_format.Merge(f.read(), msg)
    # load the net
    net = caffe.Net(model, weights, caffe.TEST)
    # iterate over all layers of the network
    for i, layer in enumerate(msg.layer):
        # merging is only possible if a conv layer sits right before the bn layer
        if not layer.type == 'BN':
            continue
        if not msg.layer[i - 1].type == 'Convolution':
            continue
        # get the names of the bn and conv layers
        bn_layer = msg.layer[i].name
        conv_layer = msg.layer[i - 1].name
        # kernel size = product of the non-output dimensions of the kernel blob
        kernel_size = 1
        shape_of_kernel_blob = net.params[conv_layer][0].data.shape
        number_of_feature_maps = shape_of_kernel_blob[0]
        for x in shape_of_kernel_blob[1:4]:
            kernel_size *= x
        weight = copy_double(net.params[conv_layer][0].data)
        bias = copy_double(net.params[conv_layer][1].data)
        # new_gamma and new_beta were already computed by the compute_bn_statistics.py script
        new_gamma = net.params[bn_layer][0].data[...]
        new_beta = net.params[bn_layer][1].data[...]
        # fold the BN parameters into the weights and biases over all feature maps:
        #   weight_new = weight * gamma_new
        #   bias_new   = bias * gamma_new + beta_new
        # for more information see https://github.com/alexgkendall/caffe-segnet/issues/109
        for j in range(number_of_feature_maps):
            net.params[conv_layer][0].data[j] = weight[j] * np.repeat(new_gamma.item(j), kernel_size).reshape(
                net.params[conv_layer][0].data[j].shape)
            net.params[conv_layer][1].data[j] = bias[j] * new_gamma.item(j) + new_beta.item(j)
        # zero out the no-longer-needed bn params
        net.params[bn_layer][0].data[:] = 0
        net.params[bn_layer][1].data[:] = 0
    return net