def __init__(self):
    caffe.set_mode_gpu()
    # caffe.set_device(0)
    model_path = '../models/bvlc_googlenet/'  # substitute your path here
    net_fn = model_path + 'deploy.prototxt'
    param_fn = model_path + 'bvlc_googlenet.caffemodel'
    # Patch the deploy prototxt so gradients can propagate back to the input.
    model = caffe.io.caffe_pb2.NetParameter()
    text_format.Merge(open(net_fn).read(), model)
    model.force_backward = True  # enable backward pass to the input layer
    open('tmp.prototxt', 'w').write(str(model))
    self.net = caffe.Classifier('tmp.prototxt', param_fn,
                                mean=np.float32([104.0, 116.0, 122.0]),  # ImageNet BGR mean
                                channel_swap=(2, 1, 0))  # RGB -> BGR
    # guided mode is enabled when flag == 1
    self.flag = 0
    self.epoch = 20
    self.end = 'inception_4c/output'
    # self.end = 'conv4'
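A minimal, self-contained sketch of the parse-patch-rewrite pattern used above, assuming pycaffe and protobuf are installed; the file paths are placeholders:

from google.protobuf import text_format
from caffe.proto import caffe_pb2

def patch_force_backward(src_prototxt, dst_prototxt):
    model = caffe_pb2.NetParameter()
    with open(src_prototxt) as f:
        text_format.Merge(f.read(), model)  # text prototxt -> NetParameter message
    model.force_backward = True             # gradients will reach the input layer
    with open(dst_prototxt, 'w') as f:
        f.write(text_format.MessageToString(model))  # serialize back to text

# patch_force_backward('deploy.prototxt', 'tmp.prototxt')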
def __init__(self, solver_prototxt, pretrained_model=None):
    """Initialize the SolverWrapper."""
    self.solver = caffe.SGDSolver(solver_prototxt)
    if pretrained_model is not None:
        print('Loading pretrained model weights from {:s}'.format(pretrained_model))
        self.solver.net.copy_from(pretrained_model)
    # Parse the solver prototxt to decide between GPU and CPU mode.
    self.solver_param = caffe.io.caffe_pb2.SolverParameter()
    with open(solver_prototxt, 'rt') as f:
        text_format.Merge(f.read(), self.solver_param)
    if self.solver_param.solver_mode == 1:  # SolverParameter.GPU
        caffe.set_mode_gpu()
        caffe.set_device(params.gpu_id)  # params is a module-level config object
        print('Use GPU', params.gpu_id, 'to train')
    else:
        print('Use CPU to train')
    # initialize the python data layer
    self.solver.net.layers[0].set_db()
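For reference, a hedged standalone sketch of reading the solver mode the same way (in caffe.proto, SolverParameter.GPU equals 1; the file name is a placeholder):

from google.protobuf import text_format
from caffe.proto import caffe_pb2

def solver_uses_gpu(solver_prototxt):
    """Return True if the solver prototxt requests GPU mode."""
    solver_param = caffe_pb2.SolverParameter()
    with open(solver_prototxt) as f:
        text_format.Merge(f.read(), solver_param)
    return solver_param.solver_mode == caffe_pb2.SolverParameter.GPU

# solver_uses_gpu('solver.prototxt')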
def parse_proto(text, message_name):
    if message_name in protos:
        p = protos[message_name]()  # instantiate the registered message class
        text_format.Merge(text, p)
        return p
    import pdb; pdb.set_trace()  # debug hook: drop into the debugger on unknown names
    raise RuntimeError("unknown message name: '" + message_name + "'")
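A hedged usage sketch; protos is assumed to be a name-to-class registry, shown here with a hypothetical entry:

from caffe.proto import caffe_pb2

# Hypothetical registry mapping message names to generated protobuf classes.
protos = {'NetParameter': caffe_pb2.NetParameter}

net = parse_proto('name: "example_net"', 'NetParameter')
print(net.name)  # -> example_net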
def create_proto_expression():
    # Text-format protobuf for the expression tree: a.value * (xyz.value + b.value)
    s = """arithmetic_operator { left { atom { field { component { name: "a" } component { name: "value" } } } } right { arithmetic_operator { left { atom { field { component { name: "xyz" } component { name: "value" } } } } right { atom { field { component { name: "b" } component { name: "value" } } } } operator: PLUS } } operator: MULTIPLY }"""
    e = protocall_pb2.Expression()
    text_format.Merge(s, e)
    return e
def _load_layer_types(prototxt):
    # Read prototxt with caffe protobuf definitions
    layers = caffe_pb2.NetParameter()
    with open(prototxt, 'r') as f:
        text_format.Merge(f.read(), layers)
    # Map each layer name to its type, preserving definition order
    types = OrderedDict()
    for layer in layers.layer:
        types[layer.name] = layer.type
    return types
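A hedged usage sketch; the prototxt content below is a minimal hypothetical example:

tiny_net = '''
name: "tiny"
layer { name: "data" type: "Input" top: "data"
        input_param { shape { dim: 1 dim: 3 dim: 224 dim: 224 } } }
layer { name: "conv1" type: "Convolution" bottom: "data" top: "conv1"
        convolution_param { num_output: 16 kernel_size: 3 } }
'''
with open('tiny.prototxt', 'w') as f:
    f.write(tiny_net)
print(_load_layer_types('tiny.prototxt'))
# -> OrderedDict([('data', 'Input'), ('conv1', 'Convolution')])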
def load(self):
    '''Load the layer definitions from the prototxt.'''
    self.params = get_caffe_resolver().NetParameter()
    with open(self.def_path, 'r') as def_file:  # text mode: Merge expects str, not bytes
        text_format.Merge(def_file.read(), self.params)
def parseProtoString(s):
    from google.protobuf import text_format
    proto_net = pb.NetParameter()
    text_format.Merge(s, proto_net)
    return proto_net
def read_proto_file(file_path, parser_object):
    # open() raises an IOError on failure, so no explicit falsiness check is needed
    with open(file_path, 'r') as f:
        text_format.Merge(f.read(), parser_object)
    return parser_object
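A hedged usage sketch of the helper above; the file name is a placeholder and any generated protobuf message works as parser_object:

from caffe.proto import caffe_pb2

net = read_proto_file('deploy.prototxt', caffe_pb2.NetParameter())
print(net.name, len(net.layer))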
def parse_test_proto(proto_type, proto_string):
    instance = proto_type()
    text_format.Merge(proto_string, instance)
    return instance
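For example, a hedged sketch using tf.train.Example as the message type (as in the coder test further below):

import tensorflow as tf

example = parse_test_proto(tf.train.Example, 'features { }')
print(example)  # an empty Example message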
def merge(self, parent, child):
    '''Merge the child node into the parent.'''
    raise NotImplementedError('Must be implemented by subclass')
def __init__(self, def_path, data_path, target_toolkit, input_shape=None, phase='test'):
    self.layer_name_map = {}
    self.data_injector = None
    self.is_train_proto = False
    self.input_shape = input_shape
    if def_path is None:
        if self.input_shape is None:
            raise ConversionError('if the graph prototxt is not provided, the input shape should be provided')
        self.input_shape = [1] + self.input_shape
        def_path, self.data_injector = self.gen_prototxt_from_caffemodel(data_path, self.input_shape)
        self.is_train_proto = True
    else:
        model = get_caffe_resolver().NetParameter()
        with open(def_path, 'r') as f:
            text_format.Merge(f.read(), model)
        layers = model.layers or model.layer  # support both V1 and V2 layer fields
        if len([layer for layer in layers if NodeKind.map_raw_kind(layer.type) in LAYER_IN_TRAIN_PROTO]) > 0:
            if self.input_shape is None:
                raise ConversionError('the train_val.prototxt should be provided with the input shape')
            self.input_shape = [1] + self.input_shape
            self.is_train_proto = True
    graph = GraphBuilder(def_path, self.input_shape, self.is_train_proto, phase).build()
    if self.is_train_proto:
        def_path = graph.prototxt
    if data_path is not None:
        graph = graph.transformed([
            self.data_injector if self.data_injector else DataInjector(def_path, data_path),  # Load and associate learned parameters
            BatchNormScaleBiasFuser(),
            BatchNormPreprocessor()  # Pre-process batch normalization data
        ])
    target_toolkit = target_toolkit.lower()
    if target_toolkit not in ('caffe', 'caffe2'):
        # Reshape the parameters to TensorFlow's ordering
        graph = graph.transformed([
            DataReshaper({
                NodeKind.Convolution: (2, 3, 1, 0),  # (c_o, c_i, h, w) -> (h, w, c_i, c_o)
                NodeKind.InnerProduct: (1, 0)        # (c_o, c_i) -> (c_i, c_o)
            }),
            ParameterNamer()  # Convert parameters to dictionaries
        ])
    self.graph = graph
    # self.graph = NodeRenamer()(graph)
    print_stderr(self.graph)
def load(self):
    self.model = get_caffe_resolver().NetParameter()
    with open(self.model_path, 'r') as f:
        text_format.Merge(f.read(), self.model)
    if self.is_train_proto:
        self.process_train_proto()
def test_example_proto_coder_default_value(self):
    input_schema = dataset_schema.from_feature_spec({
        'scalar_feature_3':
            tf.FixedLenFeature(shape=[], dtype=tf.float32, default_value=1.0),
        'scalar_feature_4':
            tf.FixedLenFeature(shape=[], dtype=tf.float32, default_value=0.0),
        '1d_vector_feature':
            tf.FixedLenFeature(
                shape=[1], dtype=tf.float32, default_value=[2.0]),
        '2d_vector_feature':
            tf.FixedLenFeature(
                shape=[2, 2],
                dtype=tf.float32,
                default_value=[[1.0, 2.0], [3.0, 4.0]]),
    })
    coder = example_proto_coder.ExampleProtoCoder(input_schema)

    # An empty Example: every feature should fall back to its default value.
    example_proto_text = """
    features {
    }
    """
    example = tf.train.Example()
    text_format.Merge(example_proto_text, example)
    data = example.SerializeToString()

    # Assert the data is decoded into the expected format.
    expected_decoded = {
        'scalar_feature_3': 1.0,
        'scalar_feature_4': 0.0,
        '1d_vector_feature': [2.0],
        '2d_vector_feature': [[1.0, 2.0], [3.0, 4.0]],
    }
    decoded = coder.decode(data)
    np.testing.assert_equal(expected_decoded, decoded)
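The same text_format round trip as a standalone hedged sketch, with no tf.transform dependency; the feature key "x" is hypothetical:

import tensorflow as tf
from google.protobuf import text_format

example = tf.train.Example()
text_format.Merge("""
features {
  feature { key: "x" value { float_list { value: 3.5 } } }
}
""", example)
serialized = example.SerializeToString()           # wire-format bytes
roundtrip = tf.train.Example.FromString(serialized)
assert roundtrip.features.feature['x'].float_list.value[0] == 3.5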
@classmethod
def block_from_ascii(cls, text):
    """Returns Block protobuf parsed from ASCII text."""
    block = block_pb2.Block()
    text_format.Merge(text, block)
    return block
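text_format.MessageToString is the inverse operation; a hedged sketch of a generic ASCII round trip, using NetParameter as a stand-in since Block's schema isn't shown here:

from google.protobuf import text_format
from caffe.proto import caffe_pb2

msg = caffe_pb2.NetParameter(name='roundtrip')
ascii_text = text_format.MessageToString(msg)  # serialize to ASCII text format
parsed = caffe_pb2.NetParameter()
text_format.Merge(ascii_text, parsed)          # parse it back
assert parsed.name == 'roundtrip'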