def load_model(dirname):
    model_filename = dirname + "/model.hdf5"
    param_filename = dirname + "/params.json"
    if os.path.isfile(param_filename):
        print("loading {} ...".format(param_filename))
        with open(param_filename, "r") as f:
            try:
                params = json.load(f)
            except Exception as e:
                raise Exception("could not load {}".format(param_filename))
        model = seq2seq(**params)
        if os.path.isfile(model_filename):
            print("loading {} ...".format(model_filename))
            serializers.load_hdf5(model_filename, model)
        return model
    else:
        return None
Example source code for Python's load_hdf5()
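Chainer's serializers.load_hdf5(filename, obj) restores previously saved parameters into an already constructed Link/Chain (or optimizer), which is why every snippet below builds the object first and only then loads the weights. Below is a minimal round-trip sketch, assuming Chainer and h5py are installed; the two-layer MLP is a stand-in network, not one of the models used in the snippets.

import chainer
import chainer.functions as F
import chainer.links as L
from chainer import serializers

class MLP(chainer.Chain):
    # stand-in network, used only to demonstrate the save/load round trip
    def __init__(self):
        super(MLP, self).__init__()
        with self.init_scope():
            self.l1 = L.Linear(784, 100)
            self.l2 = L.Linear(100, 10)

    def __call__(self, x):
        return self.l2(F.relu(self.l1(x)))

model = MLP()
serializers.save_hdf5("model.hdf5", model)   # write parameters to an HDF5 file
serializers.load_hdf5("model.hdf5", model)   # restore them into a compatible object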
def load_model(dirname):
    model_filename = dirname + "/model.hdf5"
    param_filename = dirname + "/params.json"
    if os.path.isfile(param_filename):
        print("loading {} ...".format(param_filename))
        with open(param_filename, "r") as f:
            try:
                params = json.load(f)
            except Exception as e:
                raise Exception("could not load {}".format(param_filename))
        qrnn = RNNModel(**params)
        if os.path.isfile(model_filename):
            print("loading {} ...".format(model_filename))
            serializers.load_hdf5(model_filename, qrnn)
        return qrnn
    else:
        return None
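A hedged usage sketch for the loader above: the directory layout (params.json alongside model.hdf5) comes straight from the snippet, but the checkpoint directory name and the fallback hyperparameters are illustrative assumptions.

model = load_model("checkpoint")        # hypothetical checkpoint directory
if model is None:
    # no saved hyperparameters found; build a fresh model instead
    model = RNNModel(**default_params)  # default_params: an assumed dict of hyperparameters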
def load(self):
    filename = "fc_value.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.fc_value)
        print("model fc_value loaded successfully.")
    filename = "fc_advantage.model"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.fc_advantage)
        print("model fc_advantage loaded successfully.")
    filename = "fc_value.optimizer"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.optimizer_fc_value)
        print("optimizer fc_value loaded successfully.")
    filename = "fc_advantage.optimizer"
    if os.path.isfile(filename):
        serializers.load_hdf5(filename, self.optimizer_fc_advantage)
        print("optimizer fc_advantage loaded successfully.")
def test(args):
    trace('loading model ...')
    word_vocab = Vocabulary.load(args.model + '.words')
    phrase_vocab = Vocabulary.load(args.model + '.phrases')
    semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
    parser = Parser.load_spec(args.model + '.spec')
    if args.use_gpu:
        parser.to_gpu()
    serializers.load_hdf5(args.model + '.weights', parser)
    trace('generating parse trees ...')
    with open(args.source) as fp:
        for l in fp:
            word_list = convert_word_list(l.split(), word_vocab)
            tree = combine_xbar(
                restore_labels(
                    parser.forward(word_list, None, args.unary_limit),
                    phrase_vocab,
                    semiterminal_vocab))
            print('( ' + tree_to_string(tree) + ' )')
    trace('finished.')
def test(args):
    trace('loading model ...')
    word_vocab = Vocabulary.load(args.model + '.words')
    phrase_vocab = Vocabulary.load(args.model + '.phrases')
    semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
    parser = Parser.load_spec(args.model + '.spec')
    if args.use_gpu:
        parser.to_gpu()
    serializers.load_hdf5(args.model + '.weights', parser)
    embed_cache = {}
    trace('generating parse trees ...')
    with open(args.source) as fp:
        for l in fp:
            word_list = to_vram_words(convert_word_list(l.split(), word_vocab))
            tree = combine_xbar(
                restore_labels(
                    parser.forward(word_list, None, args.unary_limit, embed_cache),
                    phrase_vocab,
                    semiterminal_vocab))
            print('( ' + tree_to_string(tree) + ' )')
    trace('finished.')
def test(args):
    trace('loading model ...')
    word_vocab = Vocabulary.load(args.model + '.words')
    phrase_vocab = Vocabulary.load(args.model + '.phrases')
    semi_vocab = Vocabulary.load(args.model + '.semiterminals')
    parser = Parser.load_spec(args.model + '.spec')
    if USE_GPU:
        parser.to_gpu()
    serializers.load_hdf5(args.model + '.weights', parser)
    trace('generating parse trees ...')
    with open(args.source) as fp:
        for l in fp:
            word_list = convert_word_list(l.split(), word_vocab)
            tree = combine_xbar(
                restore_labels(
                    parser.forward(word_list, None, args.unary_limit),
                    phrase_vocab,
                    semi_vocab))
            print('( ' + tree_to_string(tree) + ' )')
    trace('finished.')
def test(args):
    trace('loading model ...')
    word_vocab = Vocabulary.load(args.model + '.words')
    phrase_vocab = Vocabulary.load(args.model + '.phrases')
    semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
    parser = Parser.load_spec(args.model + '.spec')
    if args.use_gpu:
        parser.to_gpu()
    serializers.load_hdf5(args.model + '.weights', parser)
    embed_cache = {}
    parser.reset()
    trace('generating parse trees ...')
    with open(args.source) as fp:
        for l in fp:
            word_list = to_vram_words(convert_word_list(l.split(), word_vocab))
            tree = combine_xbar(
                restore_labels(
                    parser.forward(word_list, None, args.unary_limit, embed_cache),
                    phrase_vocab,
                    semiterminal_vocab))
            print('( ' + tree_to_string(tree) + ' )')
    trace('finished.')
def test(args):
    trace('loading model ...')
    word_vocab = Vocabulary.load(args.model + '.words')
    phrase_vocab = Vocabulary.load(args.model + '.phrases')
    semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
    parser = Parser.load_spec(args.model + '.spec')
    if args.use_gpu:
        parser.to_gpu()
    serializers.load_hdf5(args.model + '.weights', parser)
    trace('generating parse trees ...')
    with open(args.source) as fp:
        for l in fp:
            word_list = convert_word_list(l.split(), word_vocab)
            tree = combine_xbar(
                restore_labels(
                    parser.forward_test(word_list, args.unary_limit),
                    phrase_vocab,
                    semiterminal_vocab))
            print('( ' + tree_to_string(tree) + ' )')
    trace('finished.')
def test(args):
    trace('loading model ...')
    word_vocab = Vocabulary.load(args.model + '.words')
    phrase_vocab = Vocabulary.load(args.model + '.phrases')
    semi_vocab = Vocabulary.load(args.model + '.semiterminals')
    parser = Parser.load_spec(args.model + '.spec')
    if USE_GPU:
        parser.to_gpu()
    serializers.load_hdf5(args.model + '.weights', parser)
    trace('generating parse trees ...')
    with open(args.source) as fp:
        for l in fp:
            word_list = convert_word_list(l.split(), word_vocab)
            tree = restore_labels(
                parser.forward(word_list, None, args.unary_limit),
                phrase_vocab,
                semi_vocab
            )
            print('( ' + tree_to_string(tree) + ' )')
    trace('finished.')
def test(args):
    trace('loading model ...')
    word_vocab = Vocabulary.load(args.model + '.words')
    phrase_vocab = Vocabulary.load(args.model + '.phrases')
    semi_vocab = Vocabulary.load(args.model + '.semiterminals')
    parser = Parser.load_spec(args.model + '.spec')
    if args.use_gpu:
        parser.to_gpu()
    serializers.load_hdf5(args.model + '.weights', parser)
    trace('generating parse trees ...')
    with open(args.source) as fp:
        for l in fp:
            word_list = convert_word_list(l.split(), word_vocab)
            tree = combine_xbar(
                restore_labels(
                    parser.forward(word_list, None, args.unary_limit),
                    phrase_vocab,
                    semi_vocab))
            print('( ' + tree_to_string(tree) + ' )')
    trace('finished.')
def test(args):
    trace('loading model ...')
    word_vocab = Vocabulary.load(args.model + '.words')
    phrase_vocab = Vocabulary.load(args.model + '.phrases')
    semiterminal_vocab = Vocabulary.load(args.model + '.semiterminals')
    parser = Parser.load_spec(args.model + '.spec')
    if args.use_gpu:
        parser.to_gpu()
    serializers.load_hdf5(args.model + '.weights', parser)
    trace('generating parse trees ...')
    with open(args.source) as fp:
        for l in fp:
            word_list = to_vram_words(convert_word_list(l.split(), word_vocab))
            tree = combine_xbar(
                restore_labels(
                    parser.forward(word_list, None, args.unary_limit),
                    phrase_vocab,
                    semiterminal_vocab))
            print('( ' + tree_to_string(tree) + ' )')
    trace('finished.')