def build(nc, w, h,
          loss='categorical_crossentropy',
          optimizer='adam',
          **kwargs):
    data_shape = w * h if None not in (w, h) else -1  # TODO: -1 or None?
    inp = Input(shape=(h, w, 3))
    enet = encoder.build(inp)
    enet = decoder.build(enet, nc=nc)
    name = 'enet_naive_upsampling'
    # TODO: need to remove data_shape for multi-scale training
    enet = Reshape((data_shape, nc))(enet)
    enet = Activation('softmax')(enet)
    model = Model(inputs=inp, outputs=enet)
    model.compile(optimizer=optimizer, loss=loss,
                  metrics=['accuracy', 'mean_squared_error'])
    return model, name
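A minimal usage sketch for the builder above, assuming the project-local encoder and decoder modules are importable. The values of nc, w, and h are arbitrary; the returned model expects images of shape (h, w, 3) and flattened one-hot targets of shape (h * w, nc), matching the Reshape((data_shape, nc)) layer:

# Hypothetical usage; build() is the function above, nc/w/h values are arbitrary.
import numpy as np

nc, w, h = 12, 256, 256                          # classes, width, height
model, name = build(nc=nc, w=w, h=h)

images = np.random.random((4, h, w, 3)).astype('float32')
labels = np.random.randint(0, nc, size=(4, h * w))
targets = np.eye(nc, dtype='float32')[labels]    # (4, h * w, nc) one-hot maps

model.fit(images, targets, batch_size=4, epochs=1)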
def build(nc, w, h,
          loss='categorical_crossentropy',
          # optimizer='adadelta'):
          optimizer='adam',
          metrics=None,
          **kwargs):
    data_shape = w * h if None not in (w, h) else -1  # TODO: -1 or None?
    inp = Input(shape=(h, w, 3), name='image')
    enet = encoder.build(inp)
    enet = decoder.build(enet, nc=nc)
    name = 'enet_unpooling'
    # TODO: need to remove data_shape for multi-scale training
    enet = Reshape((data_shape, nc))(enet)
    enet = Activation('softmax', name='output')(enet)
    model = Model(inputs=inp, outputs=enet)
    if metrics is None:
        metrics = ['accuracy']
    model.compile(optimizer=optimizer, loss=loss, metrics=metrics)
    return model, name
def test_trainable_argument():
    x = np.random.random((5, 3))
    y = np.random.random((5, 2))

    model = Sequential()
    model.add(Dense(2, input_dim=3, trainable=False))
    model.compile('rmsprop', 'mse')
    out = model.predict(x)
    model.train_on_batch(x, y)
    out_2 = model.predict(x)
    assert_allclose(out, out_2)

    # test with nesting
    input = Input(shape=(3,))
    output = model(input)
    model = Model(input, output)
    model.compile('rmsprop', 'mse')
    out = model.predict(x)
    model.train_on_batch(x, y)
    out_2 = model.predict(x)
    assert_allclose(out, out_2)
def run_parallel_test(data_generator):
    a = Input(shape=(3,), name='input_a')
    b = Input(shape=(3,), name='input_b')
    a_2 = Dense(4, name='dense_1')(a)
    dp = Dropout(0.5, name='dropout')
    b_2 = dp(b)

    optimizer = 'rmsprop'
    loss = 'mse'
    loss_weights = [1., 0.5]

    model = Model([a, b], [a_2, b_2])
    model = make_parallel(model, 2)
    model.compile(optimizer, loss,
                  metrics=[],
                  loss_weights=loss_weights,
                  sample_weight_mode=None)

    trained_epochs = []
    tracker_cb = LambdaCallback(
        on_epoch_begin=lambda epoch, logs: trained_epochs.append(epoch))
    model.fit_generator(data_generator(4),
                        steps_per_epoch=3,
                        epochs=5,
                        initial_epoch=2,
                        callbacks=[tracker_cb])
    assert trained_epochs == [2, 3, 4]
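A sketch of a generator that would satisfy the data_generator argument above; random_data_generator is a hypothetical name, and the target shapes mirror the model's two outputs (the Dense(4) branch and the shape-preserving Dropout branch):

# Hypothetical generator; yields ([input_a, input_b], [target_a, target_b]) forever.
import numpy as np

def random_data_generator(batch_size):
    while True:
        x_a = np.random.random((batch_size, 3))
        x_b = np.random.random((batch_size, 3))
        y_a = np.random.random((batch_size, 4))   # matches the Dense(4) output
        y_b = np.random.random((batch_size, 3))   # Dropout preserves the (3,) shape
        yield [x_a, x_b], [y_a, y_b]

# run_parallel_test(random_data_generator)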
def build(nc, w, h,
          loss='categorical_crossentropy',
          optimizer='adadelta',
          plot=False,
          **kwargs):
    # data_shape = input_shape[0] * input_shape[1] if input_shape and None not in input_shape else None
    data_shape = w * h if None not in (w, h) else -1  # TODO: -1 or None?
    inp = Input(shape=(h, w, 3))
    shapes = valid_shapes(inp)

    if h < 161 or w < 161:
        errmsg = 'Input image tensor must be at least 161 px in both width and height'
        raise ValueError(errmsg)

    out = encoder.build(inp, valid_shapes=shapes)
    out = decoder.build(inp=inp, encoder=out, nc=nc, valid_shapes=shapes)
    # TODO: need to remove data_shape for multi-scale training
    out = Reshape((data_shape, nc))(out)
    out = Activation('softmax')(out)

    model = Model(inputs=inp, outputs=out)
    model.compile(optimizer=optimizer, loss=loss,
                  metrics=['accuracy', 'mean_squared_error'])
    name = 'icnet'

    if plot:
        plot_model(model, to_file='{}.png'.format(name), show_shapes=True)

    return model, name
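A minimal call sketch for the ICNet builder, assuming its encoder/decoder modules and the valid_shapes() helper are importable; both dimensions must satisfy the 161 px minimum enforced above, and plot=True additionally requires pydot and graphviz for plot_model:

# Hypothetical call; dimensions chosen to pass the 161 px check.
model, name = build(nc=19, w=480, h=320, optimizer='adadelta', plot=False)
model.summary()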
def _build_single_device_model(blueprint, device):
    import tensorflow as tf
    with tf.device(get_logical_device(device)):
        inputs = Input(shape=(blueprint.layout.input_size,))
        row_input = inputs
        for row in blueprint.layout.rows:
            row_input = _build_row_model(row_input, row)
        final_layer_input = _maybe_merge_inputs(row_input)
        predictions = Dense(
            blueprint.layout.output_size,
            activation=blueprint.layout.output_activation)(final_layer_input)
        return Model(inputs=inputs, outputs=predictions)