def build_critic(self):
"""Build critic network
recieve convereted tensor: raw_data, smooted_data, and downsampled_data
"""
# lower layer
lower_model = [self.build_network(self.model_config['critic_lower'], input_shape=(self.history_length, self.n_stock, 1))
for _ in range(1 + self.n_smooth + self.n_down)]
merged = Merge(lower_model, mode='concat')
# upper layer
upper_model = self.build_network(self.model_config['critic_upper'], model=merged)
# action layer
action = self.build_network(self.model_config['critic_action'], input_shape=(self.n_stock,), is_conv=False)
# output layer
merged = Merge([upper_model, action], mode='mul')
model = Sequential()
model.add(merged)
model.add(Dense(1))
return model
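Merge is the Keras 1.x layer API and was removed in Keras 2. As a rough sketch only, the same concat-then-multiply critic pattern could be written with the Keras 2 functional API as below; the Dense sizes are illustrative stand-ins for the sub-networks that build_network() would normally produce, not values read from model_config.
# Hedged sketch: Keras 2 functional-API version of the concat/mul pattern above.
from keras.layers import Input, Flatten, Dense, Concatenate, Multiply
from keras.models import Model

def build_critic_functional(history_length, n_stock, n_branches):
    # one input per converted tensor: raw, smoothed, downsampled
    branch_inputs = [Input(shape=(history_length, n_stock, 1)) for _ in range(n_branches)]
    branch_feats = [Dense(32, activation='relu')(Flatten()(x)) for x in branch_inputs]  # "lower" nets
    upper = Dense(64, activation='relu')(Concatenate()(branch_feats))                   # "upper" net
    action_in = Input(shape=(n_stock,))
    action_feats = Dense(64, activation='relu')(action_in)                              # "action" net
    q_value = Dense(1)(Multiply()([upper, action_feats]))                               # elementwise mul, then Q-value
    return Model(branch_inputs + [action_in], q_value)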
def _maybe_merge_inputs(inputs):
if isinstance(inputs, list) and len(inputs) > 1:
return Merge(mode='concat')(inputs)
elif isinstance(inputs, list) and len(inputs) == 1:
return inputs[0]
else:
return inputs
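A Keras 2 equivalent of this helper would use the functional concatenate that replaced Merge(mode='concat'); a minimal sketch:
# Hedged sketch: Keras 2 version of the helper above.
from keras.layers import concatenate

def _maybe_merge_inputs_v2(inputs):
    if isinstance(inputs, list) and len(inputs) > 1:
        return concatenate(inputs)   # replaces Merge(mode='concat')(inputs)
    elif isinstance(inputs, list) and len(inputs) == 1:
        return inputs[0]
    return inputs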
def __build_keras_model(self):
models = []
model_artist_id = Sequential()
model_artist_id.add(Embedding(100, 10, input_length=1))
model_artist_id.add(Reshape(target_shape=(10,)))
models.append(model_artist_id)
model_week = Sequential()
model_week.add(Embedding(7, 2, input_length=1))
model_week.add(Reshape(target_shape=(2,)))  # Embedding output dim is 2
models.append(model_week)
# model_gender = Sequential()
# model_gender.add(Embedding(1, 3, input_length=1))
# model_gender.add(Reshape(target_shape=(3,)))
# models.append(model_gender)
model_day = Sequential()
model_day.add(Embedding(1, 10, input_length=1))
model_day.add(Reshape(target_shape=(10,)))
models.append(model_day)
# model_language = Sequential()
# model_language.add(Embedding(1, 3, input_length=1))
# model_language.add(Reshape(target_shape=(3,)))
# models.append(model_language)
model_others = Sequential()
model_others.add(Reshape((self.others_dim,), input_shape=(self.others_dim,)))
models.append(model_others)
self.model = Sequential()
self.model.add(Merge(models, mode='concat'))
self.model.add(Dense(100, init='uniform'))
self.model.add(Activation('relu'))
self.model.add(Dense(200, init='uniform'))
self.model.add(Activation('relu'))
self.model.add(Dense(1))
self.model.compile(loss='mean_absolute_error', optimizer='adam')
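Because the outer model starts with Merge(models, mode='concat'), fit and predict expect one array per sub-model, in the order they were appended. A hedged usage sketch, written as a hypothetical method of the same class with random placeholder data:
import numpy as np

def _fit_example(self):
    # one input array per sub-model: artist_id, week, day, other features
    X = [np.random.randint(0, 100, size=(256, 1)),      # artist_id indices
         np.random.randint(0, 7, size=(256, 1)),        # weekday indices
         np.zeros((256, 1), dtype='int64'),             # day indices (Embedding input_dim=1 above)
         np.random.rand(256, self.others_dim)]          # remaining numeric features
    y = np.random.rand(256)
    self.model.fit(X, y, nb_epoch=5, batch_size=128)    # Keras 1.x keyword: nb_epoch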
def build_actor(self):
"""Build actor network
recieve convereted tensor: raw_data, smooted_data, and downsampled_data
"""
# lower layer
lower_model = [self.build_network(self.model_config['actor_lower'], input_shape=(self.history_length, self.n_stock, 1))
for _ in range(1 + self.n_smooth + self.n_down)]
merged = Merge(lower_model, mode='concat')
# upper layer
model = self.build_network(self.model_config['actor_upper'], model=merged)
return model
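The actor is fed one array per lower branch (1 + n_smooth + n_down in total), each shaped (batch, history_length, n_stock, 1). A hedged usage sketch as a hypothetical method of the same class, with random placeholder data:
import numpy as np

def _act_example(self, batch_size=32):
    actor = self.build_actor()
    inputs = [np.random.rand(batch_size, self.history_length, self.n_stock, 1)
              for _ in range(1 + self.n_smooth + self.n_down)]
    return actor.predict(inputs)    # output shape depends on the actor_upper config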
def build(self):
enc_size = self.size_of_env_observation()
argument_size = IntegerArguments.size_of_arguments
input_enc = InputLayer(batch_input_shape=(self.batch_size, enc_size), name='input_enc')
input_arg = InputLayer(batch_input_shape=(self.batch_size, argument_size), name='input_arg')
input_prg = Embedding(input_dim=PROGRAM_VEC_SIZE, output_dim=PROGRAM_KEY_VEC_SIZE, input_length=1,
batch_input_shape=(self.batch_size, 1))
f_enc = Sequential(name='f_enc')
f_enc.add(Merge([input_enc, input_arg], mode='concat'))
f_enc.add(MaxoutDense(128, nb_feature=4))
self.f_enc = f_enc
program_embedding = Sequential(name='program_embedding')
program_embedding.add(input_prg)
f_enc_convert = Sequential(name='f_enc_convert')
f_enc_convert.add(f_enc)
f_enc_convert.add(RepeatVector(1))
f_lstm = Sequential(name='f_lstm')
f_lstm.add(Merge([f_enc_convert, program_embedding], mode='concat'))
f_lstm.add(LSTM(256, return_sequences=False, stateful=True, W_regularizer=l2(0.0000001)))
f_lstm.add(Activation('relu', name='relu_lstm_1'))
f_lstm.add(RepeatVector(1))
f_lstm.add(LSTM(256, return_sequences=False, stateful=True, W_regularizer=l2(0.0000001)))
f_lstm.add(Activation('relu', name='relu_lstm_2'))
# plot(f_lstm, to_file='f_lstm.png', show_shapes=True)
f_end = Sequential(name='f_end')
f_end.add(f_lstm)
f_end.add(Dense(1, W_regularizer=l2(0.001)))
f_end.add(Activation('sigmoid', name='sigmoid_end'))
f_prog = Sequential(name='f_prog')
f_prog.add(f_lstm)
f_prog.add(Dense(PROGRAM_KEY_VEC_SIZE, activation="relu"))
f_prog.add(Dense(PROGRAM_VEC_SIZE, W_regularizer=l2(0.0001)))
f_prog.add(Activation('softmax', name='softmax_prog'))
# plot(f_prog, to_file='f_prog.png', show_shapes=True)
f_args = []
for ai in range(1, IntegerArguments.max_arg_num+1):
f_arg = Sequential(name='f_arg%s' % ai)
f_arg.add(f_lstm)
f_arg.add(Dense(IntegerArguments.depth, W_regularizer=l2(0.0001)))
f_arg.add(Activation('softmax', name='softmax_arg%s' % ai))
f_args.append(f_arg)
# plot(f_arg, to_file='f_arg.png', show_shapes=True)
self.model = Model([input_enc.input, input_arg.input, input_prg.input],
[f_end.output, f_prog.output] + [fa.output for fa in f_args],
name="npi")
self.compile_model()
plot(self.model, to_file='model.png', show_shapes=True)
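The heads end in a sigmoid (f_end) and softmaxes (f_prog and each f_arg), so compile_model() presumably pairs each output with a matching loss. What follows is only a guess at such a compile step; the optimizer choice is an assumption, not taken from the source:
# Hedged sketch of a compile step matching the output heads above.
def compile_model_example(self):
    losses = ['binary_crossentropy', 'categorical_crossentropy'] \
             + ['categorical_crossentropy'] * IntegerArguments.max_arg_num   # one loss per f_arg head
    self.model.compile(optimizer='rmsprop', loss=losses)   # optimizer is an assumption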
def build_critic(self):
"""Build critic network
recieve transformed tensor: raw_data, smooted_data, and downsampled_data
"""
nf = self.n_feature
# layer1
# smoothed input
sm_model = [Sequential() for _ in range(self.n_smooth)]
for m in sm_model:
m.add(Lambda(lambda x: x, input_shape=(self.history_length, self.n_stock, 1)))
m.add(Convolution2D(nb_filter=nf, nb_row=self.k_w, nb_col=1, border_mode='same'))
m.add(BatchNormalization(mode=2, axis=-1))
m.add(PReLU())
# down sampled input
dw_model = [Sequential() for _ in range(self.n_down)]
for m in dw_model:
m.add(Lambda(lambda x: x, input_shape=(self.history_length, self.n_stock, 1)))
m.add(Convolution2D(nb_filter=nf, nb_row=self.k_w, nb_col=1, border_mode='same'))
m.add(BatchNormalization(mode=2, axis=-1))
m.add(PReLU())
# raw input
state = Sequential()
nf = self.n_feature
state.add(Lambda(lambda x: x, input_shape=(self.history_length, self.n_stock, 1)))
state.add(Convolution2D(nb_filter=nf, nb_row=self.k_w, nb_col=1, border_mode='same'))
state.add(BatchNormalization(mode=2, axis=-1))
state.add(PReLU())
merged = Merge([state,] + sm_model + dw_model, mode='concat', concat_axis=-1)
# layer2
nf = nf * 2
model = Sequential()
model.add(merged)
model.add(Convolution2D(nb_filter=nf, nb_row=self.k_w, nb_col=1, border_mode='same'))
model.add(BatchNormalization(mode=2, axis=-1))
model.add(PReLU())
model.add(Flatten())
# layer3
model.add(Dense(self.n_hidden))
model.add(BatchNormalization(mode=1, axis=-1))
model.add(PReLU())
# layer4
model.add(Dense(int(np.sqrt(self.n_hidden))))
model.add(PReLU())
# output
model.add(Dense(2 * self.n_stock))
model.add(Reshape((self.n_stock, 2)))
return model
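With concat_axis=-1 the merged branches stack channel-wise, so the layer-2 convolution sees (1 + n_smooth + n_down) * n_feature channels, and the head reshapes 2 * n_stock outputs into (n_stock, 2). A hedged shape-check sketch as a hypothetical method of the same class:
import numpy as np

def _critic_shape_check(self, batch_size=8):
    critic = self.build_critic()
    # input order follows the Merge above: raw state first, then smoothed, then downsampled
    inputs = [np.random.rand(batch_size, self.history_length, self.n_stock, 1)
              for _ in range(1 + self.n_smooth + self.n_down)]
    q = critic.predict(inputs)
    assert q.shape == (batch_size, self.n_stock, 2)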