import tensorflow as tf
from tensorflow.contrib import layers as tflayers
from tensorflow.contrib import learn as tflearn
from tensorflow.contrib import rnn
from tensorflow.python.framework import dtypes


# supported optimizers: Ftrl, Adam, Adagrad, Momentum, SGD, RMSProp
def lstm_model(time_steps, rnn_layers, dense_layers=None, learning_rate=0.01,
               optimizer='Adagrad', learning_rate_decay_fn=None):
"""
Creates a deep model based on:
* stacked lstm cells
* an optional dense layers
:param num_units: the size of the cells.
:param rnn_layers: list of int or dict
* list of int: the steps used to instantiate the `BasicLSTMCell` cell
* list of dict: [{steps: int, keep_prob: int}, ...]
:param dense_layers: list of nodes for each layer
:return: the model definition
"""
    def lstm_cells(layers):
        # Build one BasicLSTMCell per entry; wrap it in a DropoutWrapper
        # when a keep_prob is supplied.
        if isinstance(layers[0], dict):
            return [rnn.DropoutWrapper(rnn.BasicLSTMCell(layer['num_units'], state_is_tuple=True),
                                       layer['keep_prob'])
                    if layer.get('keep_prob')
                    else rnn.BasicLSTMCell(layer['num_units'], state_is_tuple=True)
                    for layer in layers]
        return [rnn.BasicLSTMCell(num_units, state_is_tuple=True) for num_units in layers]

    def dnn_layers(input_layers, layers):
        if layers and isinstance(layers, dict):
            return tflayers.stack(input_layers, tflayers.fully_connected,
                                  layers['layers'],
                                  activation=layers.get('activation'),
                                  dropout=layers.get('dropout'))
        elif layers:
            return tflayers.stack(input_layers, tflayers.fully_connected, layers)
        else:
            return input_layers

    def _lstm_model(X, y):
        stacked_lstm = rnn.MultiRNNCell(lstm_cells(rnn_layers), state_is_tuple=True)
        # Split the input into a list of `time_steps` tensors, as expected by static_rnn.
        x_ = tf.unstack(X, num=time_steps, axis=1)
        output, _ = rnn.static_rnn(stacked_lstm, x_, dtype=dtypes.float32)
        output = dnn_layers(output[-1], dense_layers)
        prediction, loss = tflearn.models.linear_regression(output, y)
        # Exponentially decay the learning rate: learning_rate * 0.9 ** (global_step / 1000).
        # See https://www.tensorflow.org/versions/r0.10/api_docs/python/train/decaying_the_learning_rate
        train_op = tf.contrib.layers.optimize_loss(
            loss, tf.contrib.framework.get_global_step(), optimizer=optimizer,
            learning_rate=tf.train.exponential_decay(
                learning_rate, tf.contrib.framework.get_global_step(),
                decay_steps=1000, decay_rate=0.9, staircase=False))
        return prediction, loss, train_op

    return _lstm_model
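

# Usage sketch (not part of the original file): one way to wire the model function
# into a tf.contrib.learn estimator. TIMESTEPS, RNN_LAYERS, DENSE_LAYERS and LOG_DIR
# are hypothetical example values, not names defined elsewhere in this snippet.
if __name__ == '__main__':
    TIMESTEPS = 10                                       # assumed sequence length
    RNN_LAYERS = [{'num_units': 5, 'keep_prob': 0.9}]    # one LSTM layer with dropout
    DENSE_LAYERS = None                                  # no dense layers on top
    LOG_DIR = './ops_logs'                               # assumed checkpoint directory

    regressor = tflearn.SKCompat(tflearn.Estimator(
        model_fn=lstm_model(TIMESTEPS, RNN_LAYERS, dense_layers=DENSE_LAYERS),
        model_dir=LOG_DIR))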