def empty_attention_loop_state() -> AttentionLoopStateTA:
    """Create an empty attention loop state.

    The attention loop state is a technical object for storing the
    attention distributions and the context vectors over time. It is
    used with the ``tf.while_loop`` dynamic implementation of the
    decoder.

    Returns:
        An ``AttentionLoopStateTA`` holding two empty, dynamically
        sized ``tf.TensorArray`` objects: one for the attention context
        vectors in time and one for the attention distributions in time.
    """
    contexts = tf.TensorArray(
        dtype=tf.float32, size=0, dynamic_size=True,
        name="contexts")

    # NOTE(review): clear_after_read=False keeps the stored
    # distributions readable after the decoding loop finishes —
    # presumably so they can be fetched later (e.g. for inspection);
    # confirm against the callers that consume this state.
    weights = tf.TensorArray(
        dtype=tf.float32, size=0, dynamic_size=True,
        name="distributions", clear_after_read=False)

    return AttentionLoopStateTA(contexts=contexts, weights=weights)