conv_seq2seq.py source code


Project: conv_seq2seq · Author: tobyyouup · Language: Python

The snippet below is the default_params staticmethod of the ConvSeq2Seq model: it extends the base Seq2SeqModel defaults with the convolutional encoder/decoder classes, embedding and position-embedding settings, beam-search options, and a Momentum optimizer with staircase exponential learning-rate decay.
# conv_seq2seq.py (excerpt). The imports and enclosing class are filled in to
# match the project's layout (the method is a @staticmethod on the model
# class); the body of default_params comes from the snippet itself.
import tensorflow as tf

from seq2seq.models.seq2seq_model import Seq2SeqModel


class ConvSeq2Seq(Seq2SeqModel):

    @staticmethod
    def default_params():
        """Default hyperparameters for the convolutional seq2seq model."""
        params = Seq2SeqModel.default_params().copy()
        params.update({
            "encoder.class": "seq2seq.encoders.ConvEncoderFairseq",
            "encoder.params": {},  # Arbitrary parameters for the encoder
            "decoder.class": "seq2seq.decoders.ConvDecoder",
            "decoder.params": {},  # Arbitrary parameters for the decoder
            "source.max_seq_len": 50,
            "source.reverse": False,
            "target.max_seq_len": 50,
            "embedding.dim": 256,
            "embedding.init_scale": 0.04,
            "embedding.share": False,
            "position_embeddings.num_positions": 100,
            "inference.beam_search.beam_width": 0,
            "inference.beam_search.length_penalty_weight": 1.0,
            "inference.beam_search.choose_successors_fn": "choose_top_k",
            "vocab_source": "",
            "vocab_target": "",
            "optimizer.name": "Momentum",
            "optimizer.learning_rate": 0.25,
            # Arbitrary parameters for the optimizer
            "optimizer.params": {"momentum": 0.99, "use_nesterov": True},
            # "optimizer.params": {"epsilon": 0.0000008},
            "optimizer.lr_decay_type": "exponential_decay",
            "optimizer.lr_decay_steps": 5000,  # roughly one epoch of steps
            "optimizer.lr_decay_rate": 0.9,
            "optimizer.lr_start_decay_at": 0,  # start annealing at step 0
            "optimizer.lr_stop_decay_at": tf.int32.max,
            "optimizer.lr_min_learning_rate": 1e-5,
            "optimizer.lr_staircase": True,
            "optimizer.clip_gradients": 0.1,
            "optimizer.clip_embed_gradients": 5,
            "optimizer.sync_replicas": 0,
            "optimizer.sync_replicas_to_aggregate": 0,
        })
        return params
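
For context, a minimal usage sketch: the params dict returned above is typically copied and selectively overridden before the model is constructed. The constructor signature and ModeKeys usage below follow the google/seq2seq-style API that this project forks and are assumptions, not part of the snippet.

# Hypothetical usage sketch: override a couple of defaults, then build the
# model. The ConvSeq2Seq(params=..., mode=...) signature is an assumption
# based on the google/seq2seq-style API.
params = ConvSeq2Seq.default_params()
params["embedding.dim"] = 512            # wider embeddings
params["optimizer.learning_rate"] = 0.1  # lower initial learning rate

model = ConvSeq2Seq(params=params, mode=tf.contrib.learn.ModeKeys.TRAIN)

# With the original defaults (lr=0.25, decay_rate=0.9, decay_steps=5000,
# staircase=True, min lr 1e-5), the schedule works out to
#   lr(step) = max(0.25 * 0.9 ** (step // 5000), 1e-5)
for step in (0, 5000, 50000):
    print(step, max(0.25 * 0.9 ** (step // 5000), 1e-5))

The staircase flag makes the decay step-wise (the exponent uses integer division), so the learning rate drops by a factor of 0.9 once per 5000-step "epoch" rather than continuously.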