def __init__(
        self, sequential=True, use_vocab=True, init_token=None,
        eos_token=None, fix_length=None, tensor_type=torch.LongTensor,
        preprocessing=None, postprocessing=None, lower=False,
        tokenize=(lambda s: s.split()), include_lengths=False,
        batch_first=False, pad_token="<pad>", unk_token="<unk>",
        pad_first=False):
    """Configure a text-processing field.

    Stores the given options as attributes; the only transformations
    applied here are resolving ``tokenize`` through ``get_tokenizer``
    and forcing ``pad_token`` to ``None`` when the field is not
    sequential (non-sequential data is never padded).
    """
    # Core behavior flags.
    self.sequential = sequential
    self.use_vocab = use_vocab
    self.lower = lower
    self.include_lengths = include_lengths
    self.batch_first = batch_first
    self.pad_first = pad_first

    # Special tokens; padding is meaningless for non-sequential fields.
    self.init_token, self.eos_token = init_token, eos_token
    self.unk_token = unk_token
    self.pad_token = pad_token if sequential else None

    # Numericalization / pipeline settings.
    self.fix_length = fix_length
    self.tensor_type = tensor_type
    self.preprocessing, self.postprocessing = preprocessing, postprocessing

    # Resolve the tokenizer spec (callable or registered name) once, up front.
    self.tokenize = get_tokenizer(tokenize)