def __init__(self, opt, shared=None):
    """Initialize the streaming teacher, building a PyTorch DataLoader or
    reusing one handed over via ``shared``.
    """
    # Streaming data cannot be batch-sorted; force the flag off before the
    # parent initializer consumes the options.
    opt['batch_sort'] = False
    super().__init__(opt, shared)
    self.use_batch_act = self.bsz > 1
    self.num_workers = opt['numworkers']
    # Callers may supply their own batch-collation function through opt.
    collate = opt.get('collate_fn', default_collate)
    if shared:
        # Secondary instance: adopt the dataset/loader state already built
        # by the originating teacher.
        self.dataset = shared['dataset']
        self.pytorch_dataloader = shared['pytorch_dataloader']
        self.lastYs = shared['lastYs']
    else:
        # Primary instance: construct the dataset and a sequential,
        # non-shuffling loader over it.
        self.dataset = StreamDataset(opt)
        loader_kwargs = {
            'batch_size': self.bsz,
            'shuffle': False,
            'sampler': sampler.SequentialSampler(self.dataset),
            'num_workers': self.num_workers,
            'collate_fn': collate,
            'pin_memory': False,
            'drop_last': False,
        }
        self.pytorch_dataloader = DataLoader(self.dataset, **loader_kwargs)
        self.lastYs = [None] * self.bsz
    # Total number of batches, counting a final partial batch.
    self.num_batches = math.ceil(self.dataset.num_examples() / self.bsz)
    self.reset()