def forward(self, input):
    """Apply the randomized leaky ReLU through the THNN backend.

    Stores the sampled noise tensor in ``self.noise`` and saves
    ``input`` for the backward pass; returns the activation output.
    """
    self._backend = type2backend[type(input)]
    output, self.noise = input.new(), input.new()
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    self._backend.RReLU_updateOutput(
        self._backend.library_state, input, output, self.noise,
        self.lower, self.upper, self.train, self.inplace, generator)
    self.save_for_backward(input)
    return output
Python examples using `default_generator()`
def resample(self, seed=None):
    """Resample the dataset.

    Args:
        seed (int, optional): Seed for resampling. By default no seed is
            used.
    """
    # torch.manual_seed returns the (now seeded) default generator.
    gen = torch.default_generator if seed is None else torch.manual_seed(seed)
    if not self.replacement:
        # Sample without replacement: take a prefix of a full permutation.
        full = torch.randperm(len(self.dataset), generator=gen)
        self.perm = full.narrow(0, 0, len(self))
    else:
        # Sample with replacement: independent uniform indices.
        self.perm = torch.LongTensor(len(self)).random_(
            len(self.dataset), generator=gen)
def forward(self, input):
    """Apply the randomized leaky ReLU through the THNN backend.

    Stores the sampled noise tensor in ``self.noise`` and saves
    ``input`` for the backward pass; returns the activation output.
    """
    self._backend = type2backend[type(input)]
    output, self.noise = input.new(), input.new()
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    self._backend.RReLU_updateOutput(
        self._backend.library_state, input, output, self.noise,
        self.lower, self.upper, self.train, self.inplace, generator)
    self.save_for_backward(input)
    return output
def forward(self, input):
    """Apply the randomized leaky ReLU through the THNN backend.

    Stores the sampled noise tensor in ``self.noise`` and saves
    ``input`` for the backward pass; returns the activation output.
    """
    self._backend = type2backend[type(input)]
    output, self.noise = input.new(), input.new()
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    self._backend.RReLU_updateOutput(
        self._backend.library_state, input, output, self.noise,
        self.lower, self.upper, self.train, self.inplace, generator)
    self.save_for_backward(input)
    return output
def forward(ctx, input, lower, upper, train, inplace):
    """Run the THNN RReLU forward kernel.

    Records ``lower``/``upper``/``train``/``inplace`` and the sampled
    noise on ``ctx`` and saves ``input`` for the backward pass.
    """
    ctx.lower, ctx.upper = lower, upper
    ctx.train, ctx.inplace = train, inplace
    ctx._backend = type2backend[type(input)]
    if inplace:
        # Reuse the input buffer and tell autograd it was modified.
        ctx.mark_dirty(input)
        output = input
    else:
        output = input.new(input.size())
    ctx.noise = input.new()
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    ctx._backend.RReLU_updateOutput(
        ctx._backend.library_state, input, output, ctx.noise,
        lower, upper, train, inplace, generator)
    ctx.save_for_backward(input)
    return output
def updateOutput(self, input):
    """Compute the RReLU activation into ``self.output`` and return it."""
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    self._backend.RReLU_updateOutput(
        self._backend.library_state, input, self.output, self.noise,
        self.lower, self.upper, self.train, self.inplace, generator)
    return self.output
def updateOutput(self, input):
    """Compute the RReLU activation into ``self.output`` and return it."""
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    self._backend.RReLU_updateOutput(
        self._backend.library_state, input, self.output, self.noise,
        self.lower, self.upper, self.train, self.inplace, generator)
    return self.output
def updateOutput(self, input):
    """Compute the RReLU activation into ``self.output`` and return it."""
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    self._backend.RReLU_updateOutput(
        self._backend.library_state, input, self.output, self.noise,
        self.lower, self.upper, self.train, self.inplace, generator)
    return self.output
def updateOutput(self, input):
    """Compute the RReLU activation into ``self.output`` and return it."""
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    self._backend.RReLU_updateOutput(
        self._backend.library_state, input, self.output, self.noise,
        self.lower, self.upper, self.train, self.inplace, generator)
    return self.output
def updateOutput(self, input):
    """Compute the RReLU activation into ``self.output`` and return it."""
    # CUDA kernels use their own RNG state; 0 signals the backend to do so.
    generator = 0 if input.is_cuda else torch.default_generator
    self._backend.RReLU_updateOutput(
        self._backend.library_state, input, self.output, self.noise,
        self.lower, self.upper, self.train, self.inplace, generator)
    return self.output