Python 示例源码：torch.default_generator 的实际用法（摘自多个 PyTorch 项目）

activation.py 文件源码 项目:pytorch-dist 作者: apaszke 项目源码 文件源码 阅读 44 收藏 0 点赞 0 评论 0
def forward(self, input):
        """Compute the RReLU forward pass through the THNN backend.

        Stores the sampled noise on ``self.noise`` and saves ``input`` for
        the backward pass. Returns a freshly allocated output tensor.
        """
        self._backend = type2backend[type(input)]
        output = input.new()
        self.noise = input.new()
        # The CUDA kernel manages its own RNG states, so it takes 0;
        # the CPU kernel needs an explicit generator.
        rng = 0 if input.is_cuda else torch.default_generator
        self._backend.RReLU_updateOutput(
            self._backend.library_state, input, output, self.noise,
            self.lower, self.upper, self.train, self.inplace, rng)
        self.save_for_backward(input)
        return output
shuffledataset.py 文件源码 项目:tnt 作者: pytorch 项目源码 文件源码 阅读 33 收藏 0 点赞 0 评论 0
def resample(self, seed=None):
        """Resample the dataset.

        Args:
            seed (int, optional): Seed for resampling. By default no seed is
                used.
        """
        gen = torch.default_generator if seed is None else torch.manual_seed(seed)

        size = len(self)
        if self.replacement:
            # Draw `size` indices uniformly (with replacement) from the dataset.
            self.perm = torch.LongTensor(size).random_(
                len(self.dataset), generator=gen)
        else:
            # Permute all dataset indices, then keep only the first `size`.
            self.perm = torch.randperm(
                len(self.dataset), generator=gen).narrow(0, 0, size)
activation.py 文件源码 项目:pytorch 作者: tylergenter 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def forward(self, input):
        """Run RReLU via the type-matched backend, saving input for backward.

        Allocates ``output`` and ``self.noise`` as empty tensors of the
        input's type and lets the backend kernel fill them in.
        """
        self._backend = type2backend[type(input)]
        output = input.new()
        self.noise = input.new()
        # CUDA ignores the generator argument (pass 0); CPU sampling
        # draws from the default CPU generator.
        generator = 0 if input.is_cuda else torch.default_generator
        self._backend.RReLU_updateOutput(
            self._backend.library_state, input, output, self.noise,
            self.lower, self.upper, self.train, self.inplace, generator)
        self.save_for_backward(input)
        return output
activation.py 文件源码 项目:pytorch-coriander 作者: hughperkins 项目源码 文件源码 阅读 33 收藏 0 点赞 0 评论 0
def forward(self, input):
        """Forward pass of RReLU delegated to the THNN/THCUNN backend.

        The noise tensor (per-element sampled slopes) is kept on ``self``
        so the backward pass can reuse it.
        """
        backend = type2backend[type(input)]
        self._backend = backend
        output = input.new()
        self.noise = input.new()
        # Generator is only meaningful on CPU; the CUDA path passes 0.
        rng = 0 if input.is_cuda else torch.default_generator
        backend.RReLU_updateOutput(
            backend.library_state, input, output, self.noise,
            self.lower, self.upper, self.train, self.inplace, rng)
        self.save_for_backward(input)
        return output
activation.py 文件源码 项目:pytorch 作者: ezyang 项目源码 文件源码 阅读 36 收藏 0 点赞 0 评论 0
def forward(ctx, input, lower, upper, train, inplace):
        """New-style autograd RReLU forward.

        Stashes the hyper-parameters and noise on ``ctx`` for the backward
        pass; honors in-place computation by marking the input dirty.
        """
        ctx.lower, ctx.upper = lower, upper
        ctx.train, ctx.inplace = train, inplace
        ctx._backend = type2backend[type(input)]
        if inplace:
            # Reuse the input buffer; autograd must know it was mutated.
            ctx.mark_dirty(input)
            output = input
        else:
            output = input.new(input.size())
        ctx.noise = input.new()
        # CPU kernels sample from the default generator; CUDA passes 0
        # and uses its own RNG states.
        rng = 0 if input.is_cuda else torch.default_generator
        ctx._backend.RReLU_updateOutput(
            ctx._backend.library_state, input, output, ctx.noise,
            lower, upper, train, inplace, rng)
        ctx.save_for_backward(input)
        return output
RReLU.py 文件源码 项目:pytorch-dist 作者: apaszke 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def updateOutput(self, input):
        """Legacy-nn RReLU forward: fills ``self.output`` in the backend.

        ``self.noise`` receives the per-element sampled negative slopes.
        """
        # CUDA kernels keep their own RNG states (pass 0); the CPU kernel
        # draws from the default generator.
        rng = 0 if input.is_cuda else torch.default_generator
        backend = self._backend
        backend.RReLU_updateOutput(
            backend.library_state, input, self.output, self.noise,
            self.lower, self.upper, self.train, self.inplace, rng)
        return self.output
RReLU.py 文件源码 项目:pytorch 作者: tylergenter 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def updateOutput(self, input):
        """Compute RReLU into ``self.output`` via the backend kernel.

        Returns ``self.output`` per the legacy-nn module convention.
        """
        be = self._backend
        # Generator is only used on the CPU path; CUDA passes 0.
        generator = 0 if input.is_cuda else torch.default_generator
        be.RReLU_updateOutput(
            be.library_state, input, self.output, self.noise,
            self.lower, self.upper, self.train, self.inplace, generator)
        return self.output
RReLU.py 文件源码 项目:pytorch-coriander 作者: hughperkins 项目源码 文件源码 阅读 24 收藏 0 点赞 0 评论 0
def updateOutput(self, input):
        """Legacy RReLU forward; writes into ``self.output`` and returns it.

        The backend samples noise (negative-region slopes) into
        ``self.noise`` when ``self.train`` is true.
        """
        # CPU sampling uses the default generator; CUDA expects 0 here.
        rng = 0 if input.is_cuda else torch.default_generator
        backend = self._backend
        backend.RReLU_updateOutput(
            backend.library_state, input, self.output, self.noise,
            self.lower, self.upper, self.train, self.inplace, rng)
        return self.output
RReLU.py 文件源码 项目:pytorch 作者: ezyang 项目源码 文件源码 阅读 34 收藏 0 点赞 0 评论 0
def updateOutput(self, input):
        """Dispatch the RReLU forward to the backend kernel.

        Output and noise buffers live on the module (``self.output`` /
        ``self.noise``), per the legacy-nn API.
        """
        be = self._backend
        # 0 on CUDA (backend-managed RNG); default CPU generator otherwise.
        generator = 0 if input.is_cuda else torch.default_generator
        be.RReLU_updateOutput(
            be.library_state, input, self.output, self.noise,
            self.lower, self.upper, self.train, self.inplace, generator)
        return self.output
RReLU.py 文件源码 项目:pytorch 作者: pytorch 项目源码 文件源码 阅读 38 收藏 0 点赞 0 评论 0
def updateOutput(self, input):
        """Run the backend RReLU forward and return ``self.output``.

        ``lower``/``upper`` bound the uniform slope distribution; ``train``
        selects sampling vs. the deterministic average slope.
        """
        # CUDA path passes 0 (its RNG states are backend-managed);
        # CPU path uses the process default generator.
        rng = 0 if input.is_cuda else torch.default_generator
        backend = self._backend
        backend.RReLU_updateOutput(
            backend.library_state, input, self.output, self.noise,
            self.lower, self.upper, self.train, self.inplace, rng)
        return self.output


问题


面经


文章

微信
公众号

扫码关注公众号