我们从Python开源项目中,提取了以下10个代码示例,用于说明如何使用torch.default_generator(它是一个生成器属性,而非可调用函数)。
def forward(self, input):
    """RReLU forward pass (legacy autograd Function, state stored on self).

    Looks up the THNN backend for the input's tensor type, allocates the
    output and per-element noise buffers, and delegates to the backend's
    RReLU_updateOutput kernel. The input is saved for the backward pass.
    """
    backend = type2backend[type(input)]
    self._backend = backend
    output = input.new()
    self.noise = input.new()
    # The CPU kernel draws its random slopes from a CPU RNG; the CUDA
    # kernel manages its own RNG state, so 0 is passed as a placeholder.
    generator = 0 if input.is_cuda else torch.default_generator
    backend.RReLU_updateOutput(
        backend.library_state,
        input,
        output,
        self.noise,
        self.lower,
        self.upper,
        self.train,
        self.inplace,
        generator,
    )
    self.save_for_backward(input)
    return output
def resample(self, seed=None):
    """Resample the dataset.

    Rebuilds ``self.perm``, the index permutation used to draw samples
    from ``self.dataset``. With replacement, indices are drawn i.i.d.
    uniformly; without replacement, a prefix of a full random permutation
    is taken.

    Args:
        seed (int, optional): Seed for resampling. By default no seed is
            used (the global default generator's current state applies).
    """
    if seed is None:
        gen = torch.default_generator
    else:
        # manual_seed seeds the global RNG and returns that generator.
        gen = torch.manual_seed(seed)
    count = len(self)
    if self.replacement:
        self.perm = torch.LongTensor(count).random_(
            len(self.dataset), generator=gen)
    else:
        full = torch.randperm(len(self.dataset), generator=gen)
        self.perm = full.narrow(0, 0, count)
def forward(ctx, input, lower, upper, train, inplace):
    """RReLU forward pass (ctx-style autograd Function).

    Stores the activation's configuration on ``ctx``, optionally reuses
    the input buffer when ``inplace`` is requested, and delegates the
    actual computation to the THNN backend's RReLU_updateOutput kernel.
    The input is saved for backward.
    """
    ctx.lower = lower
    ctx.upper = upper
    ctx.train = train
    ctx.inplace = inplace
    ctx._backend = type2backend[type(input)]
    if ctx.inplace:
        # Overwrite the input buffer; mark it dirty so autograd knows.
        ctx.mark_dirty(input)
        output = input
    else:
        output = input.new(input.size())
    ctx.noise = input.new()
    # CUDA kernels manage their own RNG; pass 0 instead of a CPU generator.
    generator = 0 if input.is_cuda else torch.default_generator
    ctx._backend.RReLU_updateOutput(
        ctx._backend.library_state,
        input,
        output,
        ctx.noise,
        ctx.lower,
        ctx.upper,
        ctx.train,
        ctx.inplace,
        generator,
    )
    ctx.save_for_backward(input)
    return output
def updateOutput(self, input):
    """Compute the RReLU activation into ``self.output`` and return it.

    Delegates to the THNN backend bound to this module; ``self.noise``
    receives the per-element random slopes used in training mode.
    """
    backend = self._backend
    # CPU path needs an explicit CPU generator; CUDA ignores it (0).
    generator = 0 if input.is_cuda else torch.default_generator
    backend.RReLU_updateOutput(
        backend.library_state,
        input,
        self.output,
        self.noise,
        self.lower,
        self.upper,
        self.train,
        self.inplace,
        generator,
    )
    return self.output