The following 16 code examples, extracted from open-source Python projects, illustrate how chainer.Function() is used.
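Before the extracted examples, here is a minimal sketch (not one of the 16 snippets) of the typical usage pattern: subclass chainer.Function, implement a forward/backward pair, and apply an instance to Variables. The MulAdd class and the data are illustrative, not taken from any of the projects below.

import numpy as np
import chainer


class MulAdd(chainer.Function):
    """Computes x * y + z as a single differentiable operation."""

    def forward_cpu(self, inputs):
        x, y, z = inputs
        # The return value must be a tuple, even for a single output.
        return x * y + z,

    def backward_cpu(self, inputs, grad_outputs):
        x, y, z = inputs
        gw, = grad_outputs
        # One gradient per input, again returned as a tuple.
        return y * gw, x * gw, gw


x = chainer.Variable(np.random.rand(3).astype(np.float32))
y = chainer.Variable(np.random.rand(3).astype(np.float32))
z = chainer.Variable(np.random.rand(3).astype(np.float32))
w = MulAdd()(x, y, z)                  # a Function instance is applied like a function
w.grad = np.ones(3, dtype=np.float32)
w.backward()                           # fills x.grad, y.grad and z.grad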
def forward(self, inputs):
    """Applies forward propagation to input arrays.

    It delegates the procedure to :meth:`forward_cpu` or :meth:`forward_gpu`
    by default. Which it selects is determined by the type of input arrays.
    Implementations of :class:`Function` must implement either CPU/GPU
    methods or this method.

    Args:
        inputs: Tuple of input array(s).

    Returns:
        Tuple of output array(s).

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    if any(isinstance(x, cuda.ndarray) for x in inputs):
        return self.forward_gpu(inputs)
    else:
        return self.forward_cpu(inputs)
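Because forward dispatches on the type of the input arrays, a subclass can instead override forward itself and stay device-agnostic by picking numpy or cupy with cuda.get_array_module. A hedged sketch (the Square class is illustrative):

import chainer
from chainer import cuda


class Square(chainer.Function):
    """Elementwise square, written once for both CPU and GPU arrays."""

    def forward(self, inputs):
        x, = inputs
        xp = cuda.get_array_module(x)   # numpy for CPU input, cupy for GPU input
        return xp.square(x),

    def backward(self, inputs, grad_outputs):
        x, = inputs
        gy, = grad_outputs
        return 2 * x * gy,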
def forward_cpu(self, inputs):
    """Applies forward propagation to input arrays on CPU.

    Args:
        inputs: Tuple of :class:`numpy.ndarray` object(s).

    Returns:
        tuple: Tuple of :class:`numpy.ndarray` object(s).

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    raise NotImplementedError()
def forward_gpu(self, inputs):
    """Applies forward propagation to input arrays on GPU.

    Args:
        inputs: Tuple of :class:`cupy.ndarray` object(s).

    Returns:
        tuple: Tuple of :class:`cupy.ndarray` object(s).

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    raise NotImplementedError()
def backward_gpu(self, inputs, grad_outputs):
    """Applies backprop to output gradient arrays on GPU.

    Args:
        inputs: Tuple of input :class:`cupy.ndarray` object(s).
        grad_outputs: Tuple of output gradient :class:`cupy.ndarray`
            object(s).

    Returns:
        tuple: Tuple of input gradient :class:`cupy.ndarray` object(s).
        Some or all of them can be ``None``, if the function is not
        differentiable on corresponding inputs.

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    return tuple(None for _ in inputs)
def check_type_mismatch(self, x_data):
    xp = cuda.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return [1]

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
        y.backward()
def check_dtype_mismatch(self, x_data):
    xp = cuda.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return xp.array([1], np.int32),

    x = chainer.Variable(x_data)
    y = DummyFunction()(x)
    with six.assertRaisesRegex(self, TypeError, 'dummy_function'):
        y.backward()
def check_traceback(self, x_data):
    xp = cuda.get_array_module(x_data)

    class DummyFunction(chainer.Function):
        label = 'dummy_function'

        def forward(self, inputs):
            return xp.array(1, np.float32),

        def backward(self, inputs, grads):
            return xp.array([1, 2], np.float32),

    x = chainer.Variable(x_data)
    line = inspect.currentframe().f_lineno + 1
    y = DummyFunction()(x)  # `line` is THIS line
    try:
        y.backward()
        self.fail()
    except ValueError as e:
        self.assertIn('Stacktrace', str(e))
        self.assertIn('line %d' % line, str(e))
def local_function_hooks(self):
    """Ordered dictionary of registered function hooks.

    Contrary to ``chainer.thread_local.function_hooks``, which registers
    its elements to all functions, function hooks in this property are
    specific to this function.

    """
    if not hasattr(self, '_local_function_hooks'):
        self._local_function_hooks = collections.OrderedDict()
    return self._local_function_hooks
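A hook can be attached to a single Function instance through add_hook, which stores it in local_function_hooks so it fires only for that instance. A hedged sketch, reusing the MulAdd class and the x, y, z Variables from the first sketch above and Chainer's built-in PrintHook:

import chainer

f = MulAdd()
f.add_hook(chainer.function_hooks.PrintHook())   # registered only on this instance
print(list(f.local_function_hooks.keys()))       # expected to contain 'PrintHook'
w = f(x, y, z)                                   # the hook fires for this call only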
def backward(self, inputs, grad_outputs):
    """Applies backprop to output gradient arrays.

    It delegates the procedure to :meth:`backward_cpu` or
    :meth:`backward_gpu` by default. Which it selects is determined by the
    type of input arrays and output gradient arrays. Implementations of
    :class:`Function` must implement either CPU/GPU methods or this method,
    if the function is intended to be backprop-ed.

    Args:
        inputs: Tuple of input arrays.
        grad_outputs: Tuple of output gradient arrays.

    Returns:
        tuple: Tuple of input gradient arrays. Some or all of them can be
        ``None``, if the function is not differentiable on inputs.

    .. warning::
        Implementations of :class:`Function` must take care that the
        return value must be a tuple even if it returns only one array.

    """
    if any(isinstance(x, cuda.ndarray) for x in inputs + grad_outputs):
        return self.backward_gpu(inputs, grad_outputs)
    else:
        return self.backward_cpu(inputs, grad_outputs)
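The ``None`` convention for non-differentiable inputs can be used directly in a custom backward. A rough CPU-only sketch (the Gather class and its scatter logic are illustrative, not part of the extracted snippets):

import numpy as np
import chainer


class Gather(chainer.Function):
    """Selects rows of x by integer indices; the index input has no gradient."""

    def forward_cpu(self, inputs):
        x, indices = inputs
        return x[indices],

    def backward_cpu(self, inputs, grad_outputs):
        x, indices = inputs
        gy, = grad_outputs
        gx = np.zeros_like(x)
        np.add.at(gx, indices, gy)   # scatter the output gradient back onto x
        return gx, None              # None marks the non-differentiable input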
def forward_preprocess(self, function, in_data):
    """Callback function invoked before forward propagation.

    Args:
        function(~chainer.Function): Function object to which
            the function hook is registered.
        in_data(tuple of numpy.ndarray or tuple of cupy.ndarray):
            Input data of forward propagation.

    """
    pass
def forward_postprocess(self, function, in_data):
    """Callback function invoked after forward propagation.

    Args:
        function(~chainer.Function): Function object to which
            the function hook is registered.
        in_data(tuple of numpy.ndarray or tuple of cupy.ndarray):
            Input data of forward propagation.

    """
    pass

# backward
def backward_preprocess(self, function, in_data, out_grad):
    """Callback function invoked before backward propagation.

    Args:
        function(~chainer.Function): Function object to which
            the function hook is registered.
        in_data(tuple of numpy.ndarray or tuple of cupy.ndarray):
            Input data of forward propagation.
        out_grad(tuple of numpy.ndarray or tuple of cupy.ndarray):
            Gradient data of backward propagation.

    """
    pass
def backward_postprocess(self, function, in_data, out_grad):
    """Callback function invoked after backward propagation.

    Args:
        function(~chainer.Function): Function object to which
            the function hook is registered.
        in_data(tuple of numpy.ndarray or tuple of cupy.ndarray):
            Input of forward propagation.
        out_grad(tuple of numpy.ndarray or tuple of cupy.ndarray):
            Gradient data of backward propagation.

    """
    pass
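The four callbacks above are the full FunctionHook interface; a custom hook only needs to override the ones it cares about. A hedged sketch (the ShapeLogger class is illustrative, and `model` and `x` are placeholders for an actual network and input):

import chainer


class ShapeLogger(chainer.FunctionHook):
    """Logs input shapes before each forward and gradient shapes after each backward."""

    name = 'ShapeLogger'

    def forward_preprocess(self, function, in_data):
        print('%s <- %s' % (function.label, [d.shape for d in in_data]))

    def backward_postprocess(self, function, in_data, out_grad):
        shapes = [None if g is None else g.shape for g in out_grad]
        print('%s grad <- %s' % (function.label, shapes))


# Used as a context manager, the hook applies to every function called inside the block.
with ShapeLogger():
    loss = model(x)
    loss.backward()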
def test_label(self):
    self.assertEqual(self.f.label, 'Function')
def setUp(self):
    self.original_debug = chainer.is_debug()
    chainer.set_debug(True)
    self.one = numpy.array([1], numpy.float32)
    self.f = chainer.Function()
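The debug-mode tests above rely on chainer.set_debug: in debug mode Chainer validates the gradients returned by backward (type, dtype, shape) and records where each function was applied so the stack trace can be reported in the error, as the check_* snippets exercise. A minimal sketch of toggling it safely, reusing the MulAdd sketch from the start of this page:

import numpy as np
import chainer

original = chainer.is_debug()
chainer.set_debug(True)
try:
    # run the forward/backward passes to be checked
    w = MulAdd()(x, y, z)
    w.grad = np.ones(3, dtype=np.float32)
    w.backward()
finally:
    chainer.set_debug(original)   # restore the previous setting, as setUp/tearDown would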