The following code examples, extracted from open source Python projects, illustrate how to use tensorflow.python.util.nest.assert_same_structure().
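Before the extracted examples, here is a minimal self-contained sketch (not taken from any of the projects below) of the function's basic contract: it returns silently when two nested structures match, and raises an error when they do not.

from tensorflow.python.util import nest

a = (1, (2, 3))
b = ("x", ("y", "z"))             # same nesting, different leaf values
nest.assert_same_structure(a, b)  # passes: only the structure is compared

c = (1, 2, 3)                     # a flat 3-tuple: different structure
try:
  nest.assert_same_structure(a, c)
except ValueError as e:
  print("structures differ:", e)

Note that by default check_types=True, so two structures with the same shape but different container types (e.g. a list versus a tuple) fail with a TypeError rather than a ValueError.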
def __call__(self, inputs, state, scope=None):
  """Run the cell and add its inputs to its outputs.

  Args:
    inputs: cell inputs.
    state: cell state.
    scope: optional cell scope.

  Returns:
    Tuple of cell outputs and new state.

  Raises:
    TypeError: If cell inputs and outputs have different structure (type).
    ValueError: If cell inputs and outputs have different structure (value).
  """
  outputs, new_state = self._cell(inputs, state, scope=scope)
  nest.assert_same_structure(inputs, outputs)

  # Ensure shapes match
  def assert_shape_match(inp, out):
    inp.get_shape().assert_is_compatible_with(out.get_shape())

  nest.map_structure(assert_shape_match, inputs, outputs)
  res_outputs = nest.map_structure(
      lambda inp, out: math_ops.scalar_mul(0.5, inp + out), inputs, outputs)
  return res_outputs, new_state
def gnmt_residual_fn(inputs, outputs):
  """Residual function that handles different inputs and outputs inner dims.

  Args:
    inputs: cell inputs, this is actual inputs concatenated with the attention
      vector.
    outputs: cell outputs.

  Returns:
    outputs + actual inputs
  """
  def split_input(inp, out):
    out_dim = out.get_shape().as_list()[-1]
    inp_dim = inp.get_shape().as_list()[-1]
    return tf.split(inp, [out_dim, inp_dim - out_dim], axis=-1)

  actual_inputs, _ = nest.map_structure(split_input, inputs, outputs)

  def assert_shape_match(inp, out):
    inp.get_shape().assert_is_compatible_with(out.get_shape())

  nest.assert_same_structure(actual_inputs, outputs)
  nest.map_structure(assert_shape_match, actual_inputs, outputs)
  return nest.map_structure(lambda inp, out: inp + out, actual_inputs, outputs)
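A quick shape walk-through of the function above (the dimensions are illustrative assumptions, not from the source project): with a 16-unit cell whose input is the previous output concatenated with an 8-dim attention vector, the split recovers the pre-attention inputs so the residual add is well-shaped.

# Illustrative shapes (assumptions): cell output dim 16, attention dim 8.
inputs = tf.zeros([8, 24])   # actual inputs (16) concatenated with attention (8)
outputs = tf.zeros([8, 16])  # cell outputs
res = gnmt_residual_fn(inputs, outputs)
print(res.shape)             # (8, 16): outputs + first 16 dims of inputs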
def __call__(self, inputs, state, scope='higway_cell'):
  """Run the cell and apply the highway connection between its inputs and outputs.

  Args:
    inputs: cell inputs.
    state: cell state.
    scope: optional cell scope.

  Returns:
    Tuple of cell outputs and new state.

  Raises:
    TypeError: If cell inputs and outputs have different structure (type).
    ValueError: If cell inputs and outputs have different structure (value).
  """
  outputs, new_state = self._cell(inputs, state, scope=scope)
  nest.assert_same_structure(inputs, outputs)

  # Ensure shapes match
  def assert_shape_match(inp, out):
    inp.get_shape().assert_is_compatible_with(out.get_shape())

  nest.map_structure(assert_shape_match, inputs, outputs)
  res_outputs = nest.map_structure(self._highway, inputs, outputs)
  return (res_outputs, new_state)
def __call__(self, inputs, state, scope=None):
  """Run the cell and then apply the residual_fn on its inputs to its outputs.

  Args:
    inputs: cell inputs.
    state: cell state.
    scope: optional cell scope.

  Returns:
    Tuple of cell outputs and new state.

  Raises:
    TypeError: If cell inputs and outputs have different structure (type).
    ValueError: If cell inputs and outputs have different structure (value).
  """
  outputs, new_state = self._cell(inputs, state, scope=scope)

  # Ensure shapes match
  def assert_shape_match(inp, out):
    inp.get_shape().assert_is_compatible_with(out.get_shape())

  def default_residual_fn(inputs, outputs):
    nest.assert_same_structure(inputs, outputs)
    nest.map_structure(assert_shape_match, inputs, outputs)
    return nest.map_structure(lambda inp, out: inp + out, inputs, outputs)

  res_outputs = (self._residual_fn or default_residual_fn)(inputs, outputs)
  return (res_outputs, new_state)
def __call__(self, inputs, state, scope=None):
  """Run the cell and add its inputs to its outputs.

  Args:
    inputs: cell inputs.
    state: cell state.
    scope: optional cell scope.

  Returns:
    Tuple of cell outputs and new state.

  Raises:
    TypeError: If cell inputs and outputs have different structure (type).
    ValueError: If cell inputs and outputs have different structure (value).
  """
  outputs, new_state = self._cell(inputs, state, scope=scope)
  nest.assert_same_structure(inputs, outputs)

  # Ensure shapes match
  def assert_shape_match(inp, out):
    inp.get_shape().assert_is_compatible_with(out.get_shape())

  nest.map_structure(assert_shape_match, inputs, outputs)
  res_outputs = nest.map_structure(
      lambda inp, out: inp + out, inputs, outputs)
  return (res_outputs, new_state)
def _assert_sructures_equal(self, struct1, struct2):
  # Check that both nests match structurally, then compare the leaf values.
  tf_nest.assert_same_structure(struct1, struct2)
  for a, b in zip(tf_nest.flatten(struct1), tf_nest.flatten(struct2)):
    np.testing.assert_array_equal(a, b)
def __init__(self, initial_state, mask=None, name="trainable_initial_state"):
  """Constructs the Module that introduces a trainable state in the graph.

  It receives an initial state that will be used as the initial values for the
  trainable variables that the module contains, and optionally a mask that
  indicates the parts of the initial state that should be learnable.

  Args:
    initial_state: tensor or arbitrarily nested iterables of tensors.
    mask: optional boolean mask. It should have the same nested structure as
      the given initial_state.
    name: module name.

  Raises:
    TypeError: if mask is not a list of booleans or None.
  """
  super(TrainableInitialState, self).__init__(name=name)

  # Since python 2.7, DeprecationWarning is ignored by default.
  # Turn on the warning:
  warnings.simplefilter("always", DeprecationWarning)
  warnings.warn("Use the trainable flag in initial_state instead.",
                DeprecationWarning, stacklevel=2)

  if mask is not None:
    flat_mask = nest.flatten(mask)
    if not all([isinstance(m, bool) for m in flat_mask]):
      raise TypeError("Mask should be None or a list of boolean values.")
    nest.assert_same_structure(initial_state, mask)

  self._mask = mask
  self._initial_state = initial_state
def testInitialStateTuple(self, trainable, use_custom_initial_value,
                          state_size):
  batch_size = 6

  # Set the attribute on the class since we can't set properties of
  # abstract classes.
  snt.RNNCore.state_size = state_size
  flat_state_size = nest.flatten(state_size)
  core = snt.RNNCore(name="dummy_core")
  if use_custom_initial_value:
    flat_initializer = [tf.constant_initializer(2)] * len(flat_state_size)
    trainable_initializers = nest.pack_sequence_as(
        structure=state_size, flat_sequence=flat_initializer)
  else:
    trainable_initializers = None
  initial_state = core.initial_state(
      batch_size, dtype=tf.float32, trainable=trainable,
      trainable_initializers=trainable_initializers)

  nest.assert_same_structure(initial_state, state_size)
  flat_initial_state = nest.flatten(initial_state)

  for state, size in zip(flat_initial_state, flat_state_size):
    self.assertEqual(state.get_shape(), [batch_size, size])

  with self.test_session() as sess:
    tf.global_variables_initializer().run()
    flat_initial_state_value = sess.run(flat_initial_state)
    for value, size in zip(flat_initial_state_value, flat_state_size):
      expected_initial_state = np.empty([batch_size, size])
      if not trainable:
        expected_initial_state.fill(0)
      elif use_custom_initial_value:
        expected_initial_state.fill(2)
      else:
        value_row = value[0]
        expected_initial_state = np.tile(value_row, (batch_size, 1))
      self.assertAllClose(value, expected_initial_state)
def _create(self):
  nest.assert_same_structure(self.encoder_outputs.final_state,
                             self.decoder_state_size)
  return self.encoder_outputs.final_state
def _assert_correct_outputs(self, initial_state_):
  nest.assert_same_structure(initial_state_, self.decoder_cell.state_size)
  nest.assert_same_structure(initial_state_,
                             self.encoder_outputs.final_state)

  encoder_state_flat = nest.flatten(self.encoder_outputs.final_state)
  with self.test_session() as sess:
    encoder_state_flat_ = sess.run(encoder_state_flat)

  initial_state_flat_ = nest.flatten(initial_state_)
  for e_dec, e_enc in zip(initial_state_flat_, encoder_state_flat_):
    self.assertAllEqual(e_dec, e_enc)
def _assert_correct_outputs(self, initial_state_):
  nest.assert_same_structure(initial_state_, self.decoder_cell.state_size)
def testInitialStateComputation(self, tuple_state, mask):
  if tuple_state:
    initial_state = (tf.fill([BATCH_SIZE, 6], 2),
                     (tf.fill([BATCH_SIZE, 7], 3),
                      tf.fill([BATCH_SIZE, 8], 4)))
  else:
    initial_state = tf.fill([BATCH_SIZE, 9], 10)

  trainable_state_module = snt.TrainableInitialState(initial_state, mask=mask)
  trainable_state = trainable_state_module()
  nest.assert_same_structure(initial_state, trainable_state)
  flat_initial_state = nest.flatten(initial_state)
  flat_trainable_state = nest.flatten(trainable_state)
  if mask is not None:
    flat_mask = nest.flatten(mask)
  else:
    flat_mask = (True,) * len(flat_initial_state)

  with self.test_session() as sess:
    sess.run(tf.global_variables_initializer())

    # Check that all variables are initialized correctly and that the
    # returned state has the same values as the one provided.
    for trainable_state, initial_state in zip(flat_trainable_state,
                                              flat_initial_state):
      self.assertAllEqual(sess.run(trainable_state), sess.run(initial_state))

    # Change the value of all the trainable variables to ones.
    for variable in tf.trainable_variables():
      sess.run(tf.assign(variable, tf.ones_like(variable)))

    # Check that the values of the initial_states have changed if and only if
    # they are trainable.
    for trainable_state, initial_state, mask in zip(flat_trainable_state,
                                                    flat_initial_state,
                                                    flat_mask):
      trainable_state_value = sess.run(trainable_state)
      initial_state_value = sess.run(initial_state)
      if mask:
        expected_value = np.ones_like(initial_state_value)
      else:
        expected_value = initial_state_value
      self.assertAllEqual(trainable_state_value, expected_value)
def trainable_initial_state(batch_size, state_size, dtype, initializers=None):
  """Creates an initial state consisting of trainable variables.

  The trainable variables are created with the same shapes as the elements of
  `state_size` and are tiled to produce an initial state.

  Args:
    batch_size: An int, or scalar int32 Tensor representing the batch size.
    state_size: A `TensorShape` or nested tuple of `TensorShape`s to use for
      the shape of the trainable variables.
    dtype: The data type used to create the variables and thus initial state.
    initializers: An optional container of the same structure as `state_size`
      containing initializers for the variables.

  Returns:
    A `Tensor` or nested tuple of `Tensor`s with the same size and structure
    as `state_size`, where each `Tensor` is a tiled trainable `Variable`.

  Raises:
    ValueError: if the user passes initializers that are not functions.
  """
  flat_state_size = nest.flatten(state_size)

  if not initializers:
    flat_initializer = tuple(tf.zeros_initializer for _ in flat_state_size)
  else:
    nest.assert_same_structure(initializers, state_size)
    flat_initializer = nest.flatten(initializers)
    if not all([callable(init) for init in flat_initializer]):
      raise ValueError("Not all the passed initializers are callable objects.")

  # Produce names for the variables. In the case of a tuple or nested tuple,
  # this is just a sequence of numbers, but for a flat `namedtuple`, we use
  # the field names. NOTE: this could be extended to nested `namedtuple`s,
  # but for now that's extra complexity that's not used anywhere.
  try:
    names = ["init_{}".format(state_size._fields[i])
             for i in xrange(len(flat_state_size))]
  except (AttributeError, IndexError):
    names = ["init_state_{}".format(i) for i in xrange(len(flat_state_size))]

  flat_initial_state = []

  for name, size, init in zip(names, flat_state_size, flat_initializer):
    shape_with_batch_dim = [1] + tensor_shape.as_shape(size).as_list()
    initial_state_variable = tf.get_variable(
        name, shape=shape_with_batch_dim, dtype=dtype, initializer=init)

    initial_state_variable_dims = initial_state_variable.get_shape().ndims
    tile_dims = [batch_size] + [1] * (initial_state_variable_dims - 1)
    flat_initial_state.append(
        tf.tile(initial_state_variable, tile_dims, name=(name + "_tiled")))

  return nest.pack_sequence_as(structure=state_size,
                               flat_sequence=flat_initial_state)
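As a usage note, the structure returned by the function above mirrors `state_size`, so it can itself be verified with assert_same_structure. Below is a minimal sketch under stated assumptions: the state sizes and initializers are illustrative and not from the source project, and `tf` and `nest` are imported as the function requires.

import tensorflow as tf
from tensorflow.python.util import nest

# Illustrative state sizes (assumption, not from the source project).
state_size = (tf.TensorShape([16]), tf.TensorShape([16]))
initializers = (tf.constant_initializer(0.0), tf.constant_initializer(0.0))
initial_state = trainable_initial_state(
    batch_size=4, state_size=state_size, dtype=tf.float32,
    initializers=initializers)

# The result has the same nesting as `state_size`, so this check passes.
nest.assert_same_structure(initial_state, state_size)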