The following 15 code examples, extracted from open-source Python projects, illustrate how to use tensorflow.python.ops.array_ops.reverse_sequence().
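Before the project examples, here is a minimal orientation sketch (not taken from any of the listed projects, and assuming a TF 1.x-style environment where the internal array_ops module is importable): reverse_sequence reverses each batch row along the sequence axis, but only over the first seq_lengths[i] steps, so any padding beyond a row's true length stays where it is.

import tensorflow as tf
from tensorflow.python.ops import array_ops

# Two batch-major rows padded to length 4, with true lengths 2 and 3.
x = tf.constant([[1, 2, 3, 4],
                 [5, 6, 7, 8]])
lengths = tf.constant([2, 3], dtype=tf.int64)

# Positional form: (input, seq_lengths, sequence axis, batch axis).
y = array_ops.reverse_sequence(x, lengths, 1, 0)
# y == [[2, 1, 3, 4],
#       [7, 6, 5, 8]]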
def _reverse_seq(input_seq, lengths):
  """Reverse a list of Tensors up to specified lengths.

  Args:
    input_seq: Sequence of seq_len tensors of dimension (batch_size, depth)
    lengths: A tensor of dimension batch_size, containing lengths for each
      sequence in the batch. If "None" is specified, simply reverses the list.

  Returns:
    time-reversed sequence
  """
  for input_ in input_seq:
    input_.set_shape(input_.get_shape().with_rank(2))

  # Join into (time, batch_size, depth)
  s_joined = array_ops_.pack(input_seq)

  # Reverse along dimension 0
  s_reversed = array_ops_.reverse_sequence(s_joined, lengths, 0, 1)

  # Split again into list
  result = array_ops_.unpack(s_reversed)
  return result
def _reverse(self, t, lengths):
  """Time reverse the provided tensor or list of tensors.

  Assumes the top dimension is the time dimension.

  Args:
    t: 3D tensor or list of 2D tensors to be reversed
    lengths: 1D tensor of lengths, or `None`

  Returns:
    A reversed tensor or list of tensors
  """
  if isinstance(t, list):
    return list(reversed(t))
  else:
    if lengths is None:
      return array_ops.reverse(t, [True, False, False])
    else:
      return array_ops.reverse_sequence(t, lengths, 0, 1)
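Note the `[True, False, False]` argument to array_ops.reverse: this is the pre-1.0 boolean-mask form, with one flag per dimension marking the axes to flip (here only the time axis). From TensorFlow 1.0 onward the same call is written with an axis list, as the `array_ops.reverse(input_, axis=[seq_dim])` fallback in a later example shows.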
def _reverse_seq(input_seq, lengths):
  """Reverse a list of Tensors up to specified lengths.

  Args:
    input_seq: Sequence of seq_len tensors of dimension (batch_size, depth)
    lengths: A tensor of dimension batch_size, containing lengths for each
      sequence in the batch. If "None" is specified, simply reverses the list.

  Returns:
    time-reversed sequence
  """
  if lengths is None:
    return list(reversed(input_seq))

  input_shape = tensor_shape.matrix(None, None)
  for input_ in input_seq:
    input_shape.merge_with(input_.get_shape())
    input_.set_shape(input_shape)

  # Join into (time, batch_size, depth)
  s_joined = array_ops.pack(input_seq)

  # TODO(schuster, ebrevdo): Remove cast when reverse_sequence takes int32
  if lengths is not None:
    lengths = math_ops.to_int64(lengths)

  # Reverse along dimension 0
  s_reversed = array_ops.reverse_sequence(s_joined, lengths, 0, 1)

  # Split again into list
  result = array_ops.unpack(s_reversed)
  for r in result:
    r.set_shape(input_shape)
  return result
def _reverse(input_, seq_lengths, seq_dim, batch_dim):
  if seq_lengths is not None:
    return array_ops.reverse_sequence(
        input=input_, seq_lengths=seq_lengths,
        seq_dim=seq_dim, batch_dim=batch_dim)
  else:
    return array_ops.reverse(input_, axis=[seq_dim])
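A hedged usage sketch for a helper like this one (the shapes and length values below are made up for illustration, and the `seq_dim`/`batch_dim` keyword names assume a TF 1.x release; later releases rename them `seq_axis`/`batch_axis`). With time-major input, time is axis 0 and batch is axis 1.

import tensorflow as tf
from tensorflow.python.ops import array_ops

inputs = tf.zeros([10, 32, 64])                       # time-major [max_time, batch, depth]
seq_lengths = tf.constant([10] * 32, dtype=tf.int64)  # true length of each batch row

# The call the helper makes when seq_lengths is given: reverse each batch
# row's time steps up to its own length, leaving padded steps in place.
inputs_rev = array_ops.reverse_sequence(
    input=inputs, seq_lengths=seq_lengths, seq_dim=0, batch_dim=1)

When every length equals max_time, this is equivalent to the `array_ops.reverse(input_, axis=[seq_dim])` branch used for `seq_lengths is None`.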
def _reverse_seq(input_seq, lengths):
  """Reverse a list of Tensors up to specified lengths.

  Args:
    input_seq: Sequence of seq_len tensors of dimension (batch_size, depth)
    lengths: A tensor of dimension batch_size, containing lengths for each
      sequence in the batch. If "None" is specified, simply reverses the list.

  Returns:
    time-reversed sequence
  """
  if lengths is None:
    return list(reversed(input_seq))

  for input_ in input_seq:
    input_.set_shape(input_.get_shape().with_rank(2))

  # Join into (time, batch_size, depth)
  s_joined = array_ops_.pack(input_seq)

  # Reverse along dimension 0
  s_reversed = array_ops_.reverse_sequence(s_joined, lengths, 0, 1)

  # Split again into list
  result = array_ops_.unpack(s_reversed)
  return result
def _reverse_seq(input_seq, lengths):
  """Reverse a list of Tensors up to specified lengths.

  Args:
    input_seq: Sequence of seq_len tensors of dimension (batch_size, n_features)
      or nested tuples of tensors.
    lengths: A `Tensor` of dimension batch_size, containing lengths for each
      sequence in the batch. If "None" is specified, simply reverses the list.

  Returns:
    time-reversed sequence
  """
  if lengths is None:
    return list(reversed(input_seq))

  flat_input_seq = tuple(nest.flatten(input_) for input_ in input_seq)

  flat_results = [[] for _ in range(len(input_seq))]
  for sequence in zip(*flat_input_seq):
    input_shape = tensor_shape.unknown_shape(
        ndims=sequence[0].get_shape().ndims)
    for input_ in sequence:
      input_shape.merge_with(input_.get_shape())
      input_.set_shape(input_shape)

    # Join into (time, batch_size, depth)
    s_joined = array_ops.pack(sequence)

    # TODO(schuster, ebrevdo): Remove cast when reverse_sequence takes int32
    if lengths is not None:
      lengths = math_ops.to_int64(lengths)

    # Reverse along dimension 0
    s_reversed = array_ops.reverse_sequence(s_joined, lengths, 0, 1)

    # Split again into list
    result = array_ops.unpack(s_reversed)
    for r, flat_result in zip(result, flat_results):
      r.set_shape(input_shape)
      flat_result.append(r)

  results = [nest.pack_sequence_as(structure=input_, flat_sequence=flat_result)
             for input_, flat_result in zip(input_seq, flat_results)]
  return results
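This variant extends the earlier ones to nested inputs: each element of input_seq may be a tuple (or deeper nesting) of tensors, so it is flattened with nest.flatten, each flat component is packed, reversed, and unpacked exactly as before, and the pieces are reassembled with nest.pack_sequence_as so the output mirrors the input structure.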
def dynamic_bidirectional_rnn(cell, pre_inputs, sequence_length=None, initial_state=None,
                              dtype=None, parallel_iterations=None, swap_memory=False,
                              time_major=False, scope=None, feed_prev_out=False,
                              num_layers=1, reuse_layers=True):
  assert isinstance(cell, BiRNNCell)
  with vs.variable_scope(scope or "Bi-RNN") as root_scope:
    inputs_list = []
    outputs_list = []
    outputs_fw_list = []
    outputs_bw_list = []
    state_fw_list = []
    state_bw_list = []
    for layer_idx in range(num_layers):
      scope_name = "layer_{}".format(layer_idx)
      with name_scope(scope_name) if reuse_layers else vs.variable_scope(scope_name):
        inputs = cell.pre(pre_inputs)
        outputs_fw, state_fw = dynamic_rnn(cell, inputs, sequence_length=sequence_length,
                                           initial_state=initial_state, dtype=dtype,
                                           parallel_iterations=parallel_iterations,
                                           swap_memory=swap_memory, time_major=time_major,
                                           feed_prev_out=feed_prev_out, scope='FW')
        inputs_rev = reverse_sequence(inputs, sequence_length, 1)
        outputs_bw_rev, state_bw = dynamic_rnn(cell, inputs_rev, sequence_length=sequence_length,
                                               initial_state=initial_state, dtype=dtype,
                                               parallel_iterations=parallel_iterations,
                                               swap_memory=swap_memory, time_major=time_major,
                                               feed_prev_out=feed_prev_out, scope='BW')
        outputs_bw = reverse_sequence(outputs_bw_rev, sequence_length, 1)
        outputs = cell.post(outputs_fw, outputs_bw)
        pre_inputs = outputs
      inputs_list.append(inputs)
      outputs_list.append(outputs)
      outputs_fw_list.append(outputs_fw)
      outputs_bw_list.append(outputs_bw)
      state_fw_list.append(state_fw)
      state_bw_list.append(state_bw)
      if reuse_layers:
        root_scope.reuse_variables()
    tensors = dict()
    tensors['in'] = transpose(pack(inputs_list), [1, 0, 2, 3])
    tensors['out'] = transpose(pack(outputs_list), [1, 0, 2, 3])
    tensors['fw_out'] = transpose(pack(outputs_fw_list), [1, 0, 2, 3])  # [N, L, M, d]
    tensors['bw_out'] = transpose(pack(outputs_bw_list), [1, 0, 2, 3])  # [N, L, M, d]
    tensors['fw_state'] = transpose(pack(state_fw_list), [1, 0, 2])  # [N, L, d]
    tensors['bw_state'] = transpose(pack(state_bw_list), [1, 0, 2])  # [N, L, d]
    return outputs_list[-1], state_fw_list[-1], state_bw_list[-1], tensors
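The heart of this wrapper is the reverse-run-reverse pattern for building a backward pass from a forward-only RNN: reverse the padded inputs up to each row's true length, run the cell over the reversed sequence, then reverse the outputs back so padding stays at the end. A minimal sketch of just that step, assuming a TF 1.x environment and using tf.nn.dynamic_rnn with a GRU cell in place of the project's custom cell and dynamic_rnn (the shapes and lengths are made up):

import tensorflow as tf

batch_size, max_time, depth = 4, 7, 16
inputs = tf.zeros([batch_size, max_time, depth])  # batch-major [N, T, d]
sequence_length = tf.constant([7, 5, 3, 6])       # true length of each row

cell = tf.nn.rnn_cell.GRUCell(depth)

# Reverse each row up to its length (padding stays at the end), run the
# forward-only cell over it, then undo the reversal on the outputs.
inputs_rev = tf.reverse_sequence(inputs, sequence_length, seq_dim=1, batch_dim=0)
outputs_bw_rev, state_bw = tf.nn.dynamic_rnn(
    cell, inputs_rev, sequence_length=sequence_length, dtype=tf.float32, scope='BW')
outputs_bw = tf.reverse_sequence(outputs_bw_rev, sequence_length, seq_dim=1, batch_dim=0)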