The following six code examples, extracted from open-source Python projects, illustrate how to use tensorflow.complex_abs().
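For orientation, here is a minimal sketch of what tf.complex_abs() computes, assuming a pre-1.0 graph-mode TensorFlow setup (in later releases the element-wise magnitude of a complex tensor is obtained with tf.abs instead):

    import tensorflow as tf

    # Build a small complex64 tensor and take its element-wise magnitude.
    real = tf.constant([3.0, 0.0], dtype=tf.float32)
    imag = tf.constant([4.0, 1.0], dtype=tf.float32)
    z = tf.complex(real, imag)      # [3+4j, 0+1j]
    mag = tf.complex_abs(z)         # sqrt(real^2 + imag^2) -> [5.0, 1.0]

    with tf.Session() as sess:
        print(sess.run(mag))        # [5. 1.]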
def stft(wav, n_fft=1024, overlap=4, dt=tf.int32, absp=False):
    assert (wav.shape[0] > n_fft)
    X = tf.placeholder(dtype=dt, shape=wav.shape)
    X = tf.cast(X, tf.float32)
    hop = n_fft // overlap  # integer hop size (required by range below)

    ## prepare constant variables
    Pi = tf.constant(np.pi, dtype=tf.float32)
    W = tf.constant(scipy.hanning(n_fft), dtype=tf.float32)  # Hann window

    # tf.pack is the pre-1.0 name of tf.stack
    S = tf.pack([tf.fft(tf.cast(tf.multiply(W, X[i:i + n_fft]), tf.complex64))
                 for i in range(1, wav.shape[0] - n_fft, hop)])
    abs_S = tf.complex_abs(S)

    sess = tf.Session()
    if absp:
        return sess.run(abs_S, feed_dict={X: wav})
    else:
        return sess.run(S, feed_dict={X: wav})
def complex_mod_of_real(x):
    xshp = x.get_shape().as_list()
    assert xshp[1] % 2 == 0
    half = xshp[1] // 2  # integer split point: first half real, second half imaginary
    xcplx = tf.complex(x[:, 0:half], x[:, half:])
    return tf.complex_abs(xcplx)
def unsplit_from_complex_ir(x):
    # return tf.concat(1, [tf.imag(x), tf.abs(tf.real(x))])
    return tf.abs(tf.concat(1, [tf.imag(x), tf.real(x)]))

    # mag = tf.maximum(1.0, tf.complex_abs(x))
    # x = tf.complex(tf.real(x) / (mag + 1e-10), tf.imag(x) / (mag + 1e-10))
    # real = tf.concat(1, [tf.imag(x), tf.real(x)])
    # return tf.abs(HolographicMemory.normalize_real_by_complex_abs([real])[0])
def modrelu_c(in_c, bias):
    if not in_c.dtype.is_complex:
        raise ValueError('modrelu_c: Argument in_c must be complex type')
    if bias.dtype.is_complex:
        raise ValueError('modrelu_c: Argument bias must be real type')

    n = tf.complex_abs(in_c)
    scale = 1. / (n + 1e-5)
    return complex_mul_real(in_c, tf.nn.relu(n + bias) * scale)
def __call__(self, inputs, state, scope=None):
    with tf.variable_scope(scope or type(self).__name__):
        unitary_hidden_state, secondary_cell_hidden_state = tf.split(1, 2, state)

        mat_in = tf.get_variable('mat_in', [self.input_size, self.state_size * 2])
        mat_out = tf.get_variable('mat_out', [self.state_size * 2, self.output_size])

        in_proj = tf.matmul(inputs, mat_in)
        # tf.complex expects separate real and imaginary tensors, so unpack the split
        in_proj_c = tf.complex(*tf.split(1, 2, in_proj))
        out_state = modReLU(in_proj_c + ulinear(unitary_hidden_state, self.state_size),
                            tf.get_variable(name='bias',
                                            dtype=tf.float32,
                                            shape=tf.shape(unitary_hidden_state),
                                            initializer=tf.constant_initializer(0.)),
                            scope=scope)

        with tf.variable_scope('unitary_output'):
            '''computes data linear, unitary linear and summation -- TODO: should be complex output'''
            unitary_linear_output_real = linear.linear(
                [tf.real(out_state), tf.imag(out_state), inputs], True, 0.0)

        with tf.variable_scope('scale_nonlinearity'):
            modulus = tf.complex_abs(unitary_linear_output_real)
            rescale = tf.maximum(modulus + hidden_bias, 0.) / (modulus + 1e-7)

        # transition to data shortcut connection
        # out_ = tf.matmul(tf.concat(1, [tf.real(out_state), tf.imag(out_state)]), mat_out) + out_bias

        # hidden state is complex but output is completely real
        return out_, out_state  # complex
def istft(spec, overlap=4):
    assert (spec.shape[0] > 1)
    S = tf.placeholder(dtype=tf.complex64, shape=spec.shape)
    X = tf.complex_abs(tf.concat(0, [tf.ifft(frame)
                                     for frame in tf.unstack(S)]))
    sess = tf.Session()
    return sess.run(X, feed_dict={S: spec})