The following 21 code examples, extracted from open-source Python projects, illustrate how to use tensorflow.Variable(). All examples target the TensorFlow 1.x graph-mode API and assume import tensorflow as tf.
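As a quick orientation before the extracted examples, here is a minimal sketch of tf.Variable() itself; the shape and names are illustrative, not taken from any of the projects below.

import tensorflow as tf

# A trainable variable initialized from a constant tensor (illustrative shape).
weights = tf.Variable(tf.zeros([784, 10]), name='weights', trainable=True)

with tf.Session() as sess:
    # In TF 1.x, variables must be explicitly initialized before first use.
    sess.run(tf.global_variables_initializer())
    print(sess.run(tf.reduce_sum(weights)))  # prints 0.0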
def batch_norm(in_tensor, phase_train, name, reuse=None, data_format='NHWC', center=True, scale=True):
    """ Batch normalization on convolutional maps.
    Ref.: http://stackoverflow.com/questions/33949786/how-could-i-use-batch-normalization-in-tensorflow
    Args:
        in_tensor:   Tensor, 4D NHWC or NCHW input maps
        phase_train: boolean tf.Variable, true indicates training phase
        name:        string, variable scope
        reuse:       whether to reuse the variables in the scope
        data_format: 'NHWC' or 'NCHW', selects the channel axis
    Return:
        normed: batch-normalized maps
    """
    axis = -1 if data_format == 'NHWC' else 1
    with tf.variable_scope(name):
        # return tf.contrib.layers.batch_norm(in_tensor, is_training=phase_train, scope=scope, reuse=reuse)
        return tf.layers.batch_normalization(in_tensor, axis=axis, center=center, scale=scale,
                                             training=phase_train, reuse=reuse, fused=True,
                                             momentum=0.99)
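One caveat worth knowing about this wrapper: tf.layers.batch_normalization registers its moving-average updates in the tf.GraphKeys.UPDATE_OPS collection, so the training op must depend on them or the moving statistics never update. A minimal sketch, assuming the batch_norm wrapper above; the placeholder shapes and the dummy loss are illustrative only.

import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 28, 28, 3])
phase_train = tf.placeholder(tf.bool, shape=(), name='phase_train')
net = batch_norm(x, phase_train, name='bn1')

loss = tf.reduce_mean(tf.square(net))  # dummy loss, illustrative only
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    # Running train_op now also runs the moving mean/variance updates.
    train_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)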
def batch_norm_template(inputs, is_training, scope, moments_dims, bn_decay):
    """ Batch normalization on convolutional maps and beyond...
    Ref.: http://stackoverflow.com/questions/33949786/how-could-i-use-batch-normalization-in-tensorflow
    Args:
        inputs:       Tensor, k-D input ... x C could be BC or BHWC or BDHWC
        is_training:  boolean tf.Variable, true indicates training phase
        scope:        string, variable scope
        moments_dims: a list of ints, indicating dimensions for moments calculation
        bn_decay:     float or float tensor variable, controlling moving average weight
    Return:
        normed:       batch-normalized maps
    """
    with tf.variable_scope(scope) as sc:
        num_channels = inputs.get_shape()[-1].value
        beta = tf.Variable(tf.constant(0.0, shape=[num_channels]),
                           name='beta', trainable=True)
        gamma = tf.Variable(tf.constant(1.0, shape=[num_channels]),
                            name='gamma', trainable=True)
        batch_mean, batch_var = tf.nn.moments(inputs, moments_dims, name='moments')
        decay = bn_decay if bn_decay is not None else 0.9
        ema = tf.train.ExponentialMovingAverage(decay=decay)
        # Operator that maintains moving averages of variables.
        ema_apply_op = tf.cond(is_training,
                               lambda: ema.apply([batch_mean, batch_var]),
                               lambda: tf.no_op())

        # Update moving average and return current batch's avg and var.
        def mean_var_with_update():
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)

        # ema.average returns the Variable holding the average of var.
        mean, var = tf.cond(is_training,
                            mean_var_with_update,
                            lambda: (ema.average(batch_mean), ema.average(batch_var)))
        normed = tf.nn.batch_normalization(inputs, mean, var, beta, gamma, 1e-3)
        return normed
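Note that bn_decay is typically not a constant: in PointNet-style projects that use this template, it is itself a schedule that ramps the moving-average decay toward 1 as training progresses. A hedged sketch of one such schedule; all the constants below are illustrative assumptions, not values from the original project.

import tensorflow as tf

# Illustrative hyperparameters (assumptions, not from the source project).
BATCH_SIZE = 32
BN_INIT_DECAY = 0.5
BN_DECAY_RATE = 0.5
BN_DECAY_STEP = 200000
BN_DECAY_CLIP = 0.99

global_step = tf.Variable(0, trainable=False, name='global_step')
bn_momentum = tf.train.exponential_decay(BN_INIT_DECAY,
                                         global_step * BATCH_SIZE,
                                         BN_DECAY_STEP,
                                         BN_DECAY_RATE,
                                         staircase=True)
# The decay climbs toward 1 over training, capped at BN_DECAY_CLIP.
bn_decay = tf.minimum(BN_DECAY_CLIP, 1 - bn_momentum)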
def batch_norm_for_fc(inputs, is_training, bn_decay, scope):
    """ Batch normalization on FC data.
    Args:
        inputs:      Tensor, 2D BxC input
        is_training: boolean tf.Variable, true indicates training phase
        bn_decay:    float or float tensor variable, controlling moving average weight
        scope:       string, variable scope
    Return:
        normed: batch-normalized maps
    """
    return batch_norm_template(inputs, is_training, scope, [0,], bn_decay)
def batch_norm_for_conv1d(inputs, is_training, bn_decay, scope):
    """ Batch normalization on 1D convolutional maps.
    Args:
        inputs:      Tensor, 3D BLC input maps
        is_training: boolean tf.Variable, true indicates training phase
        bn_decay:    float or float tensor variable, controlling moving average weight
        scope:       string, variable scope
    Return:
        normed: batch-normalized maps
    """
    return batch_norm_template(inputs, is_training, scope, [0, 1], bn_decay)
def batch_norm_for_conv2d(inputs, is_training, bn_decay, scope):
    """ Batch normalization on 2D convolutional maps.
    Args:
        inputs:      Tensor, 4D BHWC input maps
        is_training: boolean tf.Variable, true indicates training phase
        bn_decay:    float or float tensor variable, controlling moving average weight
        scope:       string, variable scope
    Return:
        normed: batch-normalized maps
    """
    return batch_norm_template(inputs, is_training, scope, [0, 1, 2], bn_decay)
def batch_norm_for_conv3d(inputs, is_training, bn_decay, scope):
    """ Batch normalization on 3D convolutional maps.
    Args:
        inputs:      Tensor, 5D BDHWC input maps
        is_training: boolean tf.Variable, true indicates training phase
        bn_decay:    float or float tensor variable, controlling moving average weight
        scope:       string, variable scope
    Return:
        normed: batch-normalized maps
    """
    return batch_norm_template(inputs, is_training, scope, [0, 1, 2, 3], bn_decay)
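Taken together, batch_norm_template and these four wrappers cover FC through 3D-convolutional activations. A minimal usage sketch for the 2D case; the placeholder shapes and scope name are illustrative assumptions.

import tensorflow as tf

images = tf.placeholder(tf.float32, [None, 32, 32, 64])
is_training = tf.placeholder(tf.bool, shape=(), name='is_training')
bn_decay = 0.9  # or a tensor schedule, as sketched after batch_norm_template

# Normalizes over the batch and both spatial dimensions, per channel.
normed = batch_norm_for_conv2d(images, is_training, bn_decay, scope='conv2d_bn')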
def batch_norm(x, n_out, phase_train, conv=True, scope='bn'):
    """ Batch normalization on convolutional maps.
    Args:
        x:           Tensor, 4D BHWD input maps
        n_out:       integer, depth of input maps
        phase_train: boolean tf.Variable, true indicates training phase
        conv:        boolean, True normalizes over batch and spatial dims, False over batch only
        scope:       string, variable scope
    Return:
        normed: batch-normalized maps
    """
    with tf.variable_scope(scope):
        beta = tf.Variable(tf.constant(0.0, shape=[n_out]), name='beta', trainable=True)
        gamma = tf.Variable(tf.constant(1.0, shape=[n_out]), name='gamma', trainable=True)
        if conv:
            batch_mean, batch_var = tf.nn.moments(x, [0, 1, 2], name='moments')
        else:
            batch_mean, batch_var = tf.nn.moments(x, [0], name='moments')
        ema = tf.train.ExponentialMovingAverage(decay=0.5)

        def mean_var_with_update():
            ema_apply_op = ema.apply([batch_mean, batch_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)

        mean, var = tf.cond(phase_train,
                            mean_var_with_update,
                            lambda: (ema.average(batch_mean), ema.average(batch_var)))
        normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3)
    return normed
def batch_norm(x, n_out, phase_train, scope='bn'):
    """ Batch normalization (moments over the batch axis only).
    Args:
        x:           Tensor
        n_out:       integer, depth of input maps
        phase_train: boolean tf.Variable, true indicates training phase
        scope:       string, variable scope
    Return:
        normed: batch-normalized maps
    """
    with tf.variable_scope(scope):
        beta = tf.Variable(tf.constant(0.0, shape=[n_out]), name='beta', trainable=True)
        gamma = tf.Variable(tf.constant(1.0, shape=[n_out]), name='gamma', trainable=True)
        batch_mean, batch_var = tf.nn.moments(x, [0], name='moments')
        ema = tf.train.ExponentialMovingAverage(decay=0.5)

        def mean_var_with_update():
            ema_apply_op = ema.apply([batch_mean, batch_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)

        mean, var = tf.cond(phase_train,
                            mean_var_with_update,
                            lambda: (ema.average(batch_mean), ema.average(batch_var)))
        normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3)
    return normed
def batch_norm(x, n_out, phase_train=tf.constant(True)):
    """ Batch normalization on convolutional maps.
    Ref.: http://stackoverflow.com/questions/33949786/how-could-i-use-batch-normalization-in-tensorflow
    Args:
        x:           Tensor, 4D BHWD input maps
        n_out:       integer, depth of input maps
        phase_train: boolean tf.Variable, true indicates training phase
                     (defaults to a constant True, i.e. always training mode)
    Return:
        normed: batch-normalized maps
    """
    with tf.variable_scope('bn'):
        beta = tf.Variable(tf.constant(0.0, shape=[n_out]), name='beta', trainable=True)
        gamma = tf.Variable(tf.constant(1.0, shape=[n_out]), name='gamma', trainable=True)
        batch_mean, batch_var = tf.nn.moments(x, [0, 1, 2], name='moments')
        ema = tf.train.ExponentialMovingAverage(decay=0.5)

        def mean_var_with_update():
            ema_apply_op = ema.apply([batch_mean, batch_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)

        mean, var = tf.cond(phase_train,
                            mean_var_with_update,
                            lambda: (ema.average(batch_mean), ema.average(batch_var)))
        normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3)
    return normed
def batch_norm_conv(x, n_out, phase_train, scope='bn'):
    """ Batch normalization on convolutional maps.
    Args:
        x:           Tensor, 4D BHWD input maps
        n_out:       integer, depth of input maps
        phase_train: boolean tf.Variable, true indicates training phase
        scope:       string, variable scope
    Return:
        normed: batch-normalized maps
    """
    with tf.variable_scope(scope):
        beta = tf.Variable(tf.constant(0.0, shape=[n_out]), name='beta', trainable=True)
        gamma = tf.Variable(tf.constant(1.0, shape=[n_out]), name='gamma', trainable=True)
        batch_mean, batch_var = tf.nn.moments(x, [0, 1, 2], name='moments')
        ema = tf.train.ExponentialMovingAverage(decay=0.5)

        def mean_var_with_update():
            ema_apply_op = ema.apply([batch_mean, batch_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)

        mean, var = tf.cond(phase_train,
                            mean_var_with_update,
                            lambda: (ema.average(batch_mean), ema.average(batch_var)))
        normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3)
    return normed
def batch_norm_fc(x, n_out, phase_train, scope='bn'):
    """ Batch normalization on fully connected layers.
    Args:
        x:           Tensor, 2D BxC input
        n_out:       integer, depth (number of units) of the input
        phase_train: boolean tf.Variable, true indicates training phase
        scope:       string, variable scope
    Return:
        normed: batch-normalized maps
    """
    with tf.variable_scope(scope):
        beta = tf.Variable(tf.constant(0.0, shape=[n_out]), name='beta', trainable=True)
        gamma = tf.Variable(tf.constant(1.0, shape=[n_out]), name='gamma', trainable=True)
        batch_mean, batch_var = tf.nn.moments(x, [0], name='moments')
        ema = tf.train.ExponentialMovingAverage(decay=0.5)

        def mean_var_with_update():
            ema_apply_op = ema.apply([batch_mean, batch_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)

        mean, var = tf.cond(phase_train,
                            mean_var_with_update,
                            lambda: (ema.average(batch_mean), ema.average(batch_var)))
        normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, 1e-3)
    return normed
def batch_norm(input_tensor, if_training):
    """ Batch normalization on convolutional feature maps.
    Ref.: http://stackoverflow.com/questions/33949786/how-could-i-use-batch-normalization-in-tensorflow
    Args:
        input_tensor: Tensor, 4D NHWC input feature maps
        if_training:  Boolean tf.Variable, true indicates training phase
    Return:
        normed_tensor: Batch-normalized feature maps
    """
    with tf.variable_scope('batch_normalization'):
        # The channel depth is inferred from the input rather than passed in.
        depth = int(input_tensor.get_shape()[-1])
        beta = tf.Variable(tf.constant(0.0, shape=[depth]), name='beta', trainable=True)
        gamma = tf.Variable(tf.constant(1.0, shape=[depth]), name='gamma', trainable=True)
        batch_mean, batch_var = tf.nn.moments(input_tensor, [0, 1, 2], name='moments')
        ema = tf.train.ExponentialMovingAverage(decay=0.99)

        def mean_var_with_update():
            ema_apply_op = ema.apply([batch_mean, batch_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)

        mean, var = tf.cond(if_training,
                            mean_var_with_update,
                            lambda: (ema.average(batch_mean), ema.average(batch_var)))
        normed_tensor = tf.nn.batch_normalization(input_tensor, mean, var, beta, gamma, 1e-3)
    return normed_tensor
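To close the section, a hedged end-to-end sketch of driving this last variant through a session; the shapes and random data are illustrative only.

import numpy as np
import tensorflow as tf

inputs = tf.placeholder(tf.float32, [None, 16, 16, 8])
if_training = tf.placeholder(tf.bool, shape=())
normed = batch_norm(inputs, if_training)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    batch = np.random.rand(4, 16, 16, 8).astype(np.float32)
    # Training mode: batch statistics are used and the EMA is updated.
    sess.run(normed, feed_dict={inputs: batch, if_training: True})
    # Inference mode: the accumulated moving mean/variance are used instead.
    sess.run(normed, feed_dict={inputs: batch, if_training: False})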