The following code examples, extracted from open-source Python projects, illustrate how keras.regularizers.get() is used.
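Before the project examples, here is a minimal sketch (not taken from any of the projects below, assuming a standard Keras installation) of what regularizers.get() accepts and returns; this None / name / instance handling is why the layer constructors below can pass their regularizer arguments straight through it:

from keras import regularizers

# regularizers.get() resolves an identifier to a regularizer object (or None):
#   - None is passed through unchanged, meaning "no regularization"
#   - a string such as 'l1' or 'l2' is looked up by name
#   - an existing regularizer object is returned unchanged
print(regularizers.get(None))                    # None
print(regularizers.get('l2'))                    # a regularizer looked up by name
print(regularizers.get(regularizers.l2(0.01)))   # the object passed in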
def __init__(self, W_regularizer=None, u_regularizer=None, b_regularizer=None,
             W_constraint=None, u_constraint=None, b_constraint=None,
             bias=True, **kwargs):
    self.supports_masking = True
    self.init = initializations.get('glorot_uniform')
    self.W_regularizer = regularizers.get(W_regularizer)
    self.u_regularizer = regularizers.get(u_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.u_constraint = constraints.get(u_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.bias = bias
    super(AttentionWithContext, self).__init__(**kwargs)
def __init__(self, output_dim, init='glorot_uniform', activation='relu', weights=None,
             W_regularizer=None, b_regularizer=None, activity_regularizer=None,
             W_constraint=None, b_constraint=None, input_dim=None, **kwargs):
    self.W_initializer = initializers.get(init)
    self.b_initializer = initializers.get('zeros')
    self.activation = activations.get(activation)
    self.output_dim = output_dim
    self.input_dim = input_dim
    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.initial_weights = weights
    self.input_spec = InputSpec(ndim=2)
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(SparseFullyConnectedLayer, self).__init__(**kwargs)
def __init__(self, epsilon=1e-3, mode=0, axis=-1, momentum=0.99,
             r_max_value=3., d_max_value=5., t_delta=1., weights=None,
             beta_init='zero', gamma_init='one',
             gamma_regularizer=None, beta_regularizer=None, **kwargs):
    self.supports_masking = True
    self.beta_init = initializers.get(beta_init)
    self.gamma_init = initializers.get(gamma_init)
    self.epsilon = epsilon
    self.mode = mode
    self.axis = axis
    self.momentum = momentum
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.initial_weights = weights
    self.r_max_value = r_max_value
    self.d_max_value = d_max_value
    self.t_delta = t_delta
    if self.mode == 0:
        self.uses_learning_phase = True
    super(BatchRenormalization, self).__init__(**kwargs)
def __init__(self, output_dim, freq_dim, hidden_dim,
             init='glorot_uniform', inner_init='orthogonal',
             forget_bias_init='one', activation='tanh',
             inner_activation='hard_sigmoid',
             W_regularizer=None, U_regularizer=None, b_regularizer=None,
             dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.freq_dim = freq_dim
    self.hidden_dim = hidden_dim
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.forget_bias_init = initializations.get(forget_bias_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.dropout_W, self.dropout_U = dropout_W, dropout_U
    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    super(ITOSFM, self).__init__(**kwargs)
def __init__(self, init='glorot_uniform',
             U_regularizer=None, b_start_regularizer=None, b_end_regularizer=None,
             U_constraint=None, b_start_constraint=None, b_end_constraint=None,
             weights=None, **kwargs):
    self.supports_masking = True
    self.uses_learning_phase = True
    self.input_spec = [InputSpec(ndim=3)]
    self.init = initializations.get(init)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_start_regularizer = regularizers.get(b_start_regularizer)
    self.b_end_regularizer = regularizers.get(b_end_regularizer)
    self.U_constraint = constraints.get(U_constraint)
    self.b_start_constraint = constraints.get(b_start_constraint)
    self.b_end_constraint = constraints.get(b_end_constraint)
    self.initial_weights = weights
    super(ChainCRF, self).__init__(**kwargs)
def __init__(self, output_dim, memory_dim=128, memory_size=20,
             controller_output_dim=100, location_shift_range=1,
             num_read_head=1, num_write_head=1,
             init='glorot_uniform', inner_init='orthogonal',
             forget_bias_init='one', activation='tanh',
             inner_activation='hard_sigmoid',
             W_regularizer=None, U_regularizer=None, R_regularizer=None,
             b_regularizer=None, W_y_regularizer=None,
             W_xi_regularizer=None, W_r_regularizer=None,
             dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.forget_bias_init = initializations.get(forget_bias_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.dropout_W, self.dropout_U = dropout_W, dropout_U
    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    super(NTM, self).__init__(**kwargs)
def __init__(self, mask_shape, layer_sizes, scale, bias=None, act_reg=None, **kwargs):
    """ """
    self.mask_shape = mask_shape
    self.layer_sizes = layer_sizes
    self.scale = scale
    self.gen = generators.FFMatrixGen2D(output_shape=mask_shape,
                                        layer_sizes=layer_sizes,
                                        scale=scale)
    self.bias = bias
    self.act_reg = regularizers.get(act_reg)
    super().__init__(**kwargs)
def __init__(self, alpha_initializer=0.2, beta_initializer=5.0,
             alpha_regularizer=None, alpha_constraint=None,
             beta_regularizer=None, beta_constraint=None,
             shared_axes=None, **kwargs):
    super(ParametricSoftplus, self).__init__(**kwargs)
    self.supports_masking = True
    self.alpha_initializer = initializers.get(alpha_initializer)
    self.alpha_regularizer = regularizers.get(alpha_regularizer)
    self.alpha_constraint = constraints.get(alpha_constraint)
    self.beta_initializer = initializers.get(beta_initializer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.beta_constraint = constraints.get(beta_constraint)
    if shared_axes is None:
        self.shared_axes = None
    elif not isinstance(shared_axes, (list, tuple)):
        self.shared_axes = [shared_axes]
    else:
        self.shared_axes = list(shared_axes)
def __init__(self, filters,
             centers_initializer='zeros', centers_regularizer=None, centers_constraint=None,
             stds_initializer='ones', stds_regularizer=None, stds_constraint=None,
             gauss_scale=100, **kwargs):
    self.filters = filters
    self.gauss_scale = gauss_scale
    super(GaussianReceptiveFields, self).__init__(**kwargs)
    self.centers_initializer = initializers.get(centers_initializer)
    self.stds_initializer = initializers.get(stds_initializer)
    self.centers_regularizer = regularizers.get(centers_regularizer)
    self.stds_regularizer = regularizers.get(stds_regularizer)
    self.centers_constraint = constraints.get(centers_constraint)
    self.stds_constraint = constraints.get(stds_constraint)
def __init__(self, quadratic_filters=2, init='glorot_uniform', weights=None,
             W_quad_regularizer=None, W_lin_regularizer=None, activity_regularizer=None,
             W_quad_constraint=None, W_lin_constraint=None,
             bias=True, input_dim=None, **kwargs):
    self.init = initializations.get(init)
    self.quadratic_filters = quadratic_filters
    self.input_dim = input_dim
    self.W_quad_regularizer = regularizers.get(W_quad_regularizer)
    self.W_lin_regularizer = regularizers.get(W_lin_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_quad_constraint = constraints.get(W_quad_constraint)
    self.W_lin_constraint = constraints.get(W_lin_constraint)
    self.initial_weights = weights
    self.input_spec = [InputSpec(ndim=2)]
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(GQM, self).__init__(**kwargs)
def __init__(self, quadratic_filters=2, init='glorot_uniform', weights=None,
             W_quad_regularizer=None, W_lin_regularizer=None, activity_regularizer=None,
             W_quad_constraint=None, W_lin_constraint=None,
             bias=True, input_dim=None, **kwargs):
    self.init = initializations.get(init)
    self.quadratic_filters = quadratic_filters
    self.input_dim = input_dim
    self.W_quad_regularizer = regularizers.get(W_quad_regularizer)
    self.W_lin_regularizer = regularizers.get(W_lin_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_quad_constraint = constraints.get(W_quad_constraint)
    self.W_lin_constraint = constraints.get(W_lin_constraint)
    self.initial_weights = weights
    self.input_spec = [InputSpec(ndim=5)]
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(GQM_conv, self).__init__(**kwargs)
def __init__(self, quadratic_filters=2, init='glorot_uniform', weights=None,
             W_quad_regularizer=None, W_lin_regularizer=None, activity_regularizer=None,
             W_quad_constraint=None, W_lin_constraint=None,
             bias=True, input_dim=None, **kwargs):
    self.init = initializations.get(init)
    self.quadratic_filters = quadratic_filters
    self.input_dim = input_dim
    self.W_quad_regularizer = regularizers.get(W_quad_regularizer)
    self.W_lin_regularizer = regularizers.get(W_lin_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_quad_constraint = constraints.get(W_quad_constraint)
    self.W_lin_constraint = constraints.get(W_lin_constraint)
    self.initial_weights = weights
    self.input_spec = [InputSpec(ndim=5)]
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(GQM_4D, self).__init__(**kwargs)
def __init__(self, units,
             kernel_initializer='glorot_uniform', kernel_regularizer=None,
             kernel_constraint=constraints.NonNeg(),
             k_initializer='zeros', k_regularizer=None, k_constraint=None,
             tied_k=False, activity_regularizer=None, **kwargs):
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(SoftMinMax, self).__init__(**kwargs)
    self.units = units
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.k_initializer = initializers.get(k_initializer)
    self.k_regularizer = regularizers.get(k_regularizer)
    self.k_constraint = constraints.get(k_constraint)
    self.tied_k = tied_k
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.input_spec = InputSpec(min_ndim=2)
    self.supports_masking = True
def __init__(self, output_dim, init='glorot_uniform', activation=None, weights=None,
             W_regularizer=None, b_regularizer=None, activity_regularizer=None,
             W_constraint=None, b_constraint=None,
             bias=True, input_dim=None, **kwargs):
    self.init = initializations.get(init)
    self.activation = activations.get(activation)
    self.output_dim = output_dim
    self.input_dim = input_dim
    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.bias = bias
    self.initial_weights = weights
    self.input_spec = [InputSpec(ndim='2+')]
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(DenseNonNeg, self).__init__(**kwargs)
def __init__(self, init='glorot_uniform', activation=None, weights=None,
             W_regularizer=None, b_regularizer=None, activity_regularizer=None,
             W_constraint=None, b_constraint=None,
             bias=True, input_dim=None, **kwargs):
    self.init = initializations.get(init)
    self.activation = activations.get(activation)
    self.input_dim = input_dim
    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.bias = bias
    self.initial_weights = weights
    self.input_spec = [InputSpec(ndim='2+')]
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(Feedback, self).__init__(**kwargs)
def __init__(self, init='glorot_uniform', activation=None, weights=None,
             W_regularizer=None, b_regularizer=None, activity_regularizer=None,
             W_constraint=None, b_constraint=None,
             bias=True, input_dim=None, **kwargs):
    self.init = initializations.get(init)
    self.activation = activations.get(activation)
    self.input_dim = input_dim
    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.bias = bias
    self.initial_weights = weights
    self.input_spec = [InputSpec(ndim='2+')]
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(DivisiveNormalization, self).__init__(**kwargs)
def __init__(self, nb_kernels, kernel_dim, init='glorot_uniform', weights=None,
             W_regularizer=None, activity_regularizer=None, W_constraint=None,
             input_dim=None, **kwargs):
    self.init = initializers.get(init)
    self.nb_kernels = nb_kernels
    self.kernel_dim = kernel_dim
    self.input_dim = input_dim
    self.W_regularizer = regularizers.get(W_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.initial_weights = weights
    self.input_spec = [InputSpec(ndim=2)]
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(MinibatchDiscrimination, self).__init__(**kwargs)
def __init__(self, output_dim, mem_vec_dim, init='glorot_uniform', activation='linear',
             weights=None, activity_regularizer=None, input_dim=None, **kwargs):
    '''
    Params:
        output_dim: dimension of the output
        mem_vec_dim: dimension of the query/memory vectors
    '''
    self.init = initializations.get(init)
    self.activation = activations.get(activation)
    self.output_dim = output_dim
    self.input_dim = input_dim
    self.mem_vector_dim = mem_vec_dim
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.initial_weights = weights
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(MemoryNet, self).__init__(**kwargs)
def __init__(self, output_dim, L, init='glorot_uniform', inner_init='orthogonal',
             activation='tanh', inner_activation='hard_sigmoid',
             W_regularizer=None, U_regularizer=None, b_regularizer=None,
             dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.dropout_W, self.dropout_U = dropout_W, dropout_U
    self.L = L
    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    super(RHN, self).__init__(**kwargs)
def __init__(self, input_dim, output_dim, init='uniform', input_length=None,
             W_regularizer=None, activity_regularizer=None, W_constraint=None,
             mask_zero=False, weights=None, **kwargs):
    self.input_dim = input_dim
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.input_length = input_length
    self.mask_zero = mask_zero
    self.W_constraint = constraints.get(W_constraint)
    self.constraints = [self.W_constraint]
    self.W_regularizer = regularizers.get(W_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.initial_weights = weights
    kwargs['input_shape'] = (self.input_dim,)
    super(Embedding2D, self).__init__(**kwargs)
def __init__(self, input_dim, output_dim, init='uniform', input_length=None,
             W_regularizer=None, activity_regularizer=None, W_constraint=None,
             mask_zero=False, weights=None, **kwargs):
    self.input_dim = input_dim
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.input_length = input_length
    self.mask_zero = mask_zero
    self.W_constraint = constraints.get(W_constraint)
    self.constraints = [self.W_constraint]
    self.W_regularizer = regularizers.get(W_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.initial_weights = weights
    kwargs['input_shape'] = (self.input_dim,)
    super(Embedding, self).__init__(**kwargs)
def __init__(self, init='glorot_uniform',
             U_regularizer=None, b_start_regularizer=None, b_end_regularizer=None,
             U_constraint=None, b_start_constraint=None, b_end_constraint=None,
             weights=None, **kwargs):
    super(ChainCRF, self).__init__(**kwargs)
    self.init = initializers.get(init)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_start_regularizer = regularizers.get(b_start_regularizer)
    self.b_end_regularizer = regularizers.get(b_end_regularizer)
    self.U_constraint = constraints.get(U_constraint)
    self.b_start_constraint = constraints.get(b_start_constraint)
    self.b_end_constraint = constraints.get(b_end_constraint)
    self.initial_weights = weights
    self.supports_masking = True
    self.uses_learning_phase = True
    self.input_spec = [InputSpec(ndim=3)]
def __init__(self, downsampling_factor=10, init='glorot_uniform', activation='linear',
             weights=None, W_regularizer=None, activity_regularizer=None,
             W_constraint=None, input_dim=None, **kwargs):
    self.downsampling_factor = downsampling_factor
    self.init = initializations.get(init)
    self.activation = activations.get(activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.initial_weights = weights
    self.input_dim = input_dim
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    self.input_spec = [InputSpec(ndim=4)]
    super(EltWiseProduct, self).__init__(**kwargs)
def __init__(self, nb_classes, frequency_table=None, mode=0, init='glorot_uniform',
             weights=None, W_regularizer=None, b_regularizer=None,
             activity_regularizer=None, W_constraint=None, b_constraint=None,
             bias=True, verbose=False, **kwargs):
    '''
    # Arguments:
    nb_classes: Number of classes.
    frequency_table: list. Frequency of each class. More frequent classes will
        have shorter huffman codes.
    mode: integer. One of [0, 1]
    verbose: boolean. Set to true to see the progress of building huffman tree.
    '''
    self.nb_classes = nb_classes
    if frequency_table is None:
        frequency_table = [1] * nb_classes
    self.frequency_table = frequency_table
    self.mode = mode
    self.init = initializations.get(init)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.bias = bias
    self.initial_weights = weights
    self.verbose = verbose
    super(Huffmax, self).__init__(**kwargs)
def __init__(self, W_regularizer=None, u_regularizer=None, b_regularizer=None,
             W_constraint=None, u_constraint=None, b_constraint=None,
             W_dropout=0., u_dropout=0., bias=True, **kwargs):
    self.supports_masking = True
    self.W_init = initializers.get('orthogonal')
    self.u_init = initializers.get('glorot_uniform')
    self.W_regularizer = regularizers.get(W_regularizer)
    self.u_regularizer = regularizers.get(u_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.u_constraint = constraints.get(u_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.W_dropout = min(1., max(0., W_dropout))
    self.u_dropout = min(1., max(0., u_dropout))
    self.bias = bias
    super(AttentionWithContext, self).__init__(**kwargs)
def __init__(self, output_dim, init='glorot_uniform', inner_init='orthogonal',
             activation='tanh', beta_init='zero', gamma_init='one',
             W_regularizer=None, U_regularizer=None, b_regularizer=None,
             gamma_regularizer=None, beta_regularizer=None,
             dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.activation = activations.get(activation)
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.beta_init = initializations.get(beta_init)
    self.gamma_init = initializations.get(gamma_init)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.dropout_W = dropout_W
    self.dropout_U = dropout_U
    self.epsilon = 1e-5
    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    super(LN_SimpleRNN, self).__init__(**kwargs)
def __init__(self, output_dim, init='glorot_uniform', inner_init='orthogonal',
             forget_bias_init='one', activation='tanh', inner_activation='hard_sigmoid',
             W_regularizer=None, U_regularizer=None, b_regularizer=None,
             dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.forget_bias_init = initializations.get(forget_bias_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.dropout_W, self.dropout_U = dropout_W, dropout_U
    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    super(DualCurrent, self).__init__(**kwargs)
def __init__(self, output_dim, init='glorot_uniform', inner_init='orthogonal',
             activation='tanh', inner_activation='hard_sigmoid',
             W_regularizer=None, U_regularizer=None, b_regularizer=None,
             shape_key=None, dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.dropout_W, self.dropout_U = dropout_W, dropout_U
    self.shape_key = shape_key or {}
    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    kwargs['consume_less'] = 'gpu'
    super(RTTN, self).__init__(**kwargs)
    self.num_actions = 4
def __init__(self, output_dim, init='glorot_uniform', inner_init='orthogonal',
             forget_bias_init='one', activation='tanh', inner_activation='hard_sigmoid',
             W_regularizer=None, U_regularizer=None, b_regularizer=None,
             dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.forget_bias_init = initializations.get(forget_bias_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.dropout_W, self.dropout_U = dropout_W, dropout_U
    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    super(DecoderVaeLSTM, self).__init__(**kwargs)
def __init__(self, output_dim, init='glorot_uniform', inner_init='orthogonal',
             forget_bias_init='one', activation='tanh', inner_activation='hard_sigmoid',
             W_regularizer=None, U_regularizer=None, b_regularizer=None,
             dropout_W=0., dropout_U=0., **kwargs):
    self.output_dim = output_dim
    self.init = initializations.get(init)
    self.inner_init = initializations.get(inner_init)
    self.forget_bias_init = initializations.get(forget_bias_init)
    self.activation = activations.get(activation)
    self.inner_activation = activations.get(inner_activation)
    self.W_regularizer = regularizers.get(W_regularizer)
    self.U_regularizer = regularizers.get(U_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.dropout_W = dropout_W
    self.dropout_U = dropout_U
    self.stateful = False
    if self.dropout_W or self.dropout_U:
        self.uses_learning_phase = True
    super(QRNN, self).__init__(**kwargs)
def __init__(self, W_regularizer=None, b_regularizer=None,
             W_constraint=None, b_constraint=None,
             bias=True, **kwargs):
    """
    Keras Layer that implements an Attention mechanism for temporal data.
    Supports Masking.
    Follows the work of Raffel et al. [https://arxiv.org/abs/1512.08756]
    # Input shape
        3D tensor with shape: `(samples, steps, features)`.
    # Output shape
        2D tensor with shape: `(samples, features)`.
    :param kwargs:
    Just put it on top of an RNN Layer (GRU/LSTM/SimpleRNN) with return_sequences=True.
    The dimensions are inferred based on the output shape of the RNN.
    Example:
        model.add(LSTM(64, return_sequences=True))
        model.add(Attention())
    """
    self.supports_masking = True
    self.init = initializations.get('glorot_uniform')
    self.W_regularizer = regularizers.get(W_regularizer)
    self.b_regularizer = regularizers.get(b_regularizer)
    self.W_constraint = constraints.get(W_constraint)
    self.b_constraint = constraints.get(b_constraint)
    self.bias = bias
    super(Attention, self).__init__(**kwargs)
def __init__(self, output_dim, window_size=3, stride=1,
             kernel_initializer='uniform', bias_initializer='zero',
             activation='linear', activity_regularizer=None,
             kernel_regularizer=None, bias_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             use_bias=True, input_dim=None, input_length=None, **kwargs):
    self.output_dim = output_dim
    self.window_size = window_size
    self.strides = (stride, 1)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.activation = activations.get(activation)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = [InputSpec(ndim=3)]
    self.input_dim = input_dim
    self.input_length = input_length
    if self.input_dim:
        kwargs['input_shape'] = (self.input_length, self.input_dim)
    super(GCNN, self).__init__(**kwargs)
def __init__(self, units, window_size=2, stride=1,
             return_sequences=False, go_backwards=False,
             stateful=False, unroll=False, activation='tanh',
             kernel_initializer='uniform', bias_initializer='zero',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             dropout=0, use_bias=True, input_dim=None, input_length=None,
             **kwargs):
    self.return_sequences = return_sequences
    self.go_backwards = go_backwards
    self.stateful = stateful
    self.unroll = unroll
    self.units = units
    self.window_size = window_size
    self.strides = (stride, 1)
    self.use_bias = use_bias
    self.activation = activations.get(activation)
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.dropout = dropout
    self.supports_masking = True
    self.input_spec = [InputSpec(ndim=3)]
    self.input_dim = input_dim
    self.input_length = input_length
    if self.input_dim:
        kwargs['input_shape'] = (self.input_length, self.input_dim)
    super(QRNN, self).__init__(**kwargs)
def __init__(self, axis=-1, gamma_init='one', beta_init='zero',
             gamma_regularizer=None, beta_regularizer=None,
             epsilon=1e-6, **kwargs):
    super(LayerNormalization, self).__init__(**kwargs)
    self.axis = to_list(axis)
    self.gamma_init = initializers.get(gamma_init)
    self.beta_init = initializers.get(beta_init)
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.epsilon = epsilon
    self.supports_masking = True
def __init__(self, ratio, data_format=None, use_bias=True,
             kernel_initializer='glorot_uniform', bias_initializer='zeros',
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None, **kwargs):
    super(SE, self).__init__(**kwargs)
    self.ratio = ratio
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.supports_masking = True
def __init__(self, epsilon=1e-3, axis=-1, weights=None,
             beta_init='zero', gamma_init='one',
             gamma_regularizer=None, beta_regularizer=None, **kwargs):
    self.supports_masking = True
    self.beta_init = initializers.get(beta_init)
    self.gamma_init = initializers.get(gamma_init)
    self.epsilon = epsilon
    self.axis = axis
    self.gamma_regularizer = regularizers.get(gamma_regularizer)
    self.beta_regularizer = regularizers.get(beta_regularizer)
    self.initial_weights = weights
    super(FixedBatchNormalization, self).__init__(**kwargs)
def __init__(self, kernel_size, strides=(1, 1), padding='valid',
             depth_multiplier=1, data_format=None, activation=None, use_bias=True,
             depthwise_initializer='glorot_uniform', bias_initializer='zeros',
             depthwise_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             depthwise_constraint=None, bias_constraint=None, **kwargs):
    super(DepthwiseConv2D, self).__init__(
        filters=None,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        activation=activation,
        use_bias=use_bias,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        bias_constraint=bias_constraint,
        **kwargs)
    self.depth_multiplier = depth_multiplier
    self.depthwise_initializer = initializers.get(depthwise_initializer)
    self.depthwise_regularizer = regularizers.get(depthwise_regularizer)
    self.depthwise_constraint = constraints.get(depthwise_constraint)
    self.bias_initializer = initializers.get(bias_initializer)
def __init__(self, filters, kernel_size,
             kernel_initializer='glorot_uniform', activation=None, weights=None,
             padding='valid', strides=(1, 1), data_format=None,
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None,
             use_bias=True, **kwargs):
    if data_format is None:
        data_format = K.image_data_format()
    if padding not in {'valid', 'same', 'full'}:
        raise ValueError('Invalid border mode for CosineConvolution2D:', padding)
    self.filters = filters
    self.kernel_size = kernel_size
    self.nb_row, self.nb_col = self.kernel_size
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.activation = activations.get(activation)
    self.padding = padding
    self.strides = tuple(strides)
    self.data_format = normalize_data_format(data_format)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.use_bias = use_bias
    self.input_spec = [InputSpec(ndim=4)]
    self.initial_weights = weights
    super(CosineConvolution2D, self).__init__(**kwargs)
def __init__(self, kernel_size, strides=(1, 1), padding='valid',
             depth_multiplier=1, data_format=None, activation=None, use_bias=True,
             depthwise_initializer='glorot_uniform', bias_initializer='zeros',
             depthwise_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             depthwise_constraint=None, bias_constraint=None, **kwargs):
    super(DepthwiseConv2D, self).__init__(
        filters=None,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        activation=activation,
        use_bias=use_bias,
        bias_regularizer=bias_regularizer,
        activity_regularizer=activity_regularizer,
        bias_constraint=bias_constraint,
        **kwargs)
    self.depth_multiplier = depth_multiplier
    self.depthwise_initializer = initializers.get(depthwise_initializer)
    self.depthwise_regularizer = regularizers.get(depthwise_regularizer)
    self.depthwise_constraint = constraints.get(depthwise_constraint)
    self.bias_initializer = initializers.get(bias_initializer)
    self._padding = _preprocess_padding(self.padding)
    self._strides = (1,) + self.strides + (1,)
    self._data_format = "NHWC"
def __init__(self, units, activation='linear', weights=None,
             kernel_initializer='glorot_uniform', kernel_regularizer=None,
             kernel_constraint=None,
             bias_initializer='uniform', bias_regularizer=None, bias_constraint=None,
             activity_regularizer=None,
             bias=True, input_dim=None,
             factorization=simple_tensor_factorization(),
             **kwargs):
    self.activation = activations.get(activation)
    self.units = units
    self.input_dim = input_dim
    self.factorization = factorization
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.kernel_initializer = get_initializer(kernel_initializer)
    self.bias_initializer = get_initializer(bias_initializer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.bias = bias
    self.initial_weights = weights
    self.input_spec = [InputSpec(ndim=2)]
    if self.input_dim:
        kwargs['input_shape'] = (self.input_dim,)
    super(DenseTensor, self).__init__(**kwargs)
def __init__(self, filters_simple, filters_complex, nb_row, nb_col,
             init='glorot_uniform', activation='relu', weights=None,
             padding='valid', strides=(1, 1), data_format=K.image_data_format(),
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             W_constraint=None, bias_constraint=None,
             bias=True, **kwargs):
    if padding not in {'valid', 'same'}:
        raise Exception('Invalid border mode for Convolution2DEnergy:', padding)
    self.filters_simple = filters_simple
    self.filters_complex = filters_complex
    self.nb_row = nb_row
    self.nb_col = nb_col
    self.init = initializers.get(init, data_format=data_format)
    self.activation = activations.get(activation)
    assert padding in {'valid', 'same'}, 'padding must be in {valid, same}'
    self.padding = padding
    self.strides = tuple(strides)
    assert data_format in {'channels_last', 'channels_first'}, 'data_format must be in {tf, th}'
    self.data_format = data_format
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.W_constraint = constraints.UnitNormOrthogonal(filters_complex, data_format)
    self.bias_constraint = constraints.get(bias_constraint)
    self.bias = bias
    self.input_spec = [InputSpec(ndim=4)]
    self.initial_weights = weights
    super(Convolution2DEnergy, self).__init__(**kwargs)
def __init__(self, rank, kernel_size=3, data_format=None,
             kernel_initialization=.1, bias_initialization=1,
             kernel_regularizer=None, bias_regularizer=None,
             activity_regularizer=None,
             kernel_constraint=None, bias_constraint=None, **kwargs):
    super(_ConvGDN, self).__init__(**kwargs)
    self.rank = rank
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(1, rank, 'strides')
    self.padding = conv_utils.normalize_padding('same')
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(1, rank, 'dilation_rate')
    self.kernel_initializer = initializers.Constant(kernel_initialization)
    self.bias_initializer = initializers.Constant(bias_initialization)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    self.input_spec = InputSpec(ndim=self.rank + 2)
def __init__(self, filters, kernel_initializer='glorot_uniform',
             kernel_regularizer=None, kernel_constraint=kconstraints.NonNeg(),
             k_initializer='zeros', k_regularizer=None, k_constraint=None,
             tied_k=False, activity_regularizer=None,
             strides=1, padding='valid', dilation_rate=1,
             data_format=K.image_data_format(), **kwargs):
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(Conv2DSoftMinMax, self).__init__(**kwargs)
    self.filters = filters
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.k_initializer = initializers.get(k_initializer)
    self.k_regularizer = regularizers.get(k_regularizer)
    self.k_constraint = constraints.get(k_constraint)
    self.tied_k = tied_k
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2, 'dilation_rate')
    self.padding = conv_utils.normalize_padding(padding)
    self.input_spec = InputSpec(min_ndim=2)
    self.data_format = data_format
    self.supports_masking = True
def __init__(self, init='one', power_init=1, weights=None, axis=-1, fit=True, **kwargs):
    self.supports_masking = True
    self.init = initializations.get(init)
    self.initial_weights = weights
    self.axis = axis
    self.power_init = power_init
    self.fit = fit
    super(PowerReLU, self).__init__(**kwargs)
def __init__(self, quadratic_filters_ex=2, quadratic_filters_sup=2,
             W_quad_ex_initializer='glorot_uniform',
             W_quad_sup_initializer='glorot_uniform',
             W_lin_initializer='glorot_uniform',
             W_quad_ex_regularizer=None,
             W_quad_sup_regularizer=None,
             W_lin_regularizer=None,
             W_quad_ex_constraint=None,
             W_quad_sup_constraint=None,
             W_lin_constraint=None,
             **kwargs):
    self.quadratic_filters_ex = quadratic_filters_ex
    self.quadratic_filters_sup = quadratic_filters_sup
    self.W_quad_ex_initializer = initializers.get(W_quad_ex_initializer)
    self.W_quad_sup_initializer = initializers.get(W_quad_sup_initializer)
    self.W_lin_initializer = initializers.get(W_lin_initializer)
    self.W_quad_ex_constraint = constraints.get(W_quad_ex_constraint)
    self.W_quad_sup_constraint = constraints.get(W_quad_sup_constraint)
    self.W_lin_constraint = constraints.get(W_lin_constraint)
    self.W_quad_ex_regularizer = regularizers.get(W_quad_ex_regularizer)
    self.W_quad_sup_regularizer = regularizers.get(W_quad_sup_regularizer)
    self.W_lin_regularizer = regularizers.get(W_lin_regularizer)
    self.input_spec = [InputSpec(ndim=2)]
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(RustSTC, self).__init__(**kwargs)
def __init__(self, weights=None, kernel_initializer='glorot_uniform',
             alpha_initializer='ones', alpha_regularizer=None, alpha_constraint=None,
             beta_delta_initializer='ones', beta_delta_regularizer=None,
             beta_delta_constraint=None,
             gamma_eta_initializer='ones', gamma_eta_regularizer=None,
             gamma_eta_constraint=None,
             rho_initializer='ones', rho_regularizer=None, rho_constraint=None,
             **kwargs):
    self.alpha_initializer = initializers.get(alpha_initializer)
    self.beta_delta_initializer = initializers.get(beta_delta_initializer)
    self.gamma_eta_initializer = initializers.get(gamma_eta_initializer)
    self.rho_initializer = initializers.get(rho_initializer)
    self.alpha_constraint = constraints.get(alpha_constraint)
    self.beta_delta_constraint = constraints.get(beta_delta_constraint)
    self.gamma_eta_constraint = constraints.get(gamma_eta_constraint)
    self.rho_constraint = constraints.get(rho_constraint)
    self.alpha_regularizer = regularizers.get(alpha_regularizer)
    self.beta_delta_regularizer = regularizers.get(beta_delta_regularizer)
    self.gamma_eta_regularizer = regularizers.get(gamma_eta_regularizer)
    self.rho_regularizer = regularizers.get(rho_regularizer)
    self.input_spec = [InputSpec(ndim=2)]
    if 'input_shape' not in kwargs and 'input_dim' in kwargs:
        kwargs['input_shape'] = (kwargs.pop('input_dim'),)
    super(NakaRushton, self).__init__(**kwargs)