The following 26 code examples, extracted from open-source Python projects, illustrate how to use keras.initializers.RandomNormal().
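Before the extracted examples, here is a minimal sketch (not taken from any of the projects below) of the typical pattern: build a RandomNormal initializer and pass it to a layer's kernel_initializer and bias_initializer arguments. The layer sizes and the seed are illustrative assumptions; the later examples assume imports along these lines.

from keras.models import Sequential
from keras.layers import Dense
from keras.initializers import RandomNormal

# Draw initial weights from N(0, 0.05^2); a fixed seed makes runs repeatable.
init = RandomNormal(mean=0.0, stddev=0.05, seed=42)

model = Sequential()
model.add(Dense(64, activation='relu', input_shape=(100,),
                kernel_initializer=init,
                bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))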
def __init__(self, filters, init_normal_stddev=0.01, **kwargs):
    """Init

    Parameters
    ----------
    filters : int
        Number of channels of the input feature map
    init_normal_stddev : float
        Standard deviation of the normal kernel initialization
    **kwargs:
        Passed to the superclass. See the Conv2D layer in Keras
    """
    self.filters = filters
    super(ConvOffset2D, self).__init__(
        self.filters * 2, (3, 3), padding='same', use_bias=False,
        kernel_initializer=RandomNormal(0, init_normal_stddev),
        **kwargs
    )
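The constructor above belongs to a deformable-convolution offset layer: ConvOffset2D subclasses Conv2D, learns 2 * filters offset channels, and keeps them near zero at the start of training via the small-stddev RandomNormal. A hypothetical usage, assuming the ConvOffset2D class from that project is importable and that the input has 32 channels, might look like this:

from keras.layers import Input, Conv2D
from keras.models import Model
# ConvOffset2D is the class whose __init__ is shown above; its import path is assumed.

inputs = Input(shape=(64, 64, 32))
offsets = ConvOffset2D(32, init_normal_stddev=0.01)(inputs)  # learn sampling offsets
features = Conv2D(64, (3, 3), padding='same', activation='relu')(offsets)
model = Model(inputs, features)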
def createBaseNetworkSmall(inputDim, inputLength):
    baseNetwork = Sequential()
    baseNetwork.add(Embedding(input_dim=inputDim, output_dim=inputDim,
                              input_length=inputLength))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createBaseNetworkLarge(inputDim, inputLength):
    baseNetwork = Sequential()
    baseNetwork.add(Embedding(input_dim=inputDim, output_dim=inputDim,
                              input_length=inputLength))
    baseNetwork.add(Conv1D(1024, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(1024, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(2048, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    baseNetwork.add(Dense(2048, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createBaseNetworkSmall(inputLength, inputDim):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           input_shape=(inputLength, inputDim),
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createBaseNetworkSmall(inputDim, inputLength):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createBaseNetworkLarge(inputDim, inputLength):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(1024, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(1024, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(2048, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    baseNetwork.add(Dense(2048, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def netSigmoid(inputLength, inputDim):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           input_shape=(inputLength, inputDim),
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createBaseNetworkSmall(inputDim, inputLength):
    baseNetwork = Sequential()
    baseNetwork.add(Embedding(input_dim=inputDim, output_dim=inputDim,
                              input_length=inputLength))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    baseNetwork.add(Dense(1024, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createBaseNetworkLarge(inputDim, inputLength):
    baseNetwork = Sequential()
    baseNetwork.add(Embedding(input_dim=inputDim, output_dim=inputDim,
                              input_length=inputLength))
    baseNetwork.add(Conv1D(1024, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(1024, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(Conv1D(1024, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.02),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.02)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(2048, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    baseNetwork.add(Dense(2048, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createBaseNetworkSmall(inputLength, inputDim):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           input_shape=(inputLength, inputDim),
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(128, activation='relu'))
    baseNetwork.add(Dropout(0.2))
    baseNetwork.add(Dense(128, activation='relu'))
    baseNetwork.add(Dropout(0.2))
    return baseNetwork
def __init__(self):
    self.x_dim = 784
    self.name = 'mnist/dcgan/discriminator'
    self.initializer = RandomNormal(mean=0.0, stddev=0.02, seed=None)
    self.regularizer = regularizers.l2(2.5e-5)
def __init__(self):
    self.z_dim = 100
    self.x_dim = 784
    self.name = 'mnist/dcgan/generator'
    self.initializer = RandomNormal(mean=0.0, stddev=0.02, seed=None)
    self.regularizer = regularizers.l2(2.5e-5)
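The two constructors above only store a RandomNormal initializer (stddev 0.02, the value commonly used for DCGANs) and an L2 regularizer as attributes; the layers that consume them live elsewhere in those projects. As a hypothetical illustration of how such stored attributes are typically wired into a layer, with the filter count and strides chosen arbitrarily:

from keras import regularizers
from keras.layers import Conv2D
from keras.initializers import RandomNormal

initializer = RandomNormal(mean=0.0, stddev=0.02, seed=None)
regularizer = regularizers.l2(2.5e-5)

# A discriminator-style strided convolution reusing the stored settings.
conv = Conv2D(64, (5, 5), strides=(2, 2), padding='same',
              kernel_initializer=initializer,
              kernel_regularizer=regularizer)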
def createSplitBaseNetworkSmall(inputLength, inputDim):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           input_shape=(inputLength, inputDim),
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    return baseNetwork
def netC256P3C256P3C256P3f128(inputLength, inputDim):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           input_shape=(inputLength, inputDim),
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(128, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def netC256P3C256P3f32(inputLength, inputDim):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           input_shape=(inputLength, inputDim),
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(32, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def netC256P3C256P3f64(inputLength, inputDim):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           input_shape=(inputLength, inputDim),
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(64, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createBaseNetworkSmaller(inputLength, inputDim):
    baseNetwork = Sequential()
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           input_shape=(inputLength, inputDim),
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 3, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Flatten())
    baseNetwork.add(Dense(32, activation='relu'))
    baseNetwork.add(Dropout(0.5))
    return baseNetwork
def createSplitBaseNetworkSmall(inputDim, inputLength):
    baseNetwork = Sequential()
    baseNetwork.add(Embedding(input_dim=inputDim, output_dim=inputDim,
                              input_length=inputLength))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    baseNetwork.add(Conv1D(256, 7, strides=1, padding='valid', activation='relu',
                           kernel_initializer=RandomNormal(mean=0.0, stddev=0.05),
                           bias_initializer=RandomNormal(mean=0.0, stddev=0.05)))
    baseNetwork.add(MaxPooling1D(pool_size=3, strides=3))
    return baseNetwork
def create_actor_network(self, state_size, action_dim):
    print("Now we build the model")
    S = Input(shape=[state_size])
    h0 = Dense(HIDDEN1_UNITS, activation='relu')(S)
    h1 = Dense(HIDDEN2_UNITS, activation='relu')(h0)
    # ,init=lambda shape, name: RandomNormal(shape, scale=1e-4, name=name)
    V = Dense(action_dim, activation='tanh')(h1)
    # Keras 2 uses the keyword arguments inputs/outputs (the original used input/output).
    model = Model(inputs=S, outputs=V)
    return model, model.trainable_weights, S
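The commented-out line in the example above dates from the Keras 1 initializer API. One way to express its intent in Keras 2 is to pass a small-stddev RandomNormal as the kernel_initializer of the final actor layer, so the tanh outputs start near zero. The sketch below is an assumption-laden rewrite, not the original project's code; HIDDEN1_UNITS and HIDDEN2_UNITS are constants defined elsewhere in that project, with placeholder values used here.

from keras.layers import Input, Dense
from keras.models import Model
from keras.initializers import RandomNormal

HIDDEN1_UNITS = 300  # placeholder values; the original project defines these constants
HIDDEN2_UNITS = 600

def create_actor_network(state_size, action_dim):
    S = Input(shape=[state_size])
    h0 = Dense(HIDDEN1_UNITS, activation='relu')(S)
    h1 = Dense(HIDDEN2_UNITS, activation='relu')(h0)
    # Small random initial weights keep the initial tanh actions close to zero.
    V = Dense(action_dim, activation='tanh',
              kernel_initializer=RandomNormal(mean=0.0, stddev=1e-4))(h1)
    model = Model(inputs=S, outputs=V)
    return model, model.trainable_weights, S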