Python sklearn.neural_network module: MLPRegressor() example source code

The following 11 code examples, extracted from open-source Python projects, illustrate how to use sklearn.neural_network.MLPRegressor().
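
Before the project examples, here is a minimal sketch of the basic MLPRegressor workflow; the data below is synthetic and purely illustrative:

from sklearn.neural_network import MLPRegressor
import numpy as np

# synthetic data: 100 samples, 3 features, noisy linear target (illustrative only)
rng = np.random.RandomState(0)
X = rng.rand(100, 3)
y = X @ np.array([1.0, -2.0, 0.5]) + 0.01 * rng.randn(100)

# a small single-hidden-layer network; max_iter raised so the adam solver can converge
model = MLPRegressor(hidden_layer_sizes=(10,), solver='adam',
                     max_iter=2000, random_state=0)
model.fit(X, y)
print(model.score(X, y))  # R^2 on the training data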

Project: dynamicpricing    Author: marcelja
def train_universal_model(self, features: dict):
        logging.debug('Start training universal model')
        universal_model = MLPRegressor(hidden_layer_sizes=(5,),
                                       activation='relu',
                                       solver='adam',
                                       learning_rate='adaptive',
                                       max_iter=1000,
                                       learning_rate_init=0.01,
                                       alpha=0.01)
        start_time = int(time() * 1000)
        f_vector = []
        s_vector = []
        for product_id, vector_tuple in features.items():
            f_vector.extend(vector_tuple[0])
            s_vector.extend(vector_tuple[1])
        universal_model.fit(f_vector, s_vector)
        end_time = int(time() * 1000)
        logging.debug('Finished training universal model')
        logging.debug('Training took {} ms'.format(end_time - start_time))
        self.set_universal_model_thread_safe(universal_model)
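
A hypothetical call to the method above might look like the sketch below; the structure of `features` (each product id mapped to a tuple of feature rows and target values) is inferred from how the two vectors are extended above, and `trainer` stands in for an instance of the surrounding class.

# hypothetical input, assuming each value is (feature_rows, target_values)
features = {
    'product_1': ([[0.2, 1.0], [0.4, 0.8]], [10.0, 12.0]),
    'product_2': ([[0.1, 0.5]], [7.5]),
}
trainer.train_universal_model(features)
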
Project: Parallel-SGD    Author: angadgill
def test_partial_fit_regression():
    # Test partial_fit on regression.
    # `partial_fit` should yield the same results as 'fit' for regression.
    X = Xboston
    y = yboston

    for momentum in [0, .9]:
        mlp = MLPRegressor(algorithm='sgd', max_iter=100, activation='relu',
                           random_state=1, learning_rate_init=0.01,
                           batch_size=X.shape[0], momentum=momentum)
        with warnings.catch_warnings(record=True):
            # catch convergence warning
            mlp.fit(X, y)
        pred1 = mlp.predict(X)
        mlp = MLPRegressor(algorithm='sgd', activation='relu',
                           learning_rate_init=0.01, random_state=1,
                           batch_size=X.shape[0], momentum=momentum)
        for i in range(100):
            mlp.partial_fit(X, y)

        pred2 = mlp.predict(X)
        assert_almost_equal(pred1, pred2, decimal=2)
        score = mlp.score(X, y)
        assert_greater(score, 0.75)
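
Note that the Parallel-SGD snippets appear to target a pre-0.18 development version of scikit-learn in which the optimizer was chosen with `algorithm=`; in released scikit-learn the parameter is `solver=` (and `'l-bfgs'` is spelled `'lbfgs'`). A rough equivalent of the incremental loop above, written against the released API with synthetic data, might look like:

from sklearn.neural_network import MLPRegressor
import numpy as np

rng = np.random.RandomState(1)
X = rng.rand(50, 4)
y = X.sum(axis=1)

# `solver` replaces the pre-release `algorithm` parameter
mlp = MLPRegressor(solver='sgd', activation='relu', learning_rate_init=0.01,
                   batch_size=X.shape[0], momentum=0.9, random_state=1)
for _ in range(100):
    mlp.partial_fit(X, y)
print(mlp.score(X, y))
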
Project: OpenAPS    Author: medicinexlab
def mlp_regression(parameter_array):
    layer_value = parameter_array[0]
    second_layer_value = parameter_array[1]
    learning_rate = parameter_array[2]
    return neural_network.MLPRegressor(hidden_layer_sizes=(layer_value,second_layer_value), activation='identity', solver='adam', alpha=1,
                                        batch_size='auto', learning_rate='constant', learning_rate_init=learning_rate, power_t=0.5,
                                        max_iter=200, shuffle=True, random_state=None, tol=0.0001, verbose=False, warm_start=False,
                                        momentum=0.9, nesterovs_momentum=True, early_stopping=False, validation_fraction=0.1, beta_1=0.9, beta_2=0.999, epsilon=1e-08)

# Dictionary with the name of the algorithm as the key and the function as the value
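
The comment above refers to a dictionary that maps algorithm names to constructor functions; the project's actual dictionary is not shown here, so the following is only an illustrative sketch (the name `ALGORITHM_DICT` and the parameter values are assumptions):

# hypothetical registry; each value is a function that returns an unfit estimator
ALGORITHM_DICT = {
    'mlp_regression': mlp_regression,
}

# parameter_array = [first layer size, second layer size, learning rate]
model = ALGORITHM_DICT['mlp_regression']([8, 4, 0.001])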
Project: finance-ml    Author: Omarkhursheed
def train():
    os.chdir(dname)
    for selected_stock in onlyfiles:
        df = pd.read_csv(os.path.join('data_files',selected_stock))
        #preprocessing the data
        df = df[['Adj. Open',  'Adj. High',  'Adj. Low',  'Adj. Close', 'Adj. Volume']]
        #measure of volatility
        df['HL_PCT'] = (df['Adj. High'] - df['Adj. Low']) / df['Adj. Low'] * 100.0
        df['PCT_change'] = (df['Adj. Close'] - df['Adj. Open']) / df['Adj. Open'] * 100.0
        df = df[['Adj. Close', 'HL_PCT', 'PCT_change', 'Adj. Volume']]
        forecast_col = 'Adj. Close'
        df.fillna(value=-99999, inplace=True)
        forecast_out = int(math.ceil(0.01 * len(df)))
        df['label'] = df[forecast_col].shift(-forecast_out)

        X = np.array(df.drop(['label'],1))
        X = preprocessing.scale(X)
        X_lately = X[-forecast_out:]
        X = X[:-forecast_out]

        df.dropna(inplace=True)
        y = np.array(df['label'])
        X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.2)

        svr = SVR()
        pickle.dump(svr,open(join(dname+'/models/svr_unfit/', selected_stock+'svr.sav'),'wb'))
        svr.fit(X_train, y_train)

        lr = LinearRegression()
        pickle.dump(lr,open(join(dname+'/models/lr_unfit/', selected_stock+'lr.sav'),'wb'))
        lr.fit(X_train, y_train)

        mlp = MLPRegressor()
        pickle.dump(mlp,open(join(dname+'/models/mlp_unfit/', selected_stock+'mlp.sav'),'wb'))
        mlp.fit(X_train, y_train)

        pickle.dump(svr,open(join(dname+'/models/svr_fit/', selected_stock+'svr.sav'),'wb'))
        pickle.dump(lr,open(join(dname+'/models/lr_fit/', selected_stock+'lr.sav'),'wb'))
        pickle.dump(mlp,open(join(dname+'/models/mlp_fit/', selected_stock+'mlp.sav'),'wb'))

        print(selected_stock+" - trained")
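
As an aside, the `cross_validation` module used above was deprecated in scikit-learn 0.18 and removed in 0.20; with a current scikit-learn the same split would be written as in this sketch (X and y as built in the function above):

from sklearn.model_selection import train_test_split  # replaces sklearn.cross_validation

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
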
Project: dask-ml    Author: dask
def test_basic(self, single_chunk_classification):
        X, y = single_chunk_classification
        a = nn.ParitalMLPRegressor(random_state=0)
        b = nn_.MLPRegressor(random_state=0)
        a.fit(X, y)
        b.partial_fit(X, y)
        assert_estimator_equal(a, b)
Project: sensorimotorProject    Author: iaakhter
def train(self):
        print "Training"
        #xTrain = processImages.convertImageToArray(self.numberOfExamples, self.imagePath)
        xTrain = processImages.constructXFromTargetFocusLocations(self.numberOfExamples, 4,self.imagePath)
        yTrain = processImages.convertLabelToArray(self.numberOfExamples, 2,self.labelPath)
        yTrain = np.reshape(yTrain,(xTrain.shape[0],2))
        self.model = MLPRegressor(hidden_layer_sizes=(30,),alpha=1.0)
        self.model.fit(xTrain,yTrain)
        joblib.dump(self.model,'sklearnModel.pkl')
Project: dynamicpricing    Author: marcelja
def train_model_for_id(self, product_id, data):
        product_model = MLPRegressor(hidden_layer_sizes=(5,),
                                     activation='relu',
                                     solver='adam',
                                     learning_rate='adaptive',
                                     max_iter=1000,
                                     learning_rate_init=0.01,
                                     alpha=0.01)
        product_model.fit(data[0], data[1])
        self.set_product_model_thread_safe(product_id, product_model)
Project: Parallel-SGD    Author: angadgill
def test_lbfgs_regression():
    # Test lbfgs on the Boston dataset, a regression problem.
    X = Xboston
    y = yboston
    for activation in ACTIVATION_TYPES:
        mlp = MLPRegressor(algorithm='l-bfgs', hidden_layer_sizes=50,
                           max_iter=150, shuffle=True, random_state=1,
                           activation=activation)
        mlp.fit(X, y)
        assert_greater(mlp.score(X, y), 0.95)
Project: Parallel-SGD    Author: angadgill
def test_multioutput_regression():
    # Test that multi-output regression works as expected.
    X, y = make_regression(n_samples=200, n_targets=5)
    mlp = MLPRegressor(algorithm='l-bfgs', hidden_layer_sizes=50, max_iter=200,
                       random_state=1)
    mlp.fit(X, y)
    assert_greater(mlp.score(X, y), 0.9)
Project: Power-Consumption-Prediction    Author: YoungGod
def choose_best_lag(seq, pre_period, lags = range(1,30)):
    """
    ????lazzy model,?????
    ???(?????????)
    """
    models = []
    # standardize the series
    std_sca = StandardScaler().fit(np.array(seq).reshape(-1,1))
    seq = std_sca.transform(np.array(seq).reshape(-1,1))

    # split the samples into training and test sets to evaluate each candidate lag
    from sklearn.model_selection import train_test_split
    for input_lag in lags:
#        window = input_lag + pre_period
        X, Y = create_dataset(seq.flatten(), input_lag, pre_period)
        # do more cv
#        for state in range(0,3):
        err = 0.0
        X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.01, random_state=0)
        for lag in lags:
            hidden = (lag + pre_period + 3) // 2  # integer size for the hidden layer
            reg = MLPRegressor(activation = 'relu',hidden_layer_sizes = (hidden,),
                               max_iter=10000,learning_rate='adaptive',
                               tol=0.0,warm_start=True,solver='adam')
            reg.fit(X_train,y_train)

            y_pred = reg.predict(X_test)
            err += err_evaluation(y_pred,y_test)
        models.append((err / len(X_test), input_lag))  # key the averaged error by the candidate input lag
    models.sort()
    best_lag = models[0][1]

    return models, best_lag

# df for dataframe, s for series
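
A hypothetical call to `choose_best_lag`, assuming the helper functions `create_dataset` and `err_evaluation` are defined elsewhere in the project, might look like:

import numpy as np

# synthetic stand-in for a daily load curve (illustrative only)
seq = np.sin(np.linspace(0, 60, 720)) + 0.05 * np.random.RandomState(0).randn(720)
models, best_lag = choose_best_lag(seq, pre_period=24, lags=range(1, 30))
print(best_lag)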
Project: finance-ml    Author: Omarkhursheed
def train():
    if request.method == 'POST':
        selected_stock = request.form['file_select']
        os.chdir(dname)

        df = pd.read_csv(os.path.join('data_files',selected_stock))

        #preprocessing the data
        df = df[['Adj. Open',  'Adj. High',  'Adj. Low',  'Adj. Close', 'Adj. Volume']]
        #measure of volatility
        df['HL_PCT'] = (df['Adj. High'] - df['Adj. Low']) / df['Adj. Low'] * 100.0
        df['PCT_change'] = (df['Adj. Close'] - df['Adj. Open']) / df['Adj. Open'] * 100.0
        df = df[['Adj. Close', 'HL_PCT', 'PCT_change', 'Adj. Volume']]
        forecast_col = 'Adj. Close'
        df.fillna(value=-99999, inplace=True)
        forecast_out = int(math.ceil(0.01 * len(df)))
        df['label'] = df[forecast_col].shift(-forecast_out)

        X = np.array(df.drop(['label'],1))
        X = preprocessing.scale(X)
        X_lately = X[-forecast_out:]
        X = X[:-forecast_out]

        df.dropna(inplace=True)

        y = np.array(df['label'])

        X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.2)
        svr = SVR()
        pickle.dump(svr,open(join(dname+'/models/svr_unfit/', selected_stock+'svr.sav'),'wb'))
        svr.fit(X_train, y_train)

        lr = LinearRegression()
        pickle.dump(lr,open(join(dname+'/models/lr_unfit/', selected_stock+'lr.sav'),'wb'))
        lr.fit(X_train, y_train)

        mlp = MLPRegressor()
        pickle.dump(mlp,open(join(dname+'/models/mlp_unfit/', selected_stock+'mlp.sav'),'wb'))
        mlp.fit(X_train, y_train)

        pickle.dump(svr,open(join(dname+'/models/svr_fit/', selected_stock+'svr.sav'),'wb'))
        pickle.dump(lr,open(join(dname+'/models/lr_fit/', selected_stock+'lr.sav'),'wb'))
        pickle.dump(mlp,open(join(dname+'/models/mlp_fit/', selected_stock+'mlp.sav'),'wb'))

        return adminsec()