Python scipy.optimize module: basinhopping() code examples

The following 6 code examples, extracted from open-source Python projects, illustrate how to use scipy.optimize.basinhopping().

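Before the project snippets, here is a minimal self-contained sketch of the basinhopping API; the one-dimensional objective below is the standard toy example from the SciPy documentation, not code from any of the projects:

import numpy as np
from scipy.optimize import basinhopping

# A toy multimodal objective; any callable f(x) -> float works.
def objective(x):
    return np.cos(14.5 * x[0] - 0.3) + (x[0] + 0.2) * x[0]

# basinhopping perturbs x, runs a local minimizer from the perturbed
# point, and applies a Metropolis acceptance test; minimizer_kwargs
# are forwarded to scipy.optimize.minimize.
result = basinhopping(objective, x0=[1.0], niter=100,
                      minimizer_kwargs={"method": "L-BFGS-B"})
print(result.x, result.fun)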
Project: cellstar    Author: Fafa87
import logging

import scipy.optimize as opt

logger = logging.getLogger(__name__)

def optimize_basinhopping(params_to_optimize, distance_function, time_percent=100):
    minimizer_kwargs = {"method": "COBYLA"}
    # bounds = ContourBounds
    # minimizer_kwargs = {"method": "L-BFGS-B", "bounds": list(zip(bounds.xmin, bounds.xmax))}
    bounds = None  # no accept_test here, so the default Metropolis criterion is used
    # niter must be an integer; scale the hop budget by the available time.
    result = opt.basinhopping(distance_function, params_to_optimize, accept_test=bounds,
                              minimizer_kwargs=minimizer_kwargs,
                              niter=int(35 * time_percent / 100))
    logger.debug("Opt finished: " + str(result))
    return result.x, result.fun
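The commented-out variant passes a bounds object both to the local minimizer and as accept_test. SciPy's documented pattern for such an accept_test object looks roughly like this; the class name and fields are illustrative, and cellstar's ContourBounds/RankBounds presumably play this role:

import numpy as np

class Bounds(object):
    """Box-constraint accept_test in the style of the SciPy basinhopping docs."""
    def __init__(self, xmin, xmax):
        self.xmin = np.array(xmin)
        self.xmax = np.array(xmax)

    def __call__(self, **kwargs):
        # basinhopping calls this with the proposed point as x_new;
        # returning False rejects the hop.
        x = kwargs["x_new"]
        return bool(np.all(x >= self.xmin) and np.all(x <= self.xmax))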


Project: senti    Author: stevenxxiu
# Method of a senti model class; relies on module-level imports of os, logging,
# numpy as np, sklearn's Binarizer, LabelEncoder, VotingClassifier, and
# scipy.optimize.basinhopping.
def fit_voting(self):
    voting = 'soft'
    names = [
        # 'svm(word_n_grams,char_n_grams,all_caps,hashtags,punctuations,punctuation_last,emoticons,emoticon_last,'
        # 'elongated,negation_count)',
        # 'logreg(w2v_doc)',
        # 'logreg(w2v_word_avg_google)',
        'word2vec_bayes',
        'cnn_word(embedding=google)',
        'rnn_word(embedding=google)',
    ]
    classifiers = [ExternalModel({
        self.val_docs: os.path.join(self.data_dir, 'results/val/{}.json'.format(name)),
        self.test_docs: os.path.join(self.data_dir, 'results/test/{}.json'.format(name)),
    }) for name in names]
    all_scores = []
    for classifier in classifiers:
        scores = classifier.predict_proba(self.val_docs)
        if voting == 'hard':
            scores = Binarizer(threshold=1 / 3).transform(scores)
        all_scores.append(scores)
    all_scores = np.array(all_scores)
    all_scores_first, all_scores_rest = all_scores[0], all_scores[1:]
    le = LabelEncoder().fit(self.classes_)
    val_label_indexes = le.transform(self.val_labels())
    # assume w_0 = 1, since w is invariant to scaling;
    # the objective is the negated count of correct validation predictions
    w = basinhopping(
        lambda w_: -(val_label_indexes == np.argmax((
            all_scores_first + all_scores_rest * w_.reshape((len(w_), 1, 1))
        ).sum(axis=0), axis=1)).sum(), np.ones(len(classifiers) - 1), niter=1000,
        minimizer_kwargs=dict(method='L-BFGS-B', bounds=[(0, None)] * (len(classifiers) - 1))
    ).x
    w = np.hstack([[1], w])
    w /= w.sum()
    logging.info('w: {}'.format(w))
    estimator = VotingClassifier(list(zip(names, classifiers)), voting=voting, weights=w)
    estimator.le_ = le
    estimator.estimators_ = classifiers
    return 'vote({})'.format(','.join(names)), estimator
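The snippet above searches for ensemble weights that maximize validation accuracy. A stripped-down sketch of the same idea on made-up data (all shapes and arrays here are hypothetical):

import numpy as np
from scipy.optimize import basinhopping

# Hypothetical per-model class probabilities: (n_models, n_docs, n_classes).
rng = np.random.default_rng(0)
all_scores = rng.random((3, 100, 3))
labels = rng.integers(0, 3, size=100)

def neg_accuracy(w_):
    # Fix the first weight at 1; the weights are scale-invariant.
    w = np.concatenate([[1.0], w_]).reshape((-1, 1, 1))
    pred = np.argmax((all_scores * w).sum(axis=0), axis=1)
    return -(pred == labels).sum()

w = basinhopping(neg_accuracy, np.ones(2), niter=200,
                 minimizer_kwargs=dict(method='L-BFGS-B',
                                       bounds=[(0, None)] * 2)).x

Because the objective is piecewise constant in the weights, the local L-BFGS-B steps see a zero gradient almost everywhere; the random hops do most of the searching, which is presumably why the original uses niter=1000.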
Project: cellstar    Author: Fafa87
def optimize_basinhopping(params_to_optimize, distance_function):
    bounds = RankBounds
    # minimizer_kwargs = {"method": "COBYLA", "bounds": bounds}
    # zip() must be materialized: L-BFGS-B expects a sequence of (min, max) pairs.
    minimizer_kwargs = {"method": "L-BFGS-B", "bounds": list(zip(bounds.xmin, bounds.xmax))}
    result = opt.basinhopping(distance_function, params_to_optimize, accept_test=bounds,
                              minimizer_kwargs=minimizer_kwargs, niter=170)
    logger.debug("Opt finished: " + str(result))
    return result.x, result.fun
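Assuming a bounds object shaped like the Bounds sketch after the first cellstar snippet (the values below are made up), the same object serves two roles: hard box constraints for each local L-BFGS-B step, and an accept_test that rejects hops landing outside the box:

bounds = Bounds(xmin=[0.0, 0.0], xmax=[1.0, 10.0])  # hypothetical ranges
result = opt.basinhopping(
    lambda p: (p[0] - 0.3) ** 2 + (p[1] - 4.0) ** 2,  # toy objective
    [0.5, 5.0],
    accept_test=bounds,
    minimizer_kwargs={"method": "L-BFGS-B",
                      "bounds": list(zip(bounds.xmin, bounds.xmax))},
    niter=50)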


Project: learning-to-prune    Author: timvieira
import numpy as np
import pylab as pl
from scipy.optimize import basinhopping

# F(w, x) and dFdx(w, x) are the parametric curve and its x-derivative,
# defined elsewhere in the project.

def fit(X, Y):
    """
    Fit a parametric curve to the Pareto frontier.
    """

    if 0:
        # Plot random cross sections of the fit objective, which is nonconvex.
        # The cross sections helped me determine the positivity constraints
        # and that L2 regression is better than L1 regression.
        from arsenal.math import spherical
        fff = lambda w: np.sum([(F(w, x) - y)**2 for x, y in zip(X, Y)])
        x0 = np.array([-1, -1, 0, -1])
        for _ in range(10):
            d = spherical(4)
            xx = np.linspace(-10, 10, 100)
            yy = [fff(x0 + a*d) for a in xx]
            pl.figure()
            pl.plot(xx, yy)
            pl.ylim(0, min(100, max(yy)))
            pl.show()

    # Minimize mean squared error with basin hopping to avoid local minima.
    w = basinhopping(lambda w: np.sum([(F(w, x) - y)**2 for x, y in zip(X, Y)]),
                     np.array([0, 0, 0, 0]), niter=100).x

    # Return closures over the best parameters: the fitted curve and its derivative.
    return (lambda x: F(w, x),
            lambda x: dFdx(w, x))
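A runnable version needs a concrete F. The four-parameter form below is purely hypothetical (the project's actual curve is defined elsewhere and may differ), but it shows how fit() would be exercised:

import numpy as np

# Hypothetical four-parameter curve; NOT the project's actual F.
def F(w, x):
    a, b, c, d = w
    return a + b * x + c * np.exp(d * x)

def dFdx(w, x):
    a, b, c, d = w
    return b + c * d * np.exp(d * x)

X = np.linspace(0.1, 1.0, 20)
Y = 1 + 2 * X + 0.5 * np.exp(-3 * X)  # synthetic "frontier" points

curve, slope = fit(X, Y)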
Project: oktopus    Author: KeplerGO
# Method of an oktopus model class; minimize, differential_evolution, and
# basinhopping come from scipy.optimize, gp_minimize from skopt.
def fit(self, optimizer='minimize', **kwargs):
    """
    Minimizes the :func:`evaluate` function using :func:`scipy.optimize.minimize`,
    :func:`scipy.optimize.differential_evolution`,
    :func:`scipy.optimize.basinhopping`, or :func:`skopt.gp.gp_minimize`.

    Parameters
    ----------
    optimizer : str
        Optimization algorithm. Options are::

            - ``'minimize'`` uses :func:`scipy.optimize.minimize`

            - ``'differential_evolution'`` uses :func:`scipy.optimize.differential_evolution`

            - ``'basinhopping'`` uses :func:`scipy.optimize.basinhopping`

            - ``'gp_minimize'`` uses :func:`skopt.gp.gp_minimize`

        ``'minimize'`` is usually robust enough and is therefore recommended
        whenever a good initial guess can be provided. The remaining options
        are global optimizers, which may give better results precisely in
        cases where a close enough initial guess cannot be obtained trivially.
    kwargs : dict
        Additional keyword arguments passed to the chosen optimizer.

    Returns
    -------
    opt_result : :class:`scipy.optimize.OptimizeResult` object
        Object containing the results of the optimization process.
        Note: this is also stored in **self.opt_result**.
    """

    if optimizer == 'minimize':
        self.opt_result = minimize(self.evaluate, **kwargs)
    elif optimizer == 'differential_evolution':
        self.opt_result = differential_evolution(self.evaluate, **kwargs)
    elif optimizer == 'basinhopping':
        self.opt_result = basinhopping(self.evaluate, **kwargs)
    elif optimizer == 'gp_minimize':
        self.opt_result = gp_minimize(self.evaluate, **kwargs)
    else:
        raise ValueError("optimizer {} is not available".format(optimizer))

    return self.opt_result
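Since the keyword arguments are forwarded unchanged, each backend's required arguments must be supplied by the caller. A hypothetical usage sketch, assuming a model instance with a two-parameter evaluate function:

# 'minimize' and 'basinhopping' need a starting point x0;
# 'differential_evolution' needs bounds instead.
model.fit(optimizer='minimize', x0=[1.0, 0.5])
model.fit(optimizer='basinhopping', x0=[1.0, 0.5], niter=50)
model.fit(optimizer='differential_evolution', bounds=[(0, 2), (0, 1)])
print(model.opt_result.x, model.opt_result.fun)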
Project: KDDCUP2016    Author: hugochan
# Method of a Bayesian-optimization helper class; numpy, and
# minimize/basinhopping from scipy.optimize, are imported at module level.
def acq_max(self, gp, ymax, restarts, bh_steps, Bounds):
    '''A function to find the maximum of the acquisition function using
    the 'L-BFGS-B' method.

    Parameters
    ----------
    gp : A Gaussian process fitted to the relevant data.

    ymax : The current maximum known value of the target function.

    restarts : The number of times the minimization is to be repeated.
        A larger number of restarts improves the chances of finding
        the true maximum.

    bh_steps : The number of basin-hopping iterations (used only by the
        commented-out basinhopping variant below).

    Bounds : The variable bounds that limit the search for the maximum
        of the acquisition function.

    Returns
    -------
    x_max : The arg max of the acquisition function.
    '''

    x_max = Bounds[:, 0]
    ei_max = 0

    for i in range(restarts):
        # Sample a starting point at random within the bounds.
        x_try = numpy.asarray([numpy.random.uniform(x[0], x[1], size=1) for x in Bounds]).T

        # Find the minimum of minus the acquisition function.
        '''
        res = basinhopping(lambda x: -self.ac(x, gp=gp, ymax=ymax),
                           x0=x_try, niter=bh_steps, T=2, stepsize=0.1,
                           minimizer_kwargs={'bounds': Bounds, 'method': 'L-BFGS-B'})
        '''

        res = minimize(lambda x: -self.ac(x, gp=gp, ymax=ymax), x_try,
                       bounds=Bounds, method='L-BFGS-B')

        # Store it if better than the previous maximum.
        if -res.fun >= ei_max:
            x_max = res.x
            ei_max = -res.fun

        # print(-res.fun, ei_max)

    return x_max


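The commented-out block shows the basinhopping variant the author tried before settling on repeated L-BFGS-B restarts. Both strategies return a scipy.optimize.OptimizeResult, so the res.x / res.fun bookkeeping is identical either way, as this self-contained toy comparison illustrates (the objective and bounds are made up):

import numpy as np
from scipy.optimize import basinhopping, minimize

def neg_acq(x):
    # Toy stand-in for a negated acquisition function: multimodal in 1-D.
    return np.sin(3 * x[0]) + 0.1 * x[0] ** 2

bounds = np.array([[-5.0, 5.0]])
x_try = np.random.uniform(bounds[0, 0], bounds[0, 1], size=1)

# A single local search from a random start...
res_local = minimize(neg_acq, x_try, bounds=bounds, method='L-BFGS-B')

# ...versus a basin-hopping search from the same start.
res_bh = basinhopping(neg_acq, x0=x_try, niter=25, T=2, stepsize=0.1,
                      minimizer_kwargs={'bounds': bounds, 'method': 'L-BFGS-B'})

print(res_local.x, res_local.fun)
print(res_bh.x, res_bh.fun)

Whether T=2 and stepsize=0.1 suit a given acquisition function is a tuning question; the snippet's final code sidesteps it by relying on many random restarts instead.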