Python scipy.optimize module: differential_evolution() example source code

We extracted the following 7 code examples from open-source Python projects to illustrate how to use scipy.optimize.differential_evolution().
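
Before the project snippets, here is a minimal, self-contained sketch of the basic call: differential_evolution() takes an objective function and a sequence of (min, max) bounds, one pair per parameter, and returns a scipy.optimize.OptimizeResult.

from scipy.optimize import differential_evolution

# Minimize the Rosenbrock function over a 2-D box.
def rosenbrock(x):
    return (1 - x[0]) ** 2 + 100 * (x[1] - x[0] ** 2) ** 2

bounds = [(-5, 5), (-5, 5)]  # one (min, max) pair per parameter

result = differential_evolution(rosenbrock, bounds, seed=42)
print(result.x)    # best parameters found, close to [1, 1]
print(result.fun)  # objective value at that point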

Project: pastas    Author: pastas
def __init__(self, model, tmin=None, tmax=None, noise=True, freq='D'):
        BaseSolver.__init__(self)

        self.freq = freq
        self.model = model
        self.tmin = tmin
        self.tmax = tmax
        self.noise = noise
        self.parameters = self.model.parameters.initial.values
        self.vary = self.model.parameters.vary.values.astype('bool')
        self.pmin = self.model.parameters.pmin.values[self.vary]
        self.pmax = self.model.parameters.pmax.values[self.vary]
        # differential_evolution expects an indexable sequence of (min, max)
        # pairs, so materialize the zip iterator on Python 3
        self.fit = differential_evolution(self.objfunction,
                                          list(zip(self.pmin, self.pmax)))
        self.optimal_params = self.model.parameters.initial.values
        # read the solution from the OptimizeResult's .x attribute instead of
        # indexing into dict values, which is fragile and fails on Python 3
        self.optimal_params[self.vary] = self.fit.x
        self.report = str(self.fit)
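
The bounds above are built by pairing per-parameter minima and maxima. A minimal standalone sketch of the same pattern, with hypothetical values:

import numpy as np
from scipy.optimize import differential_evolution

pmin = np.array([0.0, -1.0])    # hypothetical lower bounds
pmax = np.array([10.0, 1.0])    # hypothetical upper bounds
bounds = list(zip(pmin, pmax))  # [(0.0, 10.0), (-1.0, 1.0)]

result = differential_evolution(lambda p: np.sum(p ** 2), bounds)
print(result.x)  # near [0.0, 0.0]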
Project: elfi    Author: elfi-dev
def stochastic_optimization(fun, bounds, maxiter=1000, polish=True, seed=0):
    """Find the minimum of function 'fun' in 'maxiter' iterations.

    Parameters
    ----------
    fun : callable
        Function to minimize.
    bounds : list of tuples
        Bounds for each parameter.
    maxiter : int, optional
        Maximum number of iterations.
    polish : bool, optional
        Whether to "polish" the result with a final local minimization.
    seed : int, optional
        Seed for the random number generator.

    See scipy.optimize.differential_evolution for details.

    Returns
    -------
    Tuple of the coordinates of the minimum found and the corresponding function value.

    """
    result = differential_evolution(
        func=fun, bounds=bounds, maxiter=maxiter, polish=polish, init='latinhypercube', seed=seed)
    return result.x, result.fun


# TODO: allow argument for specifying the optimization algorithm
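
A hypothetical call of the helper above, using a simple sphere objective for illustration:

def sphere(x):
    return sum(xi ** 2 for xi in x)

coords, value = stochastic_optimization(sphere, bounds=[(-1, 1)] * 3,
                                        maxiter=100, seed=1)
print(coords)  # near the origin
print(value)   # near 0.0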
Project: kernel_tuner    Author: benvanwerkhoven
def tune(runner, kernel_options, device_options, tuning_options):
    """ Find the best performing kernel configuration in the parameter space

    :param runner: A runner from kernel_tuner.runners
    :type runner: kernel_tuner.runner

    :param kernel_options: A dictionary with all options for the kernel.
    :type kernel_options: kernel_tuner.interface.Options

    :param device_options: A dictionary with all options for the device
        on which the kernel should be tuned.
    :type device_options: kernel_tuner.interface.Options

    :param tuning_options: A dictionary with all options regarding the tuning
        process.
    :type tuning_options: kernel_tuner.interface.Options

    :returns: A list of dictionaries for executed kernel configurations and their
        execution times, and a dictionary with information about the
        hardware/software environment on which the tuning took place.
    :rtype: list(dict()), dict()

    """

    results = []

    #build a bounds array as needed for the optimizer
    bounds = minimize.get_bounds(tuning_options.tune_params)

    #call the differential evolution optimizer
    opt_result = differential_evolution(_cost_func, bounds,
                                        args=(kernel_options, tuning_options, runner, results),
                                        maxiter=1, polish=False, disp=tuning_options.verbose)

    if tuning_options.verbose:
        print(opt_result.message)
        print('best config:', minimize.snap_to_nearest_config(opt_result.x, tuning_options.tune_params))

    return results, runner.dev.get_environment()
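
For reference, SciPy unpacks the args tuple and appends its contents to every evaluation of the cost function. A minimal sketch of that mechanism (the names below are illustrative, not kernel_tuner's API):

from scipy.optimize import differential_evolution

def cost(x, scale, offset):
    # scale and offset arrive via the args parameter on every call
    return scale * (x[0] - offset) ** 2

result = differential_evolution(cost, [(-10, 10)], args=(3.0, 2.0))
print(result.x)  # close to [2.0]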
Project: ikdb    Author: krishauser
def __init__(self,method='auto'):
        if method == 'auto':
            #Runs the DIRECT method
            #method = 'DIRECT'
            #Runs the DIRECT method then cleans it up with the default local
            #optimizer
            #method = ['DIRECT','auto']
            #Runs the scipy differential evolution technique
            method = 'scipy.differential_evolution'
            #Runs random restarts using scipy as a local optimizer
            #method = 'random-restart.scipy'
            #Runs random restarts using pyOpt as a local optimizer
            #method = 'random-restart.pyOpt.SLSQP'
        self.method = method
        self.seed = None
Project: piecewise_linear_fit_py    Author: cjekel
def fit(self, numberOfSegments, **kwargs):
        #   a function which uses differential evolution to find the optimum
        #   location of break points for a given number of line segments by
        #   minimizing the sum of the squares of the errors
        #
        #   input:
        #   the number of line segments that you want to find
        #   the optimum break points for
        #   ex:
        #   breaks = fit(3)
        #
        #   output:
        #   returns the break points of the optimal piecewise continuous lines

        self.numberOfSegments = int(numberOfSegments)
        self.numberOfParameters = self.numberOfSegments+1

        #   calculate the number of variables I have to solve for
        self.nVar = self.numberOfSegments - 1

        #   initialize the bounds of the optimization
        bounds = np.zeros([self.nVar, 2])
        bounds[:,0] = self.break0
        bounds[:,1] = self.breakN

        if len(kwargs) == 0:
            res = differential_evolution(self.fitWithBreaksOpt, bounds, strategy='best1bin',
                    maxiter=1000, popsize=50, tol=1e-3, mutation=(0.5, 1),
                    recombination=0.7, seed=None, callback=None, disp=False,
                    polish=True, init='latinhypercube', atol=1e-4)
        else:
            res = differential_evolution(self.fitWithBreaksOpt, bounds, **kwargs)
        print(res)

        self.SSr = res.fun

        var = np.sort(res.x)
        if np.isclose(var[0], var[1]):
            var[1] += 0.00001
        breaks = np.zeros(len(var)+2)
        breaks[1:-1] = var.copy()
        breaks[0] = self.break0
        breaks[-1] = self.breakN
        self.fitBreaks = breaks
        #   assign p
        self.fitWithBreaks(self.fitBreaks)

        return self.fitBreaks
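
The default call above tightens several of SciPy's knobs: popsize=50 uses a larger population than the default of 15 to explore the break-point space more thoroughly, and polish=True refines the best member with L-BFGS-B afterwards. A standalone sketch with the same settings on a toy objective:

from scipy.optimize import differential_evolution

def toy(x):
    return (x[0] - 0.3) ** 2

res = differential_evolution(toy, [(0.0, 1.0)], strategy='best1bin',
                             maxiter=1000, popsize=50, tol=1e-3,
                             mutation=(0.5, 1), recombination=0.7,
                             polish=True, init='latinhypercube', atol=1e-4)
print(res.x)  # close to [0.3]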
Project: oktopus    Author: KeplerGO
def fit(self, optimizer='minimize', **kwargs):
        """
        Minimizes the :func:`evaluate` function using :func:`scipy.optimize.minimize`,
        :func:`scipy.optimize.differential_evolution`,
        :func:`scipy.optimize.basinhopping`, or :func:`skopt.gp.gp_minimize`.

        Parameters
        ----------
        optimizer : str
            Optimization algorithm. Options are::

                - ``'minimize'`` uses :func:`scipy.optimize.minimize`

                - ``'differential_evolution'`` uses :func:`scipy.optimize.differential_evolution`

                - ``'basinhopping'`` uses :func:`scipy.optimize.basinhopping`

                - ``'gp_minimize'`` uses :func:`skopt.gp.gp_minimize`

            `'minimize'` is usually robust enough and is therefore recommended
            whenever a good initial guess can be provided. The remaining
            options are global optimizers, which may give better results
            precisely in cases where a close enough initial guess cannot be
            obtained trivially.
        kwargs : dict
            Dictionary for additional arguments.

        Returns
        -------
        opt_result : :class:`scipy.optimize.OptimizeResult` object
            Object containing the results of the optimization process.
            Note: this is also stored in **self.opt_result**.
        """

        if optimizer == 'minimize':
            self.opt_result = minimize(self.evaluate, **kwargs)
        elif optimizer == 'differential_evolution':
            self.opt_result = differential_evolution(self.evaluate, **kwargs)
        elif optimizer == 'basinhopping':
            self.opt_result = basinhopping(self.evaluate, **kwargs)
        elif optimizer == 'gp_minimize':
            self.opt_result = gp_minimize(self.evaluate, **kwargs)
        else:
            raise ValueError("optimizer {} is not available".format(optimizer))

        return self.opt_result
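
Because the keyword arguments are forwarded verbatim, the caller must supply each backend's mandatory arguments through kwargs: for example x0 for 'minimize' and bounds for 'differential_evolution'. A hypothetical call, assuming loss is an instance of this class whose evaluate() takes a two-parameter vector:

opt = loss.fit(optimizer='differential_evolution',
               bounds=[(0.0, 1.0), (0.0, 1.0)], seed=0)
print(opt.x, opt.fun)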
Project: RRMPG    Author: kratzert
def fit(self, qobs, prec, initial_state=0):
        """Fit the model to a timeseries of discharge using.

        This functions uses scipy's global optimizer (differential evolution)
        to find a good set of parameters for the model, so that the observed 
        discharge is simulated as good as possible.

        Args:
            qobs: Array of observed streamflow discharge.
            prec: Array of precipitation data.
            initial_state: (optional) Initial value for the storage.

        Returns:
            res: A scipy OptimizeResult class object.

        Raises:
            ValueError: If one of the inputs contains invalid values.
            TypeError: If one of the inputs has an incorrect datatype.

        """
        # Validation check of the inputs
        qobs = validate_array_input(qobs, np.float64, 'qobs')
        prec = validate_array_input(prec, np.float64, 'precipitation')

        # Check whether the precipitation contains negative values
        if check_for_negatives(prec):
            raise ValueError("The precipitation array contains negative values.")

        # Validation check of the initial state
        if not isinstance(initial_state, numbers.Number) or initial_state < 0:
            msg = ["The variable 'initial_state' must be a numerical scalar ",
                   "greater than or equal to 0."]
            raise TypeError("".join(msg))

        # Cast initial state as float
        initial_state = float(initial_state)

        # pack input arguments for scipy optimizer
        args = (prec, initial_state, qobs, self._dtype)
        bnds = tuple([self._default_bounds[p] for p in self._param_list])

        # call the actual optimizer function
        res = optimize.differential_evolution(_loss, bounds=bnds, args=args)

        return res
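
A hypothetical end-to-end call, assuming model is an instance of the model class this method belongs to, and that qobs and prec are equally long one-dimensional arrays:

import numpy as np

qobs = np.random.rand(365)  # hypothetical observed discharge, daily
prec = np.random.rand(365)  # hypothetical precipitation, daily

res = model.fit(qobs, prec, initial_state=0)
print(res.x)    # calibrated parameters, ordered as in model._param_list
print(res.fun)  # final value of the loss function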