Python sklearn.externals.joblib module: Memory() example source code

We extracted the following 9 code examples from open-source Python projects to illustrate how to use sklearn.externals.joblib.Memory().

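Before the project examples, here is a minimal, self-contained sketch of the pattern they all rely on: construct a Memory object pointing at a cache directory (or pass cachedir=None to disable caching) and wrap expensive callables with memory.cache(). The temporary directory and the function below are only for illustration.

from tempfile import mkdtemp

import numpy as np
from sklearn.externals.joblib import Memory

# A real directory enables on-disk caching; Memory(cachedir=None) disables it.
memory = Memory(cachedir=mkdtemp(), verbose=0)

def expensive_sum_of_squares(x):
    return np.sum(x ** 2)

cached_sum = memory.cache(expensive_sum_of_squares)
result = cached_sum(np.arange(1000))  # computed once, then read back from the cache
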
Example 1 | Project: modl | Author: arthurmensch
The __init__ of a nilearn-style base estimator: a disabled Memory(cachedir=None) is the default for the memory parameter, which is stored on the instance along with memory_level.
def __init__(self,
                 mask=None, smoothing_fwhm=None,
                 standardize=True, detrend=True,
                 low_pass=None, high_pass=None, t_r=None,
                 target_affine=None, target_shape=None,
                 mask_strategy='epi', mask_args=None,
                 memory=Memory(cachedir=None),
                 memory_level=2,
                 n_jobs=1, verbose=0, ):
        self.mask = mask
        self.smoothing_fwhm = smoothing_fwhm
        self.standardize = standardize
        self.detrend = detrend
        self.low_pass = low_pass
        self.high_pass = high_pass
        self.t_r = t_r
        self.target_affine = target_affine
        self.target_shape = target_shape
        self.mask_strategy = mask_strategy
        self.mask_args = mask_args
        self.memory = memory
        self.memory_level = memory_level
        self.n_jobs = n_jobs
        self.verbose = verbose
Example 2 | Project: modl | Author: arthurmensch
A dictionary-learning estimator __init__ that forwards the memory and memory_level arguments to BaseNilearnEstimator.__init__, again defaulting to Memory(cachedir=None).
def __init__(self,
                 n_components=20,
                 alpha=0.1,
                 dict_init=None,
                 transform_batch_size=None,
                 mask=None, smoothing_fwhm=None,
                 standardize=True, detrend=True,
                 low_pass=None, high_pass=None, t_r=None,
                 target_affine=None, target_shape=None,
                 mask_strategy='background', mask_args=None,
                 memory=Memory(cachedir=None),
                 memory_level=2,
                 n_jobs=1, verbose=0, ):
        BaseNilearnEstimator.__init__(self,
                                      mask=mask,
                                      smoothing_fwhm=smoothing_fwhm,
                                      standardize=standardize,
                                      detrend=detrend,
                                      low_pass=low_pass,
                                      high_pass=high_pass,
                                      t_r=t_r,
                                      target_affine=target_affine,
                                      target_shape=target_shape,
                                      mask_strategy=mask_strategy,
                                      mask_args=mask_args,
                                      memory=memory,
                                      memory_level=memory_level,
                                      n_jobs=n_jobs,
                                      verbose=verbose)

        self.n_components = n_components
        self.transform_batch_size = transform_batch_size
        self.dict_init = dict_init
        self.alpha = alpha
Example 3 | Project: modl | Author: arthurmensch
A coder __init__ that passes the user-supplied or default Memory through to fMRICoderMixin.__init__.
def __init__(self, dictionary,
                 alpha=0.1,
                 transform_batch_size=None,
                 mask=None, smoothing_fwhm=None,
                 standardize=False, detrend=False,
                 low_pass=None, high_pass=None, t_r=None,
                 target_affine=None, target_shape=None,
                 mask_strategy='background', mask_args=None,
                 memory=Memory(cachedir=None),
                 memory_level=2,
                 n_jobs=1, verbose=0, ):
        self.dictionary = dictionary
        fMRICoderMixin.__init__(self,
                                n_components=None,
                                alpha=alpha,
                                dict_init=self.dictionary,
                                mask=mask,
                                smoothing_fwhm=smoothing_fwhm,
                                standardize=standardize,
                                detrend=detrend,
                                low_pass=low_pass,
                                high_pass=high_pass,
                                transform_batch_size=transform_batch_size,
                                t_r=t_r,
                                target_affine=target_affine,
                                target_shape=target_shape,
                                mask_strategy=mask_strategy,
                                mask_args=mask_args,
                                memory=memory,
                                memory_level=memory_level,
                                n_jobs=n_jobs,
                                verbose=verbose)
Example 4 | Project: modl | Author: arthurmensch
A pytest test that either enables caching with Memory(cachedir=get_cache_dirs()[0]) or disables it with Memory(cachedir=None) before fitting fMRIDictFact.
def test_dict_fact(method, memory):
    if memory:
        memory = Memory(cachedir=get_cache_dirs()[0])
        memory_level = 2
    else:
        if method != 'masked':
            pytest.skip()
        memory = Memory(cachedir=None)
        memory_level = 0
    data, mask_img, components, init = _make_test_data(n_subjects=10)
    dict_fact = fMRIDictFact(n_components=4, random_state=0,
                             memory=memory,
                             memory_level=memory_level,
                             mask=mask_img,
                             dict_init=init,
                             method=method,
                             reduction=2,
                             smoothing_fwhm=0., n_epochs=2, alpha=1)
    dict_fact.fit(data)
    maps = np.rollaxis(dict_fact.components_img_.get_data(), 3, 0)
    components = np.rollaxis(components.get_data(), 3, 0)
    maps = maps.reshape((maps.shape[0], -1))
    components = components.reshape((components.shape[0], -1))

    # Normalise ground-truth components and recovered maps to unit L2 norm.
    S = np.sqrt(np.sum(components ** 2, axis=1))
    S[S == 0] = 1
    components /= S[:, np.newaxis]

    S = np.sqrt(np.sum(maps ** 2, axis=1))
    S[S == 0] = 1
    maps /= S[:, np.newaxis]

    # Absolute cosine similarity between true components and recovered maps;
    # count the (component, map) pairs whose overlap exceeds 0.95.
    G = np.abs(components.dot(maps.T))

    recovered_maps = np.sum(G > 0.95)
    assert (recovered_maps >= 4)
Example 5 | Project: modl | Author: arthurmensch
An image loader that uses memory.cache(rescale) to cache the rescaling step on the fly.
def load_image(source,
               scale=1,
               gray=False,
               memory=Memory(cachedir=None)):
    data_dir = get_data_dirs()[0]
    if source == 'face':
        image = face(gray=gray)
        image = image.astype(np.float32) / 255
        if image.ndim == 2:
            image = image[..., np.newaxis]
        if scale != 1:
            image = memory.cache(rescale)(image, scale=scale)
        return image
    elif source == 'lisboa':
        image = imread(join(data_dir, 'images', 'lisboa.jpg'), as_grey=gray)
        image = image.astype(np.float32) / 255
        if image.ndim == 2:
            image = image[..., np.newaxis]
        if scale != 1:
            image = memory.cache(rescale)(image, scale=scale)
        return image
    elif source == 'aviris':
        image = open_image(
            join(data_dir,
                 'aviris',
                 'f100826t01p00r05rdn_b/'
                 'f100826t01p00r05rdn_b_sc01_ort_img.hdr'))
        image = np.array(image.open_memmap(), dtype=np.float32)
        good_bands = list(range(image.shape[2]))
        good_bands.remove(110)
        image = image[:, :, good_bands]
        indices = image == -50
        image[indices] = -1
        image[~indices] -= np.min(image[~indices])
        image[~indices] /= np.max(image[~indices])
        return image
    else:
        raise ValueError('Data source is not known')
Example 6 | Project: FaceRecognition | Author: bekhzod0725
A dataset loader cached on disk via Memory(cachedir='./cache_data', compress=6, verbose=0).
def get_lookalike_people():
    # Cache the expensive loading helper on disk, with moderate compression.
    m = Memory(cachedir='./cache_data', compress=6, verbose=0)
    load_func = m.cache(_get_lookalike_people)

    faces, targets, target_ids = load_func()

    return Bunch( data=faces.reshape(len(faces), -1), 
                  images=faces, 
                  target=target_ids, 
                  target_names=targets,
                  DESCR="Look Alike People Dataset")
Example 7 | Project: elm | Author: ContinuumIO
A Pipeline-style _fit that accepts None, a path string, or a Memory instance for its memory attribute and uses it to cache _fit_transform_one.
def _fit(self, X, y=None, **fit_params):

        self._validate_steps()
        # Setup the memory
        memory = self.memory
        if memory is None:
            memory = Memory(cachedir=None, verbose=0)
        elif isinstance(memory, six.string_types):
            memory = Memory(cachedir=memory, verbose=0)
        elif not isinstance(memory, Memory):
            raise ValueError("'memory' should either be a string or"
                             " a joblib.Memory instance, got"
                             " 'memory={!r}' instead.".format(memory))

        fit_transform_one_cached = memory.cache(_fit_transform_one)

        fit_params_steps = dict((name, {}) for name, step in self.steps
                                if step is not None)
        for pname, pval in six.iteritems(fit_params):
            step, param = pname.split('__', 1)
            fit_params_steps[step][param] = pval
        Xt = X
        for step_idx, (name, transformer) in enumerate(self.steps[:-1]):
            Xt, y = self._astype(transformer, Xt, y=y)
            if transformer is None:
                pass
            else:
                if memory.cachedir is None:
                    # we do not clone when caching is disabled to preserve
                    # backward compatibility
                    cloned_transformer = transformer
                else:
                    cloned_transformer = clone(transformer)
                # Fit or load from cache the current transformer
                Xt, fitted_transformer = fit_transform_one_cached(
                    cloned_transformer, None, Xt, y,
                    **fit_params_steps[name])
                # Replace the transformer of the step with the fitted
                # transformer. This is necessary when loading the transformer
                # from the cache.
                self.steps[step_idx] = (name, fitted_transformer)
        if self._final_estimator is None:
            return Xt, y, {}
        fit_params = fit_params_steps[self.steps[-1][0]]
        return Xt, y, fit_params
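
The memory-validation logic above mirrors the handling in scikit-learn's own Pipeline, so the caller-side usage looks the same. A hedged usage sketch (assuming scikit-learn >= 0.19, which introduced the memory parameter on Pipeline; the data and steps below are arbitrary):

from tempfile import mkdtemp

from sklearn.datasets import make_classification
from sklearn.externals.joblib import Memory
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler

X, y = make_classification(n_samples=100, n_features=5, random_state=0)
pipe = Pipeline(
    steps=[('scale', StandardScaler()), ('clf', LogisticRegression())],
    memory=Memory(cachedir=mkdtemp(), verbose=0),  # a path string also works
)
pipe.fit(X, y)  # fitted transformers are cached on disk and reused on refit
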
Example 8 | Project: modl | Author: arthurmensch
The fMRIDictFact __init__, defaulting to memory=Memory(cachedir=None) and memory_level=0 and forwarding both to fMRICoderMixin.__init__.
def __init__(self,
                 method='masked',
                 step_size=1,
                 n_components=20,
                 n_epochs=1,
                 alpha=0.1,
                 dict_init=None,
                 random_state=None,
                 batch_size=20,
                 reduction=1,
                 learning_rate=1,
                 positive=False,
                 transform_batch_size=None,
                 mask=None, smoothing_fwhm=None,
                 standardize=True, detrend=True,
                 low_pass=None, high_pass=None, t_r=None,
                 target_affine=None, target_shape=None,
                 mask_strategy='background', mask_args=None,
                 memory=Memory(cachedir=None), memory_level=0,
                 n_jobs=1, verbose=0,
                 callback=None):
        fMRICoderMixin.__init__(self, n_components=n_components,
                                alpha=alpha,
                                dict_init=dict_init,
                                mask=mask,
                                transform_batch_size=transform_batch_size,
                                smoothing_fwhm=smoothing_fwhm,
                                standardize=standardize,
                                detrend=detrend,
                                low_pass=low_pass,
                                high_pass=high_pass,
                                t_r=t_r,
                                target_affine=target_affine,
                                target_shape=target_shape,
                                mask_strategy=mask_strategy,
                                mask_args=mask_args,
                                memory=memory,
                                memory_level=memory_level,
                                n_jobs=n_jobs,
                                verbose=verbose)
        self.n_epochs = n_epochs
        self.batch_size = batch_size
        self.reduction = reduction
        self.method = method
        self.step_size = step_size
        self.positive = positive
        self.learning_rate = learning_rate
        self.random_state = random_state
        self.callback = callback
Example 9 | Project: Parallel-SGD | Author: angadgill
An estimator-API check that lists Memory among the types allowed for default parameter values.
def check_parameters_default_constructible(name, Estimator):
    classifier = LinearDiscriminantAnalysis()
    # test default-constructibility
    # get rid of deprecation warnings
    with warnings.catch_warnings(record=True):
        if name in META_ESTIMATORS:
            estimator = Estimator(classifier)
        else:
            estimator = Estimator()
        # test cloning
        clone(estimator)
        # test __repr__
        repr(estimator)
        # test that set_params returns self
        assert_true(estimator.set_params() is estimator)

        # test if init does nothing but set parameters
        # this is important for grid_search etc.
        # We get the default parameters from init and then
        # compare these against the actual values of the attributes.

        # this comes from getattr. Gets rid of deprecation decorator.
        init = getattr(estimator.__init__, 'deprecated_original',
                       estimator.__init__)

        try:
            def param_filter(p):
                """Identify hyper parameters of an estimator"""
                return (p.name != 'self'
                        and p.kind != p.VAR_KEYWORD
                        and p.kind != p.VAR_POSITIONAL)

            init_params = [p for p in signature(init).parameters.values()
                           if param_filter(p)]
        except (TypeError, ValueError):
            # init is not a python function.
            # true for mixins
            return
        params = estimator.get_params()
        if name in META_ESTIMATORS:
            # they can need a non-default argument
            init_params = init_params[1:]

        for init_param in init_params:
            assert_not_equal(init_param.default, init_param.empty,
                             "parameter %s for %s has no default value"
                             % (init_param.name, type(estimator).__name__))
            assert_in(type(init_param.default),
                      [str, int, float, bool, tuple, type(None),
                       np.float64, types.FunctionType, Memory])
            if init_param.name not in params.keys():
                # deprecated parameter, not in get_params
                assert_true(init_param.default is None)
                continue

            param_value = params[init_param.name]
            if isinstance(param_value, np.ndarray):
                assert_array_equal(param_value, init_param.default)
            else:
                assert_equal(param_value, init_param.default)
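
A closing portability note: sklearn.externals.joblib was deprecated in scikit-learn 0.21 and removed in later releases, and newer joblib versions renamed the cachedir argument to location. A minimal sketch of the equivalent modern construction, assuming a standalone joblib installation:

import joblib

# location replaces the old cachedir argument; None still disables caching.
memory_disabled = joblib.Memory(location=None, verbose=0)
memory_enabled = joblib.Memory(location='./cache_data', verbose=0)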