How to use the mlxtend.externals.name_estimators._name_estimators function in mlxtend

To help you get started, we’ve selected a few mlxtend examples based on popular ways _name_estimators is used in public projects.

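The function takes an iterable of estimators and returns a list of (name, estimator) tuples, where each name is the lowercased class name. A minimal sketch of a direct call, assuming the import path from the title and two arbitrary scikit-learn classifiers chosen only for illustration:

# Minimal sketch (not from the mlxtend source); the classifiers are arbitrary
# scikit-learn estimators used only to illustrate the return value.
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier

from mlxtend.externals.name_estimators import _name_estimators

clf1 = LogisticRegression()
clf2 = KNeighborsClassifier()

named = _name_estimators([clf1, clf2])
print(named)
# [('logisticregression', LogisticRegression(...)),
#  ('kneighborsclassifier', KNeighborsClassifier(...))]

The snippets below show the two patterns mlxtend itself relies on: printing a readable name for each estimator during verbose fitting, and building name-to-estimator mappings such as named_clfs and named_est.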

github rasbt / mlxtend / mlxtend / classifier / ensemble_vote.py View on Github
        if not self.refit:
            self.clfs_ = [clf for clf in self.clfs]

        else:
            self.clfs_ = [clone(clf) for clf in self.clfs]

            if self.verbose > 0:
                print("Fitting %d classifiers..." % (len(self.clfs)))

            for clf in self.clfs_:

                if self.verbose > 0:
                    i = self.clfs_.index(clf) + 1
                    print("Fitting clf%d: %s (%d/%d)" %
                          (i, _name_estimators((clf,))[0][0], i,
                           len(self.clfs_)))

                if self.verbose > 2:
                    if hasattr(clf, 'verbose'):
                        clf.set_params(verbose=self.verbose - 2)

                if self.verbose > 1:
                    print(_name_estimators((clf,))[0][1])

                if sample_weight is None:
                    clf.fit(X, self.le_.transform(y))
                else:
                    clf.fit(X, self.le_.transform(y),
                            sample_weight=sample_weight)
        return self
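
The repeated _name_estimators((clf,))[0][0] and [0][1] lookups above work because the function is called with a one-element tuple, so it returns a one-element list of (name, estimator) pairs. A small sketch of that indexing, using an arbitrary scikit-learn classifier for illustration:

# Illustrative sketch of the indexing used in the verbose logging above.
from sklearn.linear_model import LogisticRegression

from mlxtend.externals.name_estimators import _name_estimators

clf = LogisticRegression()
named = _name_estimators((clf,))   # [('logisticregression', LogisticRegression(...))]
print(named[0][0])                 # 'logisticregression' -- the name used in the progress message
print(named[0][1])                 # the estimator itself, printed when verbose > 1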

github rasbt / mlxtend / mlxtend / classifier / stacking_classification.py View on Github
        if self.verbose > 0:
            print("Fitting %d classifiers..." % (len(self.classifiers)))

        for clf in self.clfs_:

            if self.verbose > 0:
                i = self.clfs_.index(clf) + 1
                print("Fitting classifier%d: %s (%d/%d)" %
                      (i, _name_estimators((clf,))[0][0], i, len(self.clfs_)))

            if self.verbose > 2:
                if hasattr(clf, 'verbose'):
                    clf.set_params(verbose=self.verbose - 2)

            if self.verbose > 1:
                print(_name_estimators((clf,))[0][1])
            if sample_weight is None:
                clf.fit(X, y)
            else:
                clf.fit(X, y, sample_weight=sample_weight)

        meta_features = self.predict_meta_features(X)

        if self.store_train_meta_features:
            self.train_meta_features_ = meta_features

        if not self.use_features_in_secondary:
            pass
        elif sparse.issparse(X):
            meta_features = sparse.hstack((X, meta_features))
        else:
            meta_features = np.hstack((X, meta_features))

github rasbt / mlxtend / mlxtend / regressor / stacking_regression.py View on Github
        if self.refit:
            self.regr_ = clone(self.regressors)
            self.meta_regr_ = clone(self.meta_regressor)
        else:
            self.regr_ = self.regressors
            self.meta_regr_ = self.meta_regressor

        if self.verbose > 0:
            print("Fitting %d regressors..." % (len(self.regressors)))

        for regr in self.regr_:

            if self.verbose > 0:
                i = self.regr_.index(regr) + 1
                print("Fitting regressor%d: %s (%d/%d)" %
                      (i, _name_estimators((regr,))[0][0], i, len(self.regr_)))

            if self.verbose > 2:
                if hasattr(regr, 'verbose'):
                    regr.set_params(verbose=self.verbose - 2)

            if self.verbose > 1:
                print(_name_estimators((regr,))[0][1])

            if sample_weight is None:
                regr.fit(X, y)
            else:
                regr.fit(X, y, sample_weight=sample_weight)

        meta_features = self.predict_meta_features(X)

        if not self.use_features_in_secondary:

github rasbt / mlxtend / mlxtend / classifier / stacking_cv_classification.py View on Github
    def named_classifiers(self):
        return _name_estimators(self.classifiers)

github rasbt / mlxtend / mlxtend / feature_selection / exhaustive_feature_selector.py View on Github
    def __init__(self, estimator, min_features=1, max_features=1,
                 print_progress=True, scoring='accuracy',
                 cv=5, n_jobs=1,
                 pre_dispatch='2*n_jobs',
                 clone_estimator=True):
        self.estimator = estimator
        self.min_features = min_features
        self.max_features = max_features
        self.pre_dispatch = pre_dispatch
        self.scoring = scoring
        self.scorer = get_scorer(scoring)
        self.cv = cv
        self.print_progress = print_progress
        self.n_jobs = n_jobs
        self.named_est = {key: value for key, value in
                          _name_estimators([self.estimator])}
        self.clone_estimator = clone_estimator
        if self.clone_estimator:
            self.est_ = clone(self.estimator)
        else:
            self.est_ = self.estimator
        self.fitted = False
        self.interrupted_ = False

        # don't mess with this unless testing
        self._TESTING_INTERRUPT_MODE = False
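
The dict comprehension over _name_estimators([self.estimator]) above turns the single wrapped estimator into the name-to-estimator mapping stored as named_est. A short sketch of what that mapping looks like, assuming an arbitrary scikit-learn estimator:

# Illustrative sketch of the named_est mapping built in __init__ above.
from sklearn.neighbors import KNeighborsClassifier

from mlxtend.externals.name_estimators import _name_estimators

estimator = KNeighborsClassifier(n_neighbors=3)
named_est = {key: value for key, value in _name_estimators([estimator])}
print(named_est)   # {'kneighborsclassifier': KNeighborsClassifier(n_neighbors=3)}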

github rasbt / mlxtend / mlxtend / regressor / stacking_regression.py View on Github
    def named_regressors(self):
        return _name_estimators(self.regressors)

github rasbt / mlxtend / mlxtend / regressor / stacking_cv_regression.py View on Github
    def named_regressors(self):
        """
        Returns
        -------
        List of named estimator tuples, like [('svc', SVC(...))]
        """
        return _name_estimators(self.regressors)

github rasbt / mlxtend / mlxtend / classifier / ensemble_vote.py View on Github
    def __init__(self, clfs, voting='hard',
                 weights=None, verbose=0, refit=True):

        self.clfs = clfs
        self.named_clfs = {key: value for key, value in _name_estimators(clfs)}
        self.voting = voting
        self.weights = weights
        self.verbose = verbose
        self.refit = refit
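
Because named_clfs is keyed by the generated names, _name_estimators keeps those names unique even when the same classifier class appears more than once; in mlxtend's implementation, which mirrors scikit-learn's helper of the same name, duplicates are disambiguated with numeric suffixes. A sketch with arbitrary classifiers chosen for illustration:

# Illustrative sketch: duplicate classes receive suffixed names, so the
# named_clfs dict above keeps one entry per estimator. The exact suffix
# format may differ between mlxtend versions.
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB

from mlxtend.externals.name_estimators import _name_estimators

clfs = [LogisticRegression(), LogisticRegression(C=0.1), GaussianNB()]
print(_name_estimators(clfs))
# e.g. names 'logisticregression-1', 'logisticregression-2', 'gaussiannb'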

github rasbt / mlxtend / mlxtend / classifier / stacking_classification.py View on Github
    def named_classifiers(self):
        return _name_estimators(self.classifiers)

github rasbt / mlxtend / mlxtend / feature_selection / sequential_feature_selector.py View on Github
    def named_estimators(self):
        """
        Returns
        -------
        List of named estimator tuples, like [('svc', SVC(...))]
        """
        return _name_estimators([self.estimator])