How to use the mlxtend.externals.estimator_checks.check_is_fitted function in mlxtend

To help you get started, we’ve selected a few mlxtend examples based on popular ways check_is_fitted is used in public projects.

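All of the examples below follow the same pattern: check_is_fitted is called at the top of a predict-style method to verify that fit has already created the learned, trailing-underscore attributes (regr_, clfs_, meta_clf_) and to raise an error otherwise. A minimal sketch of that pattern, assuming the import path from this page's title (the estimator below is a made-up toy; newer mlxtend releases rely on sklearn.utils.validation.check_is_fitted directly):

import numpy as np
from mlxtend.externals.estimator_checks import check_is_fitted  # path assumed from the title; varies by mlxtend version


class MeanRegressor:
    """Toy estimator illustrating the check_is_fitted pattern used in mlxtend's stacking classes."""

    def fit(self, X, y):
        # fit() sets the trailing-underscore attribute that check_is_fitted looks for
        self.mean_ = float(np.mean(y))
        return self

    def predict(self, X):
        # raises an error if fit() has not been called yet
        check_is_fitted(self, 'mean_')
        return np.full(len(X), self.mean_)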

github rasbt / mlxtend / mlxtend / regressor / stacking_cv_regression.py
def predict_meta_features(self, X):
        """ Get meta-features of test-data.

        Parameters
        ----------
        X : numpy array, shape = [n_samples, n_features]
            Test vectors, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        -------
        meta-features : numpy array, shape = [n_samples, len(self.regressors)]
            meta-features for test data, where n_samples is the number of
            samples in test data and len(self.regressors) is the number
            of regressors.

        """
        check_is_fitted(self, 'regr_')
        return np.column_stack([regr.predict(X) for regr in self.regr_])
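For context, a hedged usage sketch (base regressors and data are illustrative, not taken from the mlxtend docs): once the stacker is fitted, predict_meta_features returns one prediction column per base regressor.

import numpy as np
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.svm import SVR
from mlxtend.regressor import StackingCVRegressor

X = np.random.RandomState(0).rand(50, 4)
y = X.sum(axis=1)

stack = StackingCVRegressor(regressors=[LinearRegression(), Ridge(), SVR()],
                            meta_regressor=LinearRegression())
stack.fit(X, y)

# one prediction column per base regressor -> shape (50, 3)
meta = stack.predict_meta_features(X)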
github rasbt / mlxtend / mlxtend / classifier / stacking_classification.py
def predict_meta_features(self, X):
        """ Get meta-features of test-data.

        Parameters
        ----------
        X : numpy array, shape = [n_samples, n_features]
            Test vectors, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        -------
        meta-features : numpy array, shape = [n_samples, n_classifiers]
            Returns the meta-features for test data.

        """
        check_is_fitted(self, 'clfs_')
        if self.use_probas:
            if self.drop_last_proba:
                probas = np.asarray([clf.predict_proba(X)[:, :-1]
                                     for clf in self.clfs_])
            else:
                probas = np.asarray([clf.predict_proba(X)
                                     for clf in self.clfs_])
            if self.average_probas:
                vals = np.average(probas, axis=0)
            else:
                vals = np.concatenate(probas, axis=1)
        else:
            vals = np.column_stack([clf.predict(X) for clf in self.clfs_])
        return vals
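A hedged sketch of how the branches above affect the output shape (classifiers and data are illustrative): with use_probas=True and average_probas=False, the per-classifier probabilities are concatenated column-wise.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from mlxtend.classifier import StackingClassifier

X = np.random.RandomState(1).rand(60, 3)
y = (X[:, 0] > 0.5).astype(int)

clf = StackingClassifier(classifiers=[LogisticRegression(), GaussianNB()],
                         meta_classifier=LogisticRegression(),
                         use_probas=True, average_probas=False)
clf.fit(X, y)

# two classifiers x two classes, concatenated -> shape (60, 4)
meta = clf.predict_meta_features(X)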
github rasbt / mlxtend / mlxtend / regressor / stacking_cv_regression.py
def predict(self, X):
        """ Predict target values for X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        ----------
        y_target : array-like, shape = [n_samples] or [n_samples, n_targets]
            Predicted target values.
        """

        #
        # First we make predictions with the base-models then we predict with
        # the meta-model from that info.
        #

        check_is_fitted(self, 'regr_')

        meta_features = np.column_stack([
            regr.predict(X) for regr in self.regr_
        ])

        if not self.use_features_in_secondary:
            return self.meta_regr_.predict(meta_features)
        elif sparse.issparse(X):
            return self.meta_regr_.predict(sparse.hstack((X, meta_features)))
        else:
            return self.meta_regr_.predict(np.hstack((X, meta_features)))
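A hedged sketch of the use_features_in_secondary branch (estimators and data are illustrative): the meta-regressor then sees the original features stacked next to the base-model predictions, and sparse input would go through scipy's sparse.hstack instead of np.hstack.

import numpy as np
from sklearn.linear_model import LinearRegression, Ridge
from mlxtend.regressor import StackingCVRegressor

X = np.random.RandomState(2).rand(40, 5)
y = X @ np.arange(1.0, 6.0)

stack = StackingCVRegressor(regressors=[LinearRegression(), Ridge()],
                            meta_regressor=LinearRegression(),
                            use_features_in_secondary=True)
stack.fit(X, y)

# the meta-regressor receives hstack((X, meta_features)), i.e. 5 + 2 columns
y_pred = stack.predict(X)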
github rasbt / mlxtend / mlxtend / regressor / stacking_regression.py
def predict_meta_features(self, X):
        """ Get meta-features of test-data.

        Parameters
        ----------
        X : numpy array, shape = [n_samples, n_features]
            Test vectors, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        -------
        meta-features : numpy array, shape = [n_samples, len(self.regressors)]
            meta-features for test data, where n_samples is the number of
            samples in test data and len(self.regressors) is the number
            of regressors.

        """
        check_is_fitted(self, 'regr_')
        return np.column_stack([r.predict(X) for r in self.regr_])
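The check_is_fitted call on the first line is what turns a call on an unfitted stacker into a clear error instead of an AttributeError deeper in the loop. A hedged illustration, assuming the helper raises sklearn's NotFittedError (as sklearn.utils.validation.check_is_fitted does):

from sklearn.exceptions import NotFittedError
from sklearn.linear_model import LinearRegression, Ridge
from mlxtend.regressor import StackingRegressor

stack = StackingRegressor(regressors=[LinearRegression(), Ridge()],
                          meta_regressor=LinearRegression())

try:
    # regr_ does not exist until fit() has been called
    stack.predict_meta_features([[0.0, 1.0]])
except NotFittedError:
    print("estimator is not fitted yet")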
github rasbt / mlxtend / mlxtend / classifier / stacking_cv_classification.py
def predict_meta_features(self, X):
        """ Get meta-features of test-data.

        Parameters
        ----------
        X : numpy array, shape = [n_samples, n_features]
            Test vectors, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        -------
        meta-features : numpy array, shape = [n_samples, n_classifiers]
            Returns the meta-features for test data.

        """
        check_is_fitted(self, ['clfs_', 'meta_clf_'])

        per_model_preds = []

        for model in self.clfs_:
            if not self.use_probas:
                prediction = model.predict(X)[:, np.newaxis]
            else:
                if self.drop_last_proba:
                    prediction = model.predict_proba(X)[:, :-1]
                else:
                    prediction = model.predict_proba(X)

            per_model_preds.append(prediction)

        return np.hstack(per_model_preds)
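Note that here check_is_fitted receives a list, so both clfs_ and meta_clf_ must exist before the method proceeds. A hedged usage sketch with use_probas=False (classifiers and data are illustrative): each base model contributes one predicted-label column.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from mlxtend.classifier import StackingCVClassifier

X = np.random.RandomState(3).rand(80, 4)
y = (X[:, 0] + X[:, 1] > 1.0).astype(int)

sclf = StackingCVClassifier(classifiers=[LogisticRegression(), DecisionTreeClassifier()],
                            meta_classifier=LogisticRegression(),
                            use_probas=False, cv=2)
sclf.fit(X, y)

# one predicted-label column per base classifier -> shape (80, 2)
meta = sclf.predict_meta_features(X)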
github rasbt / mlxtend / mlxtend / regressor / stacking_regression.py
def predict(self, X):
        """ Predict target values for X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        ----------
        y_target : array-like, shape = [n_samples] or [n_samples, n_targets]
            Predicted target values.
        """
        check_is_fitted(self, 'regr_')
        meta_features = self.predict_meta_features(X)

        if not self.use_features_in_secondary:
            return self.meta_regr_.predict(meta_features)
        elif sparse.issparse(X):
            return self.meta_regr_.predict(sparse.hstack((X, meta_features)))
        else:
            return self.meta_regr_.predict(np.hstack((X, meta_features)))
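Putting it together, a hedged end-to-end sketch (estimators and data are illustrative): after fit creates regr_ and meta_regr_, check_is_fitted passes and predict routes the stacked meta-features into the meta-regressor.

import numpy as np
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.svm import SVR
from mlxtend.regressor import StackingRegressor

rng = np.random.RandomState(4)
X = rng.rand(100, 3)
y = 2.0 * X[:, 0] - X[:, 1] + rng.normal(scale=0.1, size=100)

stack = StackingRegressor(regressors=[LinearRegression(), Ridge(), SVR()],
                          meta_regressor=Ridge())
stack.fit(X, y)      # sets regr_ and meta_regr_, so check_is_fitted passes
y_pred = stack.predict(X)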