How to use the pyriemann.spatialfilters.Xdawn class in pyriemann

To help you get started, we’ve selected a few pyriemann examples, based on popular ways it is used in public projects.

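As a quick orientation before the excerpts, here is a minimal, self-contained sketch of the Xdawn workflow they exercise: fit spatial filters on labelled trials of shape (n_trials, n_channels, n_samples), then apply them with transform. The data and the nfilter value are illustrative only.

import numpy as np

from pyriemann.spatialfilters import Xdawn

# Synthetic two-class trials: 100 trials, 3 channels, 10 samples each
X = np.random.randn(100, 3, 10)
y = np.array([0, 1]).repeat(50)

xd = Xdawn(nfilter=2)          # keep 2 Xdawn components per class
xd.fit(X, y)                   # estimates filters_, patterns_ and evokeds_
X_filtered = xd.transform(X)   # spatially filtered trials
print(X_filtered.shape)        # (100, n_classes * nfilter, 10)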

github alexandrebarachant / pyRiemann / tests / test_spatialfilters.py
import numpy as np

from pyriemann.spatialfilters import Xdawn


def test_Xdawn_baselinecov():
    """Test cov precomputation"""
    x = np.random.randn(100, 3, 10)
    labels = np.array([0, 1]).repeat(50)
    baseline_cov = np.identity(3)
    xd = Xdawn(baseline_cov=baseline_cov)
    xd.fit(x, labels)
    xd.transform(x)

github alexandrebarachant / pyRiemann / tests / test_spatialfilters.py
def test_Xdawn_init():
    """Test init of Xdawn"""
    xd = Xdawn()

github alexandrebarachant / pyRiemann / tests / test_spatialfilters.py
def test_Xdawn_fit():
    """Test Fit of Xdawn"""
    x = np.random.randn(100, 3, 10)
    labels = np.array([0, 1]).repeat(50)
    xd = Xdawn()
    xd.fit(x, labels)

github alexandrebarachant / pyRiemann / tests / test_spatialfilters.py
def test_Xdawn_transform():
    """Test transform of Xdawn"""
    x = np.random.randn(100, 3, 10)
    labels = np.array([0, 1]).repeat(50)
    xd = Xdawn()
    xd.fit(x, labels)
    xd.transform(x)

github alexandrebarachant / pyRiemann / pyriemann / estimation.py
    def fit(self, X, y):
        """Estimate spatial filters and prototyped responses for each class.

        Parameters
        ----------
        X : ndarray, shape (n_trials, n_channels, n_samples)
            ndarray of trials.
        y : ndarray, shape (n_trials,)
            Labels corresponding to each trial.

        Returns
        -------
        self : XdawnCovariances instance
            The XdawnCovariances instance.
        """
        self.Xd_ = Xdawn(
            nfilter=self.nfilter,
            classes=self.classes,
            estimator=self.xdawn_estimator,
            baseline_cov=self.baseline_cov)
        self.Xd_.fit(X, y)
        self.P_ = self.Xd_.evokeds_
        return self
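
To make the fit/transform contract above concrete, here is a hedged sketch of XdawnCovariances on synthetic data; the shapes follow the docstring, and the nfilter and estimator values are illustrative only.

import numpy as np

from pyriemann.estimation import XdawnCovariances

# X: (n_trials, n_channels, n_samples), y: (n_trials,), as described above
X = np.random.randn(200, 8, 64)
y = np.array([0, 1]).repeat(100)

est = XdawnCovariances(nfilter=4, estimator='oas')
est.fit(X, y)              # fits the internal Xdawn (est.Xd_) and keeps its evokeds_ as est.P_
covs = est.transform(X)    # one augmented covariance matrix per trial
print(covs.shape)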

github alexandrebarachant / decoding-brain-challenge-2016 / cross_validation_paper.py
from collections import OrderedDict

import pandas as pd

# sklearn.cross_validation was removed in scikit-learn 0.20; on recent
# versions, import KFold from sklearn.model_selection instead.
from sklearn.cross_validation import KFold
from sklearn.metrics import roc_auc_score
from sklearn.pipeline import make_pipeline
from sklearn.linear_model import LogisticRegression

from pyriemann.spatialfilters import Xdawn
from pyriemann.estimation import XdawnCovariances, CospCovariances
from pyriemann.tangentspace import TangentSpace
from pyriemann.channelselection import ElectrodeSelection

from utils import (DownSampler, EpochsVectorizer, CospBoostingClassifier,
                   epoch_data)

dataframe1 = pd.read_csv('ecog_train_with_labels.csv')

array_clfs = OrderedDict()

# ERPs models
array_clfs['XdawnCov'] = make_pipeline(XdawnCovariances(6, estimator='oas'),
                                       TangentSpace('riemann'),
                                       LogisticRegression('l2'))

array_clfs['Xdawn'] = make_pipeline(Xdawn(12, estimator='oas'),
                                    DownSampler(5),
                                    EpochsVectorizer(),
                                    LogisticRegression('l2'))

# Induced activity models

baseclf = make_pipeline(ElectrodeSelection(10, metric=dict(mean='logeuclid',
                                                           distance='riemann')),
                        TangentSpace('riemann'),
                        LogisticRegression('l1'))

array_clfs['Cosp'] = make_pipeline(CospCovariances(fs=1000, window=32,
                                                   overlap=0.95, fmax=300,
                                                   fmin=1),
                                   CospBoostingClassifier(baseclf))
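
The script above only shows the model definitions; its imports of KFold and roc_auc_score suggest a cross-validation loop along the following lines. This is a hedged sketch on synthetic data, using StratifiedKFold from the modern sklearn.model_selection module in place of the old sklearn.cross_validation.KFold, and rebuilding the 'XdawnCov' pipeline inline so the block is self-contained.

import numpy as np

from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import roc_auc_score
from sklearn.pipeline import make_pipeline
from sklearn.linear_model import LogisticRegression

from pyriemann.estimation import XdawnCovariances
from pyriemann.tangentspace import TangentSpace

# Stand-in for the epoched ECoG data produced by epoch_data() in the script above
X = np.random.randn(120, 16, 250)
y = np.array([0, 1]).repeat(60)

clf = make_pipeline(XdawnCovariances(6, estimator='oas'),
                    TangentSpace(metric='riemann'),
                    LogisticRegression())

aucs = []
for train, test in StratifiedKFold(n_splits=5, shuffle=True, random_state=42).split(X, y):
    clf.fit(X[train], y[train])
    proba = clf.predict_proba(X[test])[:, 1]
    aucs.append(roc_auc_score(y[test], proba))
print(np.mean(aucs))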

github alexandrebarachant / decoding-brain-challenge-2016 / generate_models.py
from sklearn.cross_validation import KFold
from sklearn.metrics import roc_auc_score

from utils import (DownSampler, EpochsVectorizer, CospBoostingClassifier,
                   epoch_data)

dataframe1 = pd.read_csv('ecog_train_with_labels.csv')

array_clfs = OrderedDict()

# ERPs models
array_clfs['XdawnCov'] = make_pipeline(XdawnCovariances(6, estimator='oas'),
                                       TangentSpace('riemann'),
                                       LogisticRegression('l2'))

array_clfs['Xdawn'] = make_pipeline(Xdawn(12, estimator='oas'),
                                    DownSampler(5),
                                    EpochsVectorizer(),
                                    LogisticRegression('l2'))

# Induced activity models

baseclf = make_pipeline(ElectrodeSelection(10, metric=dict(mean='logeuclid',
                                                           distance='riemann')),
                        TangentSpace('riemann'),
                        LogisticRegression('l1'))

array_clfs['Cosp'] = make_pipeline(CospCovariances(fs=1000, window=32,
                                                   overlap=0.95, fmax=300,
                                                   fmin=1),
                                   CospBoostingClassifier(baseclf))

github ZhangXiao96 / EEGAdversary / lib / Blocks.py
"""

        :param n_filters: The number of spatial filters. When "transform_flag=False", the original_data would
               not be filtered.
        :param with_xdawn_templates: Set True if padding the templates on the original EEG epochs.
               Usually used to calculate Xdawn Covariance Matrix.
        :param apply_filters: Sometimes only the templates are needed, in this case set
               'transform=False' to just pad the templates on the original EEG epochs. Usually
               set to 'False' when using Xdawn Covariance Matrix.
        :param name: The name of the block.
        """
        super(Xdawn, self).__init__(name)
        self.n_filters = n_filters
        self.apply_filters = apply_filters
        self.with_templates = with_xdawn_templates
        self.model = riemman_Xdawn(nfilter=self.n_filters)
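
For context on the "padding the templates" wording above, here is a hedged sketch of what that operation could look like with pyriemann's Xdawn: the class templates stored in evokeds_ are stacked onto each filtered epoch along the channel axis, which is the kind of augmented epoch from which Xdawn covariance matrices are then estimated. Shapes and nfilter are illustrative, not taken from the EEGAdversary code.

import numpy as np

from pyriemann.spatialfilters import Xdawn

X = np.random.randn(100, 3, 10)
y = np.array([0, 1]).repeat(50)

xd = Xdawn(nfilter=2)
xd.fit(X, y)

templates = xd.evokeds_                       # (n_classes * nfilter, n_samples)
padded = np.concatenate(
    [np.tile(templates, (len(X), 1, 1)),      # the same templates repeated for every trial
     xd.transform(X)],                        # spatially filtered epochs
    axis=1)                                   # stack along the channel axis
print(padded.shape)                           # (100, 2 * n_classes * nfilter, 10)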

github alexandrebarachant / decoding-brain-challenge-2016 / cross_validation_challenge.py
from sklearn.cross_validation import KFold
from sklearn.metrics import roc_auc_score

from utils import (DownSampler, EpochsVectorizer, CospBoostingClassifier,
                   epoch_data)

dataframe1 = pd.read_csv('ecog_train_with_labels.csv')

array_clfs = OrderedDict()

# ERPs models
array_clfs['XdawnCov'] = make_pipeline(XdawnCovariances(6, estimator='oas'),
                                       TangentSpace('riemann'),
                                       LogisticRegression('l2'))

array_clfs['Xdawn'] = make_pipeline(Xdawn(12, estimator='oas'),
                                    DownSampler(5),
                                    EpochsVectorizer(),
                                    LogisticRegression('l2'))

# Induced activity models

baseclf = make_pipeline(ElectrodeSelection(10, metric=dict(mean='logeuclid',
                                                           distance='riemann')),
                        TangentSpace('riemann'),
                        LogisticRegression('l1'))

array_clfs['Cosp'] = make_pipeline(CospCovariances(fs=1000, window=32,
                                                   overlap=0.95, fmax=300,
                                                   fmin=1),
                                   CospBoostingClassifier(baseclf))
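
The "induced activity" pipelines above rely on CospCovariances, which estimates one co-spectral covariance matrix per trial and per frequency bin; CospBoostingClassifier comes from the repo's utils module and is not part of pyriemann. A hedged sketch of the covariance-estimation step on synthetic data, with illustrative shapes:

import numpy as np

from pyriemann.estimation import CospCovariances

# Stand-in for epoched signals sampled at 1000 Hz, as in the scripts above
X = np.random.randn(60, 16, 1000)

cosp = CospCovariances(fs=1000, window=32, overlap=0.95, fmin=1, fmax=300)
covs = cosp.fit_transform(X)
print(covs.shape)   # (n_trials, n_channels, n_channels, n_freqs)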