How to use the nilearn.datasets module in nilearn

To help you get started, we’ve selected a few nilearn.datasets examples based on popular ways the module is used in public projects.
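
As a quick orientation before the project snippets below: every fetcher in nilearn.datasets downloads the requested data into a local cache (by default under ~/nilearn_data) and returns a dictionary-like Bunch whose attributes hold file paths and metadata. A minimal sketch, assuming a recent nilearn release and an internet connection; the exact keys vary by fetcher:

from nilearn import datasets

# downloads the atlas on first use, then reuses the local copy
atlas = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')

print(sorted(atlas.keys()))  # e.g. ['labels', 'maps', ...]
print(atlas.labels[:5])      # first few region names
print(atlas.maps)            # labelled atlas volume (path or image object)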


github nilearn / nilearn / examples / manipulating_visualizing / plot_haxby_mass_univariate.py View on Github
[2] Anderson, M. J. & Robinson, J. (2001).
    Permutation tests for linear models.
    Australian & New Zealand Journal of Statistics, 43(1), 75-88.
    (http://avesbiodiv.mncn.csic.es/estadistica/permut2.pdf)

"""
# Author: Virgile Fritsch, Feb. 2014
import numpy as np
from scipy import linalg
from nilearn import datasets
from nilearn.input_data import NiftiMasker
from nilearn.mass_univariate import permuted_ols

### Load Haxby dataset ########################################################
haxby_dataset = datasets.fetch_haxby_simple()

### Mask data #################################################################
mask_filename = haxby_dataset.mask
nifti_masker = NiftiMasker(
    mask_img=mask_filename,
    memory='nilearn_cache', memory_level=1)  # cache options
func_filename = haxby_dataset.func
fmri_masked = nifti_masker.fit_transform(func_filename)

### Restrict to faces and houses ##############################################
conditions_encoded, sessions = np.loadtxt(
    haxby_dataset.session_target).astype("int").T
conditions = np.recfromtxt(haxby_dataset.conditions_target)['f0']
condition_mask = np.logical_or(conditions == 'face', conditions == 'house')
conditions_encoded = conditions_encoded[condition_mask]
fmri_masked = fmri_masked[condition_mask]
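
Note that fetch_haxby_simple has since been deprecated and removed from nilearn. A hedged sketch of the same loading and masking steps with the current fetch_haxby API (the attribute names and the pandas-based label parsing follow recent nilearn examples, so treat them as assumptions for other versions):

import pandas as pd
from nilearn import datasets
from nilearn.input_data import NiftiMasker

haxby_dataset = datasets.fetch_haxby()  # one subject by default
# the labels file is space-separated with 'labels' and 'chunks' columns
behavioral = pd.read_csv(haxby_dataset.session_target[0], sep=" ")
condition_mask = behavioral['labels'].isin(['face', 'house'])

nifti_masker = NiftiMasker(mask_img=haxby_dataset.mask,
                           memory='nilearn_cache', memory_level=1)
fmri_masked = nifti_masker.fit_transform(haxby_dataset.func[0])
fmri_masked = fmri_masked[condition_mask.values]
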
github dPys / PyNets / pynets / core / nodemaker.py View on Github
    if atlas == 'atlas_harvard_oxford':
        # the atlas specification is passed positionally to the resolved
        # fetcher (here datasets.fetch_atlas_harvard_oxford)
        atlas_fetch_obj = getattr(datasets, 'fetch_%s' % atlas)('cort-maxprob-thr0-1mm')
    elif atlas == 'atlas_pauli_2017':
        if parc is False:
            atlas_fetch_obj = getattr(datasets, 'fetch_%s' % atlas)('prob')
        else:
            atlas_fetch_obj = getattr(datasets, 'fetch_%s' % atlas)('det')
    elif 'atlas_talairach' in atlas:
        if atlas == 'atlas_talairach_lobe':
            atlas = 'atlas_talairach'
            print('Fetching level: lobe...')
            atlas_fetch_obj = getattr(datasets, 'fetch_%s' % atlas)('lobe')
        elif atlas == 'atlas_talairach_gyrus':
            atlas = 'atlas_talairach'
            print('Fetching level: gyrus...')
            atlas_fetch_obj = getattr(datasets, 'fetch_%s' % atlas)('gyrus')
        elif atlas == 'atlas_talairach_ba':
            atlas = 'atlas_talairach'
            print('Fetching level: ba...')
            atlas_fetch_obj = getattr(datasets, 'fetch_%s' % atlas)('ba')
    else:
        atlas_fetch_obj = getattr(datasets, 'fetch_%s' % atlas)()
    if len(list(atlas_fetch_obj.keys())) > 0:
        if 'maps' in list(atlas_fetch_obj.keys()):
            uatlas = atlas_fetch_obj.maps
        else:
            uatlas = None
        if 'labels' in list(atlas_fetch_obj.keys()):
            try:
                labels = [i.decode("utf-8") for i in atlas_fetch_obj.labels]
            except AttributeError:
                # labels are already plain strings
                labels = [i for i in atlas_fetch_obj.labels]
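
For reference, the getattr indirection above resolves to ordinary nilearn.datasets calls. A sketch of the equivalent direct calls; the keyword names (version, level_name) match the nilearn releases this code targets and should be treated as assumptions for newer versions:

from nilearn import datasets

harvard_oxford = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr0-1mm')
pauli = datasets.fetch_atlas_pauli_2017(version='prob')             # or 'det'
talairach_lobe = datasets.fetch_atlas_talairach(level_name='lobe')  # or 'gyrus', 'ba'

# each returned bunch exposes the fields the code above inspects
print(harvard_oxford.maps, len(harvard_oxford.labels))
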
github nidata / nidata / nidata / _external / nilearn / examples / connectivity / plot_adhd_spheres.py View on Github
"""
Extracting brain signal from spheres
====================================

This example extracts brain signals from spheres described by the coordinates
of their centers in MNI space and a given radius in millimeters. In
particular, it extracts signals from Default Mode Network regions and computes
a connectome from them.

"""

# Fetching dataset ############################################################
from nilearn import datasets
adhd_dataset = datasets.fetch_adhd(n_subjects=1)

# print basic information on the dataset
print('First subject functional nifti image (4D) is at: %s' %
      adhd_dataset.func[0])  # 4D data


# Extracting region signals ###################################################
from nilearn import input_data


# Coordinates of Default Mode Network
dmn_coords = [(0, -52, 18), (-46, -68, 32), (46, -68, 32), (0, 50, -5)]
labels = [
    'Posterior Cingulate Cortex',
    'Left Temporoparietal junction',
    'Right Temporoparietal junction',
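
The snippet is cut off before the extraction step the docstring describes. A hedged sketch of that step with NiftiSpheresMasker, reusing dmn_coords and adhd_dataset from above; the radius and the use of the bundled confounds file are illustrative choices, not taken from this page:

import numpy as np

# one sphere per DMN coordinate, 8 mm radius (illustrative value)
masker = input_data.NiftiSpheresMasker(
    dmn_coords, radius=8.,
    detrend=True, standardize=True,
    memory='nilearn_cache', memory_level=1)

time_series = masker.fit_transform(adhd_dataset.func[0],
                                   confounds=[adhd_dataset.confounds[0]])
connectome = np.corrcoef(time_series.T)  # (n_spheres, n_spheres) correlations
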
github nilearn / nilearn / examples / 05_advanced / plot_localizer_simple_analysis.py View on Github
variates.  The user can refer to the
`plot_localizer_mass_univariate_methods.py` example to see how to use these.


"""
# Author: Virgile Fritsch, May 2014
import numpy as np
import matplotlib.pyplot as plt
from nilearn import datasets
from nilearn.input_data import NiftiMasker


############################################################################
# Load Localizer contrast
n_samples = 20
localizer_dataset = datasets.fetch_localizer_calculation_task(
    n_subjects=n_samples)
tested_var = np.ones((n_samples, 1))


############################################################################
# Mask data
nifti_masker = NiftiMasker(
    smoothing_fwhm=5,
    memory='nilearn_cache', memory_level=1)  # cache options
cmap_filenames = localizer_dataset.cmaps
fmri_masked = nifti_masker.fit_transform(cmap_filenames)


############################################################################
# Anova (parametric F-scores)
from sklearn.feature_selection import f_regression
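
The snippet stops at the import; one way to run the parametric F-test the heading announces, reusing the variables defined above (a sketch, not necessarily the exact continuation of the original example):

# F-test of the intercept-only tested variate against each voxel;
# center=False keeps the mean, which is what is being tested here
_, pvals_anova = f_regression(fmri_masked, tested_var.ravel(), center=False)
neg_log_pvals_anova = -np.log10(pvals_anova)

# project the scores back into brain space for plotting
neg_log_pvals_img = nifti_masker.inverse_transform(neg_log_pvals_anova)
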
github photon-team / photon / photonai / neuro / photon_neuro_example.py View on Github
from Framework.PhotonBase import PipelineElement, Hyperpipe
from PhotonNeuro.BrainAtlas import BrainAtlas
from PhotonNeuro.AtlasStacker import AtlasStacker, AtlasInfo
from sklearn.model_selection import KFold

# get oasis gm data and age from nilearn
# imgs
from nilearn import datasets
oasis_dataset = datasets.fetch_oasis_vbm(n_subjects=20)
dataset_files = oasis_dataset.gray_matter_maps
targets = oasis_dataset.ext_vars['age'].astype(float)   # age

# # data
# from sklearn.datasets import load_breast_cancer
# dataset = load_breast_cancer()
# dataset_files = dataset.data
# targets = dataset.target

print(BrainAtlas._getAtlasDict())

# setup photonai HP
my_pipe = Hyperpipe('primary_pipe', optimizer='grid_search',
                    optimizer_params={},
                    metrics=['mean_squared_error', 'mean_absolute_error'],
                    inner_cv=KFold(n_splits=2, shuffle=True, random_state=3),
github nilearn / nilearn / examples / decoding / plot_oasis_vbm.py View on Github
____

"""
# Authors: Elvis Dhomatob, Apr. 2014
#          Virgile Fritsch, Apr. 2014
#          Gael Varoquaux, Apr. 2014
import numpy as np
from scipy import linalg
import matplotlib.pyplot as plt
from nilearn import datasets
from nilearn.input_data import NiftiMasker

n_subjects = 100   # more subjects requires more memory

### Load Oasis dataset ########################################################
oasis_dataset = datasets.fetch_oasis_vbm(n_subjects=n_subjects)
gray_matter_map_filenames = oasis_dataset.gray_matter_maps
age = oasis_dataset.ext_vars['age'].astype(float)

# print basic information on the dataset
print('First gray-matter anatomy image (3D) is located at: %s' %
      oasis_dataset.gray_matter_maps[0])  # 3D data
print('First white-matter anatomy image (3D) is located at: %s' %
      oasis_dataset.white_matter_maps[0])  # 3D data


### Preprocess data ###########################################################
nifti_masker = NiftiMasker(
    standardize=False,
    smoothing_fwhm=2,
    memory='nilearn_cache')  # cache options
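
The snippet ends before the masker is applied. The straightforward next step, reusing the names above, is to turn the gray-matter maps into a subjects-by-voxels matrix (a minimal sketch):

# one row per subject, one column per voxel inside the estimated mask
gm_maps_masked = nifti_masker.fit_transform(gray_matter_map_filenames)
n_samples, n_features = gm_maps_masked.shape
print('%d samples, %d features' % (n_samples, n_features))
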
github neurospin / pypreprocess / pypreprocess / time_diff.py View on Github
        stuff = reorder_img(results[which], resample="continuous")

        # XXX: Passing axes=ax param to plot_stat_map produces miracles!
        # XXX: As a quick fix, we simply plot and then do ax = plt.gca()
        plot_stat_map(stuff, bg_img=None, display_mode='z', cut_coords=5,
                      black_bg=True, title=title, **kwargs)
        if not use_same_figure:
            axes.append(plt.gca())

    return axes


if __name__ == '__main__':
    import matplotlib.pyplot as plt
    from nilearn import datasets
    nyu_rest_dataset = datasets.fetch_nyu_rest(n_subjects=2)
    filenames = nyu_rest_dataset.func
    results = multi_session_time_slice_diffs(filenames)
    plot_tsdiffs(results)
    plot_tsdiffs(results, use_same_figure=False)
    plt.show()
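
Note that fetch_nyu_rest was deprecated and has been removed from recent nilearn releases. If it is unavailable in your installation, a hedged substitute that yields the same list-of-4D-filenames layout is fetch_adhd; the resulting filenames can be fed to multi_session_time_slice_diffs exactly as above:

from nilearn import datasets

adhd_dataset = datasets.fetch_adhd(n_subjects=2)
filenames = adhd_dataset.func  # list of 4D functional images
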
github nilearn / nilearn / examples / 05_advanced / plot_haxby_mass_univariate.py View on Github
----------
[1] Winkler, A. M. et al. (2014).
    Permutation inference for the general linear model. Neuroimage.

[2] Anderson, M. J. & Robinson, J. (2001).
    Permutation tests for linear models.
    Australian & New Zealand Journal of Statistics, 43(1), 75-88.
    (http://avesbiodiv.mncn.csic.es/estadistica/permut2.pdf)

"""
# Author: Virgile Fritsch, Feb. 2014

##############################################################################
# Load Haxby dataset
from nilearn import datasets
haxby_dataset = datasets.fetch_haxby()

# print basic information on the dataset
print('Mask nifti image (3D) is located at: %s' % haxby_dataset.mask)
print('Functional nifti image (4D) is located at: %s' % haxby_dataset.func[0])

##############################################################################
# Mask data
mask_filename = haxby_dataset.mask
from nilearn.input_data import NiftiMasker
nifti_masker = NiftiMasker(
    mask_img=mask_filename,
    memory='nilearn_cache', memory_level=1)  # cache options
func_filename = haxby_dataset.func[0]
fmri_masked = nifti_masker.fit_transform(func_filename)

##############################################################################
github nidata / nidata / nidata / _external / nilearn / examples / decoding / plot_miyawaki_reconstruction.py View on Github
It reconstructs 10x10 binary images from functional MRI data. Random images
are used as the training set and structured images are used for
reconstruction.
"""

### Imports ###################################################################

from matplotlib import pyplot as plt
import time
import sys

### Load Kamitani dataset #####################################################
from nilearn import datasets
sys.stderr.write("Fetching dataset...")
t0 = time.time()

miyawaki_dataset = datasets.fetch_miyawaki2008()

# print basic information on the dataset
print('First functional nifti image (4D) is located at: %s' %
      miyawaki_dataset.func[0])  # 4D data

X_random_filenames = miyawaki_dataset.func[12:]
X_figure_filenames = miyawaki_dataset.func[:12]
y_random_filenames = miyawaki_dataset.label[12:]
y_figure_filenames = miyawaki_dataset.label[:12]
y_shape = (10, 10)

sys.stderr.write(" Done (%.2fs).\n" % (time.time() - t0))

### Preprocess and mask #######################################################
import numpy as np
from nilearn.input_data import MultiNiftiMasker
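
The snippet ends at the import; a hedged sketch of how the masking step typically continues (the mask attribute name and the masker parameters are assumptions based on the fetch_miyawaki2008 and MultiNiftiMasker documentation):

# fit one masker on the dataset's mask and reuse it for every run
masker = MultiNiftiMasker(mask_img=miyawaki_dataset.mask,
                          detrend=True, standardize=False)
masker.fit()

X_train = masker.transform(X_random_filenames)  # list of (n_scans, n_voxels)
X_test = masker.transform(X_figure_filenames)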