How to use the nilearn.datasets.fetch_adhd function in nilearn

To help you get started, we’ve selected a few nilearn examples based on popular ways fetch_adhd is used in public projects.
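Before going through the excerpts, here is a minimal sketch of the call itself and of the fields the examples below rely on (func, confounds, phenotypic); the printouts are illustrative only.

from nilearn import datasets

# Download (or reuse from the local nilearn data directory) one subject
# of the ADHD-200 resting-state dataset
adhd = datasets.fetch_adhd(n_subjects=1)

print(adhd.func[0])        # path to the 4D functional NIfTI image
print(adhd.confounds[0])   # path to the matching confounds file
print(adhd.phenotypic)     # phenotypic information for the fetched subjects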


github nistats / nistats / nistats / reporting / _visual_testing / _glm_reporter_visual_inspection_suite_.py View on Github external
# Imports needed by this excerpt (added here; not shown in this extract
# of the original file):
import numpy as np
import nilearn.datasets
from nilearn.input_data import NiftiSpheresMasker
from nistats.design_matrix import make_first_level_design_matrix
from nistats.first_level_model import FirstLevelModel


def report_flm_adhd_dmn():  # pragma: no cover
    t_r = 2.
    slice_time_ref = 0.
    n_scans = 176
    pcc_coords = (0, -53, 26)
    adhd_dataset = nilearn.datasets.fetch_adhd(n_subjects=1)
    seed_masker = NiftiSpheresMasker([pcc_coords], radius=10, detrend=True,
                                     standardize=True, low_pass=0.1,
                                     high_pass=0.01, t_r=2.,
                                     memory='nilearn_cache',
                                     memory_level=1, verbose=0)
    seed_time_series = seed_masker.fit_transform(adhd_dataset.func[0])
    frametimes = np.linspace(0, (n_scans - 1) * t_r, n_scans)
    design_matrix = make_first_level_design_matrix(frametimes, hrf_model='spm',
                                                   add_regs=seed_time_series,
                                                   add_reg_names=["pcc_seed"])
    dmn_contrast = np.array([1] + [0] * (design_matrix.shape[1] - 1))
    contrasts = {'seed_based_glm': dmn_contrast}

    first_level_model = FirstLevelModel(t_r=t_r, slice_time_ref=slice_time_ref)
    first_level_model = first_level_model.fit(run_imgs=adhd_dataset.func[0],
                                              design_matrices=design_matrix)
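The excerpt stops after fitting the model. A plausible next step, sketched here under the assumption that nistats' standard API is used (the original file's reporting calls are omitted), is to compute the seed-based contrast map:

# Compute the z-map for the seed-based contrast defined above
z_map = first_level_model.compute_contrast(contrasts['seed_based_glm'],
                                           output_type='z_score')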
github nilearn / nilearn / examples / 04_manipulating_images / plot_nifti_simple.py View on Github external
"""
Simple example of NiftiMasker use
==================================

Here is a simple example of automatic mask computation using the nifti masker.
The mask is computed and visualized.
"""

###########################################################################
# Retrieve the ADHD resting-state dataset

from nilearn import datasets
dataset = datasets.fetch_adhd(n_subjects=1)
func_filename = dataset.func[0]

# print basic information on the dataset
print('First functional nifti image (4D) is at: %s' % func_filename)

###########################################################################
# Compute the mask
from nilearn.input_data import NiftiMasker

# As this is raw resting-state EPI, the background is noisy and we cannot
# rely on the 'background' masking strategy. We need to use the 'epi' one
nifti_masker = NiftiMasker(standardize=True, mask_strategy='epi',
                           memory="nilearn_cache", memory_level=2,
                           smoothing_fwhm=8)
nifti_masker.fit(func_filename)
mask_img = nifti_masker.mask_img_
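A quick way to inspect the result, sketched here (not part of the excerpt), is to overlay the computed mask on the mean functional image:

from nilearn import plotting
from nilearn.image import mean_img

# Overlay the EPI-derived mask on the mean functional image
plotting.plot_roi(mask_img, mean_img(func_filename),
                  title="Mask computed with mask_strategy='epi'")
plotting.show()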
github nilearn / nilearn / plot_adhd_covariance2.py View on Github external
              vmin=-span, vmax=span,
              cmap=pl.cm.get_cmap("bwr"))
    pl.colorbar()
    pl.title("%s / precision" % title)


n_subjects = 10  # Number of subjects to consider


print("-- Computing covariance matrices ...")
import joblib
mem = joblib.Memory(".")

import nilearn.datasets
msdl_atlas = nilearn.datasets.fetch_msdl_atlas()
dataset = nilearn.datasets.fetch_adhd()

import nilearn.image
import nilearn.input_data

subjects = []

for subject_n in range(n_subjects):
    filename = dataset["func"][subject_n]
    print("Processing file %s" % filename)

    print("-- Computing confounds ...")
    confound_file = dataset["confounds"][subject_n]
    hv_confounds = mem.cache(nilearn.image.high_variance_confounds)(filename)

    print("-- Computing region signals ...")
    masker = nilearn.input_data.NiftiMapsMasker(
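        # Editorial sketch of a likely continuation -- the excerpt is cut
        # off here and the parameter values below are assumptions, not the
        # original script's:
        msdl_atlas["maps"], resampling_target="maps", detrend=True,
        high_pass=0.01, t_r=2.5, standardize=True, memory=mem, verbose=1)
    region_ts = masker.fit_transform(filename,
                                     confounds=[hv_confounds, confound_file])
    subjects.append(region_ts)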
github nilearn / nilearn / examples / 03_connectivity / plot_connectivity_measures.py View on Github external
"""
Functional connectivity measures for group analysis of connectomes
===================================================================

This example compares different measures of functional connectivity between
regions of interest: correlation, partial correlation, and a measure called
tangent. The resulting connectivity coefficients are used to classify ADHD
vs control subjects, and the tangent measure outperforms the standard
measures.

"""

# Fetch dataset
from nilearn import datasets
atlas = datasets.fetch_atlas_msdl()
dataset = datasets.fetch_adhd(n_subjects=20)


######################################################################
# Extract regions time series signals
from nilearn import input_data
masker = input_data.NiftiMapsMasker(
    atlas.maps, resampling_target="maps", detrend=True,
    low_pass=.1, high_pass=.01, t_r=2.5, standardize=False,
    memory='nilearn_cache', memory_level=1)
subjects = []
sites = []
adhds = []
for func_file, confound_file, phenotypic in zip(
        dataset.func, dataset.confounds, dataset.phenotypic):
    time_series = masker.fit_transform(func_file, confounds=confound_file)
    subjects.append(time_series)
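A typical follow-up, sketched here (assumed, not part of the excerpt), turns the per-subject time series into connectivity matrices:

from nilearn.connectome import ConnectivityMeasure

# One connectivity matrix (regions x regions) per subject
correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrices = correlation_measure.fit_transform(subjects)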
github nilearn / nilearn / examples / 03_connectivity / plot_sphere_based_connectome.py View on Github external
[1] Power, Jonathan D., et al. "Functional network organization of the
human brain." Neuron 72.4 (2011): 665-678.

[2] Dosenbach N.U., Nardos B., et al. "Prediction of individual brain maturity
using fMRI.", 2010, Science 329, 1358-1361.

"""

###############################################################################
# Load fMRI data and Power atlas
# ------------------------------
#
# We are going to use a single subject from the ADHD dataset.
from nilearn import datasets

adhd = datasets.fetch_adhd(n_subjects=1)

###############################################################################
# We store the paths to its functional image and the confounds file.
fmri_filename = adhd.func[0]
confounds_filename = adhd.confounds[0]
print('Functional image is {0},\nconfounds are {1}.'.format(fmri_filename,
      confounds_filename))

###############################################################################
# We fetch the coordinates of Power atlas.
power = datasets.fetch_coords_power_2011()
print('Power atlas comes with {0}.'.format(power.keys()))

###############################################################################
# Compute within spheres averaged time-series
# -------------------------------------------
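The excerpt ends at this heading. Below is a sketch of the step it announces; the radius and filtering parameters are assumptions, not necessarily those of the original example:

import numpy as np
from nilearn.input_data import NiftiSpheresMasker

# Stack the Power atlas coordinates into an (n_rois, 3) array of seeds
coords = np.vstack((power.rois['x'], power.rois['y'], power.rois['z'])).T

spheres_masker = NiftiSpheresMasker(
    seeds=coords, radius=5., detrend=True, standardize=True,
    low_pass=0.1, high_pass=0.01, t_r=2.5)
timeseries = spheres_masker.fit_transform(fmri_filename,
                                          confounds=confounds_filename)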
github nilearn / nilearn / examples / 03_connectivity / plot_signal_extraction.py View on Github external
"""

##############################################################################
# Retrieve the atlas and the data
# --------------------------------
from nilearn import datasets

dataset = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')
atlas_filename = dataset.maps
labels = dataset.labels

print('Atlas ROIs are located in nifti image (4D) at: %s' %
      atlas_filename)  # 4D data

# One subject of resting-state data
data = datasets.fetch_adhd(n_subjects=1)
fmri_filenames = data.func[0]

##############################################################################
# Extract signals on a parcellation defined by labels
# -----------------------------------------------------
# Using the NiftiLabelsMasker
from nilearn.input_data import NiftiLabelsMasker
masker = NiftiLabelsMasker(labels_img=atlas_filename, standardize=True,
                           memory='nilearn_cache', verbose=5)

# Here we go from nifti files to the signal time series in a numpy
# array. Note how we give confounds to be regressed out during signal
# extraction
time_series = masker.fit_transform(fmri_filenames, confounds=data.confounds)
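A plausible continuation (assumed, not shown in the excerpt) is to compute a correlation matrix from the extracted signals:

import numpy as np
from nilearn.connectome import ConnectivityMeasure

correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrix = correlation_measure.fit_transform([time_series])[0]
np.fill_diagonal(correlation_matrix, 0)  # mask the diagonal for display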

github nilearn / nilearn / examples / 03_connectivity / plot_fast_clustering.py View on Github external
This makes it well suited for use in a consensus over several random
parcellations; see for instance: Randomized parcellation based inference,
Da Mota et al., NeuroImage 2014.

The big picture discussion corresponding to this example can be found
in the documentation section :ref:`parcellating_brain`.
"""

##################################################################
# Download a rest dataset and turn it to a data matrix
# -----------------------------------------------------
#
# We download one subject of the ADHD dataset from Internet

from nilearn import datasets
dataset = datasets.fetch_adhd(n_subjects=1)

# print basic information on the dataset
print('First subject functional nifti image (4D) is at: %s' %
      dataset.func[0])  # 4D data


##################################################################
# Transform nifti files to a data matrix with the NiftiMasker
from nilearn import input_data

# The NiftiMasker will extract the data on a mask. We do not have a
# mask, hence we need to compute one.
#
# This is resting-state data: the background has not been removed yet,
# thus we need to use mask_strategy='epi' to compute the mask from the
# EPI images
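The excerpt stops at this comment. A minimal sketch of the masking step it describes, with assumed settings:

nifti_masker = input_data.NiftiMasker(mask_strategy='epi',
                                      memory='nilearn_cache',
                                      memory_level=1, standardize=False)
# fit_transform returns a (n_timepoints, n_voxels) data matrix
fmri_masked = nifti_masker.fit_transform(dataset.func[0])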
github nilearn / nilearn / examples / 04_manipulating_images / plot_smooth_mean_image.py View on Github external
"""
Smoothing an image
===================

Here we smooth a mean EPI image and plot the result.

As we vary the smoothing FWHM, note how we decrease the amount of noise,
but also lose spatial detail. In general, the best amount of smoothing
for a given analysis depends on the spatial extent of the effects that
are expected.

"""

from nilearn import datasets, plotting, image

data = datasets.fetch_adhd(n_subjects=1)

# Print basic information on the dataset
print('First subject functional nifti image (4D) is located at: %s' %
      data.func[0])

first_epi_file = data.func[0]

# First compute the mean image from the 4D series of images
mean_func = image.mean_img(first_epi_file)

# Then we smooth, with a varying amount of smoothing, from none to 20mm
# by increments of 5mm
for smoothing in range(0, 25, 5):
    smoothed_img = image.smooth_img(mean_func, smoothing)
    plotting.plot_epi(smoothed_img,
                      title="Smoothing %imm" % smoothing)
github nilearn / nilearn / examples / 03_connectivity / plot_inverse_covariance_connectome.py View on Github external
with the highest values.

"""

##############################################################################
# Retrieve the atlas and the data
# --------------------------------
from nilearn import datasets
atlas = datasets.fetch_atlas_msdl()
# Loading atlas image stored in 'maps'
atlas_filename = atlas['maps']
# Loading atlas data stored in 'labels'
labels = atlas['labels']

# Loading the functional datasets
data = datasets.fetch_adhd(n_subjects=1)

# print basic information on the dataset
print('First subject functional nifti image (4D) is at: %s' %
      data.func[0])  # 4D data

##############################################################################
# Extract time series
# --------------------
from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=atlas_filename, standardize=True,
                         memory='nilearn_cache', verbose=5)

time_series = masker.fit_transform(data.func[0],
                                   confounds=data.confounds)

##############################################################################
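The excerpt ends at this divider; in this inverse covariance example the next step is typically to estimate a sparse precision matrix. A hedged sketch follows (GraphicalLassoCV is the current scikit-learn name; older releases call it GraphLassoCV):

from sklearn.covariance import GraphicalLassoCV

estimator = GraphicalLassoCV()
estimator.fit(time_series)

# estimator.covariance_ and estimator.precision_ can then be visualized,
# e.g. with nilearn.plotting.plot_matrix and plot_connectome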
github nilearn / nilearn / examples / 03_connectivity / plot_multi_subject_connectome.py View on Github external
# Display covariance matrix
    plotting.plot_matrix(cov, cmap=plotting.cm.bwr,
                         vmin=-1, vmax=1, title="%s / covariance" % title,
                         labels=labels)
    # Display precision matrix
    plotting.plot_matrix(prec, cmap=plotting.cm.bwr,
                         vmin=-span, vmax=span, title="%s / precision" % title,
                         labels=labels)


##############################################################################
# Fetching datasets
# ------------------
from nilearn import datasets
msdl_atlas_dataset = datasets.fetch_atlas_msdl()
adhd_dataset = datasets.fetch_adhd(n_subjects=n_subjects)

# print basic information on the dataset
print('First subject functional nifti image (4D) is at: %s' %
      adhd_dataset.func[0])  # 4D data


##############################################################################
# Extracting region signals
# --------------------------
from nilearn import image
from nilearn import input_data

# A "memory" to avoid recomputation
from sklearn.externals.joblib import Memory
mem = Memory('nilearn_cache')
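From here the original example goes on to extract region signals for every subject. A sketch of that loop, with parameter values that are assumptions rather than the script's own:

masker = input_data.NiftiMapsMasker(
    msdl_atlas_dataset.maps, resampling_target="maps", detrend=True,
    high_pass=0.01, t_r=2.5, standardize=True, memory=mem, memory_level=1)

subject_time_series = []
for func_filename, confound_filename in zip(adhd_dataset.func,
                                            adhd_dataset.confounds):
    # High-variance confounds computed from the data themselves
    hv_confounds = mem.cache(image.high_variance_confounds)(func_filename)
    region_ts = masker.fit_transform(func_filename,
                                     confounds=[hv_confounds,
                                                confound_filename])
    subject_time_series.append(region_ts)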