How to use the nilearn.input_data.NiftiMasker class in nilearn

To help you get started, we’ve selected a few nilearn examples based on popular ways NiftiMasker is used in public projects.

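Before the project excerpts, here is a minimal sketch of the typical NiftiMasker workflow. The file names are placeholders rather than data from any of the projects below: fit learns (or accepts) a brain mask, fit_transform turns 4D images into a 2D samples-by-voxels array, and inverse_transform maps such an array back into image space.

# Minimal NiftiMasker round trip (placeholder file names, not project data).
from nilearn.input_data import NiftiMasker

masker = NiftiMasker(mask_img='mask.nii.gz', smoothing_fwhm=4, standardize=True)
X = masker.fit_transform('func.nii.gz')   # 2D array of shape (n_volumes, n_voxels)
img = masker.inverse_transform(X)         # back to a 4D Nifti image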

github arthurmensch / cogspaces / scripts / unmask.py
def unmask(data_dir=None, unmasked_dir='unmasked',
           n_jobs=30):
    data_dir = get_data_dir(data_dir)
    unmasked_dir = join(data_dir, unmasked_dir)
    if not os.path.exists(unmasked_dir):
        os.makedirs(unmasked_dir)
    contrasts = fetch_all()
    mask = fetch_mask()
    masker = NiftiMasker(smoothing_fwhm=4, mask_img=mask,
                         verbose=0, memory_level=1, memory=None).fit()
    imgs = contrasts['z_map'].values
    n_samples = imgs.shape[0]
    batches = list(gen_batches(n_samples, 1))
    unmask_single(masker, imgs, create_structure=True)
    Parallel(n_jobs=n_jobs, verbose=10)(delayed(unmask_single)(masker,
                                                               imgs[batch])
                                        for batch in batches)
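Note that unmask_single is a project helper not shown in this excerpt. The pattern worth taking away is that, because mask_img is supplied, fit() can be called without any images, and the single fitted masker is then shared across joblib workers that each call only transform on their batch. A rough, illustrative sketch reusing the names from the excerpt above (not the project's actual helper):

# Illustrative only: fit once from the mask, then transform each batch in parallel.
from joblib import Parallel, delayed

masked_batches = Parallel(n_jobs=4, verbose=10)(
    delayed(masker.transform)(imgs[batch]) for batch in batches)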
github nilearn / nilearn / plot_nifti_advanced.py
# Display helper
background = np.mean(haxby_func, axis=-1)[..., 27]


def display_mask(background, mask, title):
    plt.axis('off')
    plt.imshow(np.rot90(background), interpolation='nearest', cmap=plt.cm.gray)
    ma = np.ma.masked_equal(mask, False)
    plt.imshow(np.rot90(ma), interpolation='nearest',
               cmap=plt.cm.autumn, alpha=0.5)
    plt.title(title)

# Generate mask with default parameters
from nilearn.input_data import NiftiMasker
masker = NiftiMasker()
masker.fit(haxby_img)
default_mask = masker.mask_img_.get_data().astype(np.bool)
plt.figure(figsize=(3, 5))
display_mask(background, default_mask[..., 27], 'Default mask')
plt.tight_layout()

# Generate mask without opening
masker = NiftiMasker(mask_opening=0)
masker.fit(haxby_img)
opening_mask = masker.mask_img_.get_data().astype(np.bool)
plt.figure(figsize=(3, 5))
display_mask(background, opening_mask[..., 27], 'Mask without opening')
plt.tight_layout()

# Generate mask with upper cutoff
masker = NiftiMasker(mask_opening=True, mask_upper_cutoff=0.8)
github arthurmensch / modl / examples / experimental / fmri / hcp_analysis.py
def get_init_objective(output_dir):
    mask, func_filenames = get_hcp_data(raw=True)

    masker = NiftiMasker(mask_img=mask, smoothing_fwhm=None,
                         standardize=False)
    masker.fit()

    rsn70 = fetch_atlas_smith_2009().rsn70
    components = masker.transform(rsn70)
    print(components.shape)
    enet_scale(components.T, inplace=True)
    print(np.sum(np.abs(components), axis=1))
    test_data = func_filenames[(-n_test_records * 2)::2]

    n_samples, n_voxels = np.load(test_data[-1], mmap_mode='r').shape
    X = np.empty((n_test_records * n_samples, n_voxels))

    for i, this_data in enumerate(test_data):
        X[i * n_samples:(i + 1) * n_samples] = np.load(this_data,
                                                       mmap_mode='r')
github IBT-FMI / SAMRI / samri / analysis / fc.py
	roi_mask : str
	Path to the ROI mask for which to select the t-values.

	figure : {"per-participant", "per-voxel", "both"}
	At what level to resolve the t-values. Per-participant compares participant means, per-voxel compares all voxel values, both creates two plots covering the aforementioned cases.

	roi_mask_normalize : str
	Path to a ROI mask; the t-values in roi_mask are normalized by the mean of this mask's t-values.
	"""

	if isinstance(roi,str):
		roi_mask = path.abspath(path.expanduser(roi))
	if isinstance(seed,str):
		seed_mask = path.abspath(path.expanduser(seed))

	seed_masker = NiftiMasker(
			mask_img=seed_mask,
			smoothing_fwhm=smoothing_fwhm,
			detrend=detrend,
			standardize=standardize,
			low_pass=low_pass,
			high_pass=high_pass,
			t_r=tr,
			memory=cachedir, memory_level=1, verbose=0
			)
	brain_masker = NiftiMasker(
			mask_img=roi_mask,
			smoothing_fwhm=smoothing_fwhm,
			detrend=detrend,
			standardize=standardize,
			low_pass=low_pass,
			high_pass=high_pass,
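The excerpt stops inside the brain_masker definition. Once both maskers exist, seed-based connectivity of this kind is typically computed by extracting the time series under each mask and correlating every voxel's signal with the mean seed signal. The following is a rough sketch under that assumption, not the project's actual continuation; func_filename is a placeholder for the functional image, and the division by the number of scans yields Pearson correlations only if the maskers standardize the signals.

# Sketch only: seed-to-voxel correlation using the two maskers defined above.
import numpy as np

seed_time_series = seed_masker.fit_transform(func_filename)    # (n_scans, n_seed_voxels)
brain_time_series = brain_masker.fit_transform(func_filename)  # (n_scans, n_voxels)
seed_signal = seed_time_series.mean(axis=1)                    # mean signal in the seed ROI
correlations = np.dot(brain_time_series.T, seed_signal) / brain_time_series.shape[0]
correlation_img = brain_masker.inverse_transform(correlations)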
github neurospin / pypreprocess / pypreprocess / external / nistats / glm.py
        2. fit an OLS regression to (Y, X)
        3. fit an AR(1) regression if required
        This results in the internal (labels_, regression_results_) attributes

        Parameters
        ----------
        imgs: Niimg-like object or list of Niimg-like objects,
            See http://nilearn.github.io/building_blocks/manipulating_mr_images.html#niimg.
            Data on which the GLM will be fitted. If this is a list,
            the affine is considered the same for all.

        design_matrices: pandas DataFrame or list of pandas DataFrames,
            fMRI design matrices
        """
        # First, learn the mask
        if not isinstance(self.mask, NiftiMasker):
            self.masker_ = NiftiMasker(
                mask_img=self.mask, smoothing_fwhm=self.smoothing_fwhm,
                target_affine=self.target_affine,
                standardize=self.standardize, low_pass=self.low_pass,
                high_pass=self.high_pass, mask_strategy='epi',
                t_r=self.t_r, memory=self.memory,
                verbose=max(0, self.verbose - 1),
                target_shape=self.target_shape,
                memory_level=self.memory_level)
        else:
            self.masker_ = clone(self.mask)
            for param_name in ['target_affine', 'target_shape',
                               'smoothing_fwhm', 'low_pass', 'high_pass',
                               't_r', 'memory', 'memory_level']:
                our_param = getattr(self, param_name)
                if our_param is None:
github cosanlab / nltools / build / lib / nltools / analysis.py
        Returns:
            pexp: Outputs a vector of pattern expression values

    """

    if mask is not None:
        if type(mask) is not nib.nifti1.Nifti1Image:
            raise ValueError("Mask is not a nibabel instance")
    else:
        mask = nib.load(os.path.join(get_resource_path(),'MNI152_T1_2mm_brain_mask.nii.gz'))

    if type(data) is not nib.nifti1.Nifti1Image:
        raise ValueError("Data is not a nibabel instance")

    nifti_masker = NiftiMasker(mask_img=mask)
    data_masked = nifti_masker.fit_transform(data)

    if type(weight_map) is not nib.nifti1.Nifti1Image:
        raise ValueError("Weight_map is not a nibabel instance")

    weight_map_masked = nifti_masker.fit_transform(weight_map)

    # Calculate pattern expression
    if method == 'dot_product':
        pexp = np.dot(data_masked, np.transpose(weight_map_masked)).squeeze()
    elif method == 'correlation':
        pexp = pearson(data_masked,weight_map_masked)

    if save_output:
        np.savetxt(os.path.join(output_dir,"Pattern_Expression_" + method + ".csv"), pexp, delimiter=",")
github nilearn / nilearn / examples / 02_decoding / plot_haxby_anova_svm.py
condition_mask = np.logical_or(conditions == b'face', conditions == b'house')
conditions = conditions[condition_mask]

# We now have 2 conditions
print(np.unique(conditions))
session = behavioral[condition_mask]

#############################################################################
# Prepare the fMRI data: smooth and apply the mask
from nilearn.input_data import NiftiMasker

mask_filename = haxby_dataset.mask

# For decoding, standardizing is often very important
# note that we are also smoothing the data
masker = NiftiMasker(mask_img=mask_filename, smoothing_fwhm=4,
                     standardize=True, memory="nilearn_cache", memory_level=1)
func_filename = haxby_dataset.func[0]
X = masker.fit_transform(func_filename)
# Apply our condition_mask
X = X[condition_mask]

#############################################################################
# Build the decoder

# Define the prediction function to be used.
# Here we use a Support Vector Classification, with a linear kernel
from sklearn.svm import SVC
svc = SVC(kernel='linear')

# Define the dimension reduction to be used.
# Here we use a classical univariate feature selection based on F-test,
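The excerpt ends just before the feature-selection step is defined. As a hedged sketch of how such an ANOVA-plus-SVM decoder is commonly assembled with scikit-learn (the number of selected features is an arbitrary illustration, not the example's actual value):

# Sketch only: univariate (F-test) feature selection chained with the SVC above.
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.pipeline import Pipeline

feature_selection = SelectKBest(f_classif, k=500)   # k chosen for illustration
anova_svc = Pipeline([('anova', feature_selection), ('svc', svc)])
anova_svc.fit(X, conditions)
prediction = anova_svc.predict(X)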
github IBT-FMI / SAMRI / samri / report / utilities.py
	exclude_zero : bool, optional
		Whether to filter out zero values.
	substitution : dict, optional
		A dictionary with keys which include 'subject' and 'session'.
	zero_threshold : float, optional
		Absolute value below which values are to be considered zero.
	"""
	if substitution:
		img_path = img_path.format(**substitution)
	img_path = path.abspath(path.expanduser(img_path))
	img = nib.load(img_path)
	try:
		masked_data = masker.fit_transform(img)
	except:
		masker = path.abspath(path.expanduser(masker))
		masker = NiftiMasker(mask_img=masker)
		masked_data = masker.fit_transform(img)
	masked_data = masked_data.flatten()
	masked_data = masked_data[~np.isnan(masked_data)]
	if exclude_zero:
		masked_data = masked_data[np.abs(masked_data)>=zero_threshold]
	masked_mean = np.mean(masked_data)
	masked_median = np.median(masked_data)
	return masked_mean, masked_median
github IBT-FMI / SAMRI / samri / report / roi.py
def mean(img_path, mask_path):
	"""Return the mean of the masked region of an image.
	"""
	mask = path.abspath(path.expanduser(mask_path))
	if mask_path.endswith("roi"):
		mask = loadmat(mask)["ROI"]
		while mask.ndim != 3:
			mask=mask[0]
		img_path = path.abspath(path.expanduser(img_path))
		img = nib.load(img_path)
	else:
		masker = NiftiMasker(mask_img=mask)
		roi_df(img_path,masker)
github nilearn / nilearn / nilearn / decomposition / multi_pca.py
        # Hack to support single-subject data:
        if isinstance(imgs, (_basestring, nibabel.Nifti1Image)):
            imgs = [imgs]
            # This is a very incomplete hack, as it won't work right for
            # single-subject list of 3D filenames
        if len(imgs) == 0:
            # Common error that arises from a null glob. Capture
            # it early and raise a helpful message
            raise ValueError('Need one or more Niimg-like objects as input, '
                             'an empty list was given.')
        if confounds is None:
            confounds = itertools.repeat(None, len(imgs))

        # First, learn the mask
        if not isinstance(self.mask, (NiftiMasker, MultiNiftiMasker)):
            self.masker_ = MultiNiftiMasker(mask_img=self.mask,
                                            smoothing_fwhm=self.smoothing_fwhm,
                                            target_affine=self.target_affine,
                                            target_shape=self.target_shape,
                                            standardize=self.standardize,
                                            low_pass=self.low_pass,
                                            high_pass=self.high_pass,
                                            mask_strategy='epi',
                                            t_r=self.t_r,
                                            memory=self.memory,
                                            memory_level=self.memory_level,
                                            n_jobs=self.n_jobs,
                                            verbose=max(0, self.verbose - 1))
        else:
            try:
                self.masker_ = clone(self.mask)