How to use the nilearn.datasets.fetch_haxby function in nilearn

To help you get started, we've selected a few nilearn examples based on popular ways nilearn.datasets.fetch_haxby is used in public projects.


github nilearn / nilearn / examples / 02_decoding / plot_haxby_space_net.py
"""
Decoding with SpaceNet: face vs house object recognition
=========================================================

Here is a simple example of decoding with a SpaceNet prior (i.e. Graph-Net,
TV-l1, etc.), reproducing the Haxby 2001 study on a face vs house
discrimination task.

See also the SpaceNet documentation: :ref:`space_net`.
"""

##############################################################################
# Load the Haxby dataset
from nilearn.datasets import fetch_haxby
data_files = fetch_haxby()

# Load behavioral data
import numpy as np
behavioral = np.recfromcsv(data_files.session_target[0], delimiter=" ")


# Restrict to face and house conditions
conditions = behavioral['labels']
condition_mask = np.logical_or(conditions == b"face",
                               conditions == b"house")

# Split data into train and test samples, using the chunks
condition_mask_train = np.logical_and(condition_mask, behavioral['chunks'] <= 6)
condition_mask_test = np.logical_and(condition_mask, behavioral['chunks'] > 6)

# Apply this sample mask to X (fMRI data) and y (behavioral labels)
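The excerpt stops at this comment; here is a minimal sketch of the masking step it announces, assuming the 4D image lives in data_files.func[0] as in the other snippets on this page:

# Hedged sketch (not part of the excerpt): select train/test volumes and labels
from nilearn.image import index_img

func_filename = data_files.func[0]
X_train = index_img(func_filename, condition_mask_train)  # 4D image, train scans
X_test = index_img(func_filename, condition_mask_test)    # 4D image, test scans
y_train = conditions[condition_mask_train]
y_test = conditions[condition_mask_test]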
github nidata / nidata / nidata / _external / nilearn / examples / manipulating_visualizing / plot_visualization.py
"""
NeuroImaging volumes visualization
==================================

Simple example to show Nifti data visualization.
"""

### Fetch data ################################################################

from nilearn import datasets
from nilearn.image.image import mean_img
from nilearn.plotting.img_plotting import plot_epi, plot_roi

haxby_dataset = datasets.fetch_haxby(n_subjects=1)

# print basic information on the dataset
print('First anatomical nifti image (3D) is located at: %s' %
      haxby_dataset.anat[0])
print('First functional nifti image (4D) is located at: %s' %
      haxby_dataset.func[0])

### Visualization #############################################################

import matplotlib.pyplot as plt

# Compute the mean EPI: average along axis 3, which is time
func_filename = haxby_dataset.func[0]
mean_haxby = mean_img(func_filename)

plot_epi(mean_haxby)
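plot_roi is imported above but never used in the excerpt; a plausible continuation (a sketch, assuming the mask_vt field returned by fetch_haxby) overlays the ventral temporal mask on the mean EPI:

# Sketch: overlay the ventral temporal mask on the mean EPI
plot_roi(haxby_dataset.mask_vt[0], mean_haxby, title="Ventral temporal mask")
plt.show()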
github nilearn / nilearn / examples / 02_decoding / plot_haxby_anova_svm.py
"""
Decoding with ANOVA + SVM: face vs house in the Haxby dataset
===============================================================

This example performs a simple but efficient decoding on the Haxby dataset:
feature selection (ANOVA) followed by an SVM.

"""

#############################################################################
# Retrieve the files of the Haxby dataset
from nilearn import datasets

# By default the 2nd subject will be fetched
haxby_dataset = datasets.fetch_haxby()

# print basic information on the dataset
print('Mask nifti image (3D) is located at: %s' % haxby_dataset.mask)
print('Functional nifti image (4D) is located at: %s' %
      haxby_dataset.func[0])

#############################################################################
# Load the behavioral data

import numpy as np
# Load target information as strings
behavioral = np.recfromcsv(haxby_dataset.session_target[0], delimiter=" ")
conditions = behavioral['labels']

# Restrict the analysis to faces and places
condition_mask = np.logical_or(conditions == b'face', conditions == b'house')
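The excerpt ends once the condition mask is built; below is a hedged sketch of the ANOVA + SVM decoding it sets up (the full example's exact parameters, such as the number of selected features, may differ):

# Sketch: mask the fMRI data, then ANOVA feature selection + linear SVM
from nilearn.input_data import NiftiMasker
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC

masker = NiftiMasker(mask_img=haxby_dataset.mask, standardize=True)
X = masker.fit_transform(haxby_dataset.func[0])[condition_mask]
y = conditions[condition_mask]

anova_svc = Pipeline([('anova', SelectKBest(f_classif, k=500)),
                      ('svc', SVC(kernel='linear'))])
anova_svc.fit(X, y)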
github nidata / nidata / nidata / _external / nilearn / examples / plot_haxby_simple.py
"""
Simple example of decoding: the Haxby data
==============================================

Here is a simple example of decoding, reproducing the Haxby 2001
study on a face vs cat discrimination task in a mask of the ventral
stream.
"""

### Load haxby dataset ########################################################

from nilearn import datasets
haxby_dataset = datasets.fetch_haxby()

# print basic information on the dataset
print('First subject anatomical nifti image (3D) is at: %s' %
      haxby_dataset.anat[0])
print('First subject functional nifti images (4D) are at: %s' %
      haxby_dataset.func[0])  # 4D data

### Load Target labels ########################################################

import numpy as np
# Load target information as strings and give a numerical identifier to each
labels = np.recfromcsv(haxby_dataset.session_target[0], delimiter=" ")

# scikit-learn >= 0.14 supports text labels. You can replace this line by:
# target = labels['labels']
_, target = np.unique(labels['labels'], return_inverse=True)
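The excerpt stops after label encoding; a minimal sketch of the decoding step the title promises, assuming the ventral-stream mask shipped with the dataset (mask_vt); note the full example additionally restricts the samples to the face and cat conditions:

# Sketch: extract masked time series and fit a linear SVM on the targets
from nilearn.input_data import NiftiMasker
from sklearn.svm import SVC

masker = NiftiMasker(mask_img=haxby_dataset.mask_vt[0], standardize=True)
X = masker.fit_transform(haxby_dataset.func[0])
svc = SVC(kernel='linear')
svc.fit(X, target)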
github nistats / nistats / examples / 05_complete_examples / plot_haxby_block_classification.py
To run this example, you must launch IPython via ``ipython --matplotlib``
in a terminal, or use the Jupyter notebook.

.. contents:: **Contents**
    :local:
    :depth: 1
"""

##############################################################################
# Fetch example Haxby dataset
# ----------------------------
# We download the Haxby dataset
# This is a study of visual object category representation
from nilearn import datasets

# By default the 2nd subject will be fetched
haxby_dataset = datasets.fetch_haxby()

# The repetition time (TR) has to be known
TR = 2.5

#############################################################################
# Load the behavioral data
# -------------------------
import pandas as pd

# Load target information as strings
behavioral = pd.read_csv(haxby_dataset.session_target[0], sep=' ')
conditions = behavioral['labels'].values

# Record these as an array of sessions
sessions = behavioral['chunks'].values
unique_sessions = behavioral['chunks'].unique()
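The excerpt stops here; a hedged sketch of how per-session events tables for a GLM could be assembled from these arrays (onsets assume one volume every TR; the column names follow the BIDS events convention, and the actual example may build them differently):

import numpy as np

# Sketch: one events table per session (onset, duration, trial_type)
events = {}
for session in unique_sessions:
    n_scans = np.sum(sessions == session)
    events[session] = pd.DataFrame({
        'onset': TR * np.arange(n_scans),
        'duration': TR,
        'trial_type': conditions[sessions == session],
    })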
github nilearn / nilearn / examples / 02_decoding / plot_haxby_searchlight.py
Searchlight analysis requires fitting a classifier a large number of
times. As a result, it is an intrinsically slow method. In order to speed
up computing, in this example, Searchlight is run only on one slice of
the fMRI data (see the generated figures).

"""

#########################################################################
# Load Haxby dataset
# -------------------
import pandas as pd
from nilearn import datasets
from nilearn.image import new_img_like, load_img

# We fetch the 2nd subject from the Haxby dataset (the default)
haxby_dataset = datasets.fetch_haxby()

# print basic information on the dataset
print('Mask nifti image (3D) is located at: %s' % haxby_dataset.mask)
print('Functional nifti image (4D) is located at: %s' % haxby_dataset.func[0])

fmri_filename = haxby_dataset.func[0]
labels = pd.read_csv(haxby_dataset.session_target[0], sep=" ")
y = labels['labels']
session = labels['chunks']

#########################################################################
# Restrict to faces and houses
# ------------------------------
from nilearn.image import index_img
condition_mask = y.isin(['face', 'house'])
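The excerpt ends at the condition mask; a minimal sketch of the restriction step and a SearchLight fit follows (the radius and mask choice here are illustrative, not necessarily those of the full example):

# Sketch: keep only the face/house volumes, then fit a searchlight
from nilearn.decoding import SearchLight

fmri_img = index_img(fmri_filename, condition_mask)
searchlight = SearchLight(haxby_dataset.mask_vt[0], radius=5.6,
                          n_jobs=1, verbose=1)
searchlight.fit(fmri_img, y[condition_mask])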
github nilearn / nilearn / examples / 01_plotting / plot_haxby_masks.py
"""
Plot Haxby masks
=================

Small script to plot the masks of the Haxby dataset.
"""
import numpy as np
from scipy import linalg
import matplotlib.pyplot as plt

from nilearn import datasets
haxby_dataset = datasets.fetch_haxby()

# print basic information on the dataset
print('First subject anatomical nifti image (3D) is at: %s' %
      haxby_dataset.anat[0])
print('First subject functional nifti image (4D) is at: %s' %
      haxby_dataset.func[0])  # 4D data

# Build the mean functional image to use as a background
# (the Haxby masks are defined in the subject's functional space)
from nilearn import image
func_filename = haxby_dataset.func[0]
mean_img = image.mean_img(func_filename)

z_slice = -14
from nilearn.image.resampling import coord_transform
affine = mean_img.affine
_, _, k_slice = coord_transform(0, 0, z_slice,
                                linalg.inv(affine))  # world -> voxel indices
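The excerpt cuts off at this point; a hedged sketch of the mask plotting the script's title describes, drawing each mask as a contour on the mean image (mask_face and mask_house, like mask_vt, are fields of the fetched dataset; colors are illustrative):

# Sketch: draw each Haxby mask as a contour over the mean functional image
from nilearn import plotting
display = plotting.plot_anat(mean_img, display_mode='z',
                             cut_coords=[z_slice])
display.add_contours(haxby_dataset.mask_vt[0], colors='red')
display.add_contours(haxby_dataset.mask_face[0], colors='blue')
display.add_contours(haxby_dataset.mask_house[0], colors='limegreen')
plt.show()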
github nidata / nidata / nidata / _external / nilearn / examples / manipulating_visualizing / plot_demo_plotting.py
"""
Plotting in nilearn
==========================

Nilearn comes with a set of plotting functions for Nifti-like images,
see :ref:`plotting` for more details.
"""

from nilearn import datasets
from nilearn import plotting, image

###############################################################################
# Retrieve the data: the Haxby dataset for EPI images and masks, and
# the localizer dataset for contrast maps

haxby_dataset = datasets.fetch_haxby(n_subjects=1)

# print basic information on the dataset
print('First subject anatomical nifti image (3D) is at: %s' %
      haxby_dataset.anat[0])
print('First subject functional nifti image (4D) is at: %s' %
      haxby_dataset.func[0])  # 4D data

haxby_anat_filename = haxby_dataset.anat[0]
haxby_mask_filename = haxby_dataset.mask_vt[0]
haxby_func_filename = haxby_dataset.func[0]

localizer_dataset = datasets.fetch_localizer_contrasts(
    ["left vs right button press"],
    n_subjects=2,
    get_anats=True,
    get_tmaps=True)
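The excerpt ends after the localizer fetch; a minimal sketch of the kind of plotting calls such a demo proceeds with (the tmaps and anats keys assume the Bunch returned by fetch_localizer_contrasts with get_tmaps and get_anats enabled; titles and thresholds are illustrative):

# Sketch: demonstrate a few plotting functions on the fetched images
mean_haxby_img = image.mean_img(haxby_func_filename)
plotting.plot_epi(mean_haxby_img, title='plot_epi')
plotting.plot_roi(haxby_mask_filename, bg_img=haxby_anat_filename,
                  title='plot_roi')
plotting.plot_stat_map(localizer_dataset.tmaps[0],
                       bg_img=localizer_dataset.anats[0],
                       threshold=3, title='plot_stat_map')
plotting.show()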