How to use the nilearn.plotting.plot_glass_brain function

To help you get started, we’ve selected a few nilearn examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github poldracklab / fitlins / fitlins / interfaces / visualizations.py View on Github external
subcort, ltexture, rtexture = decompose_dscalar(img)
    fig = plt.figure(figsize=(11, 9))
    ax1 = plt.subplot2grid((3, 2), (0, 0), projection='3d')
    ax2 = plt.subplot2grid((3, 2), (0, 1), projection='3d')
    ax3 = plt.subplot2grid((3, 2), (1, 0), projection='3d')
    ax4 = plt.subplot2grid((3, 2), (1, 1), projection='3d')
    ax5 = plt.subplot2grid((3, 2), (2, 0), colspan=2)
    lsurf = nb.load('/home/cjmarkie/Downloads/Conte69.L.inflated.32k_fs_LR.surf.gii').agg_data()
    rsurf = nb.load('/home/cjmarkie/Downloads/Conte69.R.inflated.32k_fs_LR.surf.gii').agg_data()
    kwargs = {'threshold': None if threshold == 'auto' else threshold,
              'colorbar': False, 'plot_abs': plot_abs, 'cmap': cmap, 'vmax': vmax}
    nlp.plot_surf_stat_map(lsurf, ltexture, view='lateral', axes=ax1, **kwargs)
    nlp.plot_surf_stat_map(rsurf, rtexture, view='medial', axes=ax2, **kwargs)
    nlp.plot_surf_stat_map(lsurf, ltexture, view='medial', axes=ax3, **kwargs)
    nlp.plot_surf_stat_map(rsurf, rtexture, view='lateral', axes=ax4, **kwargs)
    nlp.plot_glass_brain(subcort, display_mode='lyrz', axes=ax5, **kwargs)
    if colorbar:
        data = img.get_fdata(dtype=np.float32)
        if vmax is None:
            vmax = max(-data.min(), data.max())
        norm = mpl.colors.Normalize(vmin=-vmax if data.min() < 0 else 0, vmax=vmax)
        sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
        fig.colorbar(sm, ax=fig.axes, location='right', aspect=50)
    if output_file:
        fig.savefig(output_file)
        plt.close(fig)
github arthurmensch / cogspaces / exps / analyse / plot_figure_4.py View on Github external
if display_mode == 'xz':
                rcc = (cc[0], cc[2])
                ann = 'x = %i, z = %i' % rcc
            elif display_mode == 'yz':
                rcc = (cc[1], cc[2])
                ann = 'y = %i, z = %i' % rcc

            plot_stat_map(img, figure=fig,
                          threshold=0,
                          colorbar=False,
                          annotate=False,
                          cut_coords=rcc,
                          display_mode=display_mode,
                          vmax=vmax,
                          axes=ax_stat)
            plot_glass_brain(img, figure=fig,
                             threshold=vmax / 2.5 if contrast == 'language_vs_sound' else vmax / 3,
                             plot_abs=False,
                             vmax=vmax,
                             cut_coords=cut_coords,
                             colorbar=False,
                             annotate=column == 1,
                             display_mode='z',
                             axes=ax_glass)

            if column == 0:
                ax_stat.annotate(ann, xycoords='axes fraction',
                                 va='bottom',
                                 xytext=(0., ann_offsets[row]),
                                 fontsize=10,
                                 textcoords='offset points',
                                 bbox=dict(facecolor='white', edgecolor=None,
github nilearn / nilearn / examples / manipulating_visualizing / plot_demo_glass_brain.py View on Github external
###############################################################################
# Retrieve the data
from nilearn import datasets

# Download two subjects of the localizer contrasts dataset; only the
# second subject's t-map is used below.
localizer_dataset = datasets.fetch_localizer_contrasts(
    ["left vs right button press"],
    n_subjects=2,
    get_tmaps=True)
localizer_tmap_filename = localizer_dataset.tmaps[1]

###############################################################################
# demo glass brain plotting
from nilearn import plotting

# Maximum-intensity projections of the absolute values (the default),
# first unthresholded, then thresholded at |t| = 3.
plotting.plot_glass_brain(localizer_tmap_filename, threshold=0, colorbar=True)

plotting.plot_glass_brain(localizer_tmap_filename, threshold=3, colorbar=True)

plotting.plot_glass_brain(localizer_tmap_filename, title='plot_glass_brain',
                          black_bg=True, display_mode='xz', threshold=3, colorbar=True)

# The same three plots, but keeping the sign of the data so negative values
# appear in the projection.  In the released nilearn API this is controlled
# by ``plot_abs=False``; ``plot_negative`` is not a parameter of
# ``plot_glass_brain``.
plotting.plot_glass_brain(localizer_tmap_filename, threshold=0, colorbar=True,
                          plot_abs=False)

plotting.plot_glass_brain(localizer_tmap_filename, threshold=3, colorbar=True,
                          plot_abs=False)

plotting.plot_glass_brain(localizer_tmap_filename, title='plot_glass_brain',
                          black_bg=True, display_mode='xz', threshold=3, colorbar=True,
                          plot_abs=False)

import matplotlib.pyplot as plt
plt.show()
github arthurmensch / cogspaces / sandbox / introspect.py View on Github external
def plot_single_img(name, plot_dir, study, this_img, to=1 / 3):
    """Save a glass-brain and a stat-map rendering of ``this_img``.

    Two PNG files are written into ``plot_dir``:
    ``<study>_<name>_glass.png`` and ``<study>_<name>.png``.

    Parameters
    ----------
    name : str
        Label of the map, used in plot titles and file names.
    plot_dir : str
        Directory the PNG files are written to.
    study : str
        Study identifier, used in plot titles and file names.
    this_img : niimg-like
        Image to plot.
    to : float, optional
        Fraction of the maximum absolute value used as display threshold.
    """
    # Symmetric color scale around the largest absolute value.
    # NOTE: ``Nifti1Image.get_data()`` is deprecated and removed in
    # nibabel >= 5; ``get_fdata()`` is the supported accessor.
    vmax = np.max(np.abs(this_img.get_fdata()))
    cut_coords = find_xyz_cut_coords(this_img,
                                     activation_threshold=vmax / 3)
    fig = plt.figure()
    plot_glass_brain(this_img, title='%s::%s' % (study, name),
                     plot_abs=False,
                     cut_coords=cut_coords,
                     threshold=vmax * to, figure=fig)
    plt.savefig(join(plot_dir, '%s_%s_glass.png' % (study, name)))
    plt.close(fig)
    fig = plt.figure()
    plot_stat_map(this_img, title='%s::%s' % (study, name),
                  cut_coords=cut_coords,
                  threshold=vmax * to, figure=fig)
    plt.savefig(join(plot_dir, '%s_%s.png' % (study, name)))
    plt.close(fig)
github arthurmensch / cogspaces / cogspaces / plotting.py View on Github external
cut_coords=cut_coords,
                              vmax=vmax,
                              colorbar=False,
                              output_file=src,
                              # cmap=cmap
                              )
                plot_stat_map(img, threshold=threshold,
                              cut_coords=cut_coords,
                              vmax=vmax,
                              display_mode='ortho',
                              colorbar=True,
                              output_file=src.replace('.png', '_z.svg'),
                              # cmap=cmap
                              )
            else:
                plot_glass_brain(img, threshold=threshold,
                                 vmax=vmax,
                                 plot_abs=False,
                                 output_file=src,
                                 colorbar=False,
                                 # cmap=cmap_white
                                 )
                plot_glass_brain(img, threshold=threshold,
                                 vmax=vmax,
                                 display_mode='ortho',
                                 plot_abs=False,
                                 output_file=src.replace('.png', '_xz.svg'),
                                 colorbar=True,
                                 # cmap=cmap_white
                                 )
        else:
            raise ValueError('Wrong view type in `view_types`: got %s' %
github nidata / nidata / nidata / _external / nilearn / examples / manipulating_visualizing / plot_demo_plotting.py View on Github external
n_subjects=2,
    get_anats=True,
    get_tmaps=True)
localizer_anat_filename = localizer_dataset.anats[1]
localizer_tmap_filename = localizer_dataset.tmaps[1]

###############################################################################
# demo the different plotting functions

# Plotting statistical maps
plotting.plot_stat_map(localizer_tmap_filename, bg_img=localizer_anat_filename,
                       threshold=3, title="plot_stat_map",
                       cut_coords=(36, -27, 66))

# Plotting glass brain
plotting.plot_glass_brain(localizer_tmap_filename, title='plot_glass_brain',
                          threshold=3)

# Plotting anatomical maps
plotting.plot_anat(haxby_anat_filename, title="plot_anat")

# Plotting ROIs (here the mask)
plotting.plot_roi(haxby_mask_filename, bg_img=haxby_anat_filename,
                  title="plot_roi")

# Plotting EPI haxby
mean_haxby_img = image.mean_img(haxby_func_filename)
plotting.plot_epi(mean_haxby_img, title="plot_epi")

import matplotlib.pyplot as plt
plt.show()
github nilearn / nilearn / examples / 01_plotting / plot_demo_glass_brain.py View on Github external
See :ref:`plotting` for more plotting functionalities.
"""


###############################################################################
# Retrieve data from Internet
from nilearn import datasets

# Fetch the "button press" localizer contrast; only the first t-map is used.
localizer_dataset = datasets.fetch_localizer_button_task()
# NOTE(review): assumes the returned bunch exposes a ``tmaps`` list — confirm
# against the installed nilearn version.
localizer_tmap_filename = localizer_dataset.tmaps[0]

###############################################################################
# Demo glass brain plotting using whole brain sagittal cuts
from nilearn import plotting

# Default 'ortho' view, thresholded at 3.
plotting.plot_glass_brain(localizer_tmap_filename, threshold=3)

###############################################################################
# On a black background (option "black_bg"), and with only the x and
# the z view (option "display_mode").
plotting.plot_glass_brain(
    localizer_tmap_filename, title='plot_glass_brain',
    black_bg=True, display_mode='xz', threshold=3)

###############################################################################
# Hemispheric sagittal cuts
# 'l'/'r' give separate left/right hemisphere sagittal projections, combined
# here with the coronal ('y') and axial ('z') projections.
plotting.plot_glass_brain(localizer_tmap_filename,
                          title='plot_glass_brain with display_mode="lyrz"',
                          display_mode='lyrz', threshold=3)

plotting.show()
github cosanlab / nltools / nltools / plotting.py View on Github external
colorbar=True,
            cmap=cmap,
            plot_abs=False,
            **kwargs
        )
        for v, c in zip(views, cut_coords):
            plot_stat_map(
                obj.to_nifti(),
                cut_coords=c,
                display_mode=v,
                cmap=cmap,
                bg_img=resolve_mni_path(MNI_Template)["brain"],
                **kwargs
            )
    elif how == "glass":
        plot_glass_brain(
            obj.to_nifti(),
            display_mode="lzry",
            colorbar=True,
            cmap=cmap,
            plot_abs=False,
            **kwargs
        )
    elif how == "mni":
        for v, c in zip(views, cut_coords):
            plot_stat_map(
                obj.to_nifti(),
                cut_coords=c,
                display_mode=v,
                cmap=cmap,
                bg_img=resolve_mni_path(MNI_Template)["brain"],
                **kwargs
github nistats / nistats / nistats / reporting / glm_reporter.py View on Github external
table_details: pandas.Dataframe
        Dataframe listing the parameters used for clustering,
        to be included in the plot.

    Returns
    -------
    stat_map_svg: string
        SVG Image Data URL representing a statistical map.
    """
    if plot_type == 'slice':
        stat_map_plot = plot_stat_map(stat_img,
                                      bg_img=bg_img,
                                      display_mode=display_mode,
                                      )
    elif plot_type == 'glass':
        stat_map_plot = plot_glass_brain(stat_img,
                                         display_mode=display_mode,
                                         colorbar=True,
                                         plot_abs=False,
                                         )
    else:
        raise ValueError('Invalid plot type provided. Acceptable options are'
                         "'slice' or 'glass'.")
    with pd.option_context('display.precision', 2):
        stat_map_plot = _add_params_to_plot(table_details, stat_map_plot)
    fig = plt.gcf()
    stat_map_svg = plot_to_svg(fig)
    # prevents sphinx-gallery & jupyter from scraping & inserting plots
    plt.close()
    return stat_map_svg