How to use the napari.Viewer class in napari

To help you get started, we’ve selected a few napari.Viewer examples, based on popular ways it is used in public projects.
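
Every snippet on this page follows the same basic pattern: create a napari.Viewer, add one or more layers to it, and keep the Qt event loop running. Below is a minimal, self-contained sketch of that pattern; the random array and layer name are placeholders, and note that the project snippets further down start the event loop with the older napari.gui_qt() context manager, whereas recent napari releases use napari.run() instead.

import numpy as np
import napari

# arbitrary synthetic data; any array-like (NumPy, dask, xarray, ...) works
data = np.random.random((16, 256, 256))

# create an empty viewer and add the array as an image layer
viewer = napari.Viewer()
viewer.add_image(data, name='random volume')

# start the Qt event loop; with the older napari versions used in the
# snippets below, wrap the code above in "with napari.gui_qt():" instead
napari.run()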

github constantinpape / elf / test / wrapper / test_resized_volume.py
        # stack 256 copies of x along a new leading axis to get a 3d test volume
        x = np.concatenate(256 * [x[None]], axis=0)

        out_shape = 3 * (128,)
        order = 0
        out1 = vigra.sampling.resize(x, shape=out_shape, order=order)
        out2 = ResizedVolume(x, shape=out_shape, order=order)
        out3 = resize(x, out_shape, order=0, preserve_range=True, anti_aliasing=False)
        assert out1.shape == out2.shape == out_shape
        # bb = np.s_[:64, :, 64:]
        bb = np.s_[:]
        o1 = out1[bb]
        o2 = out2[bb]
        o3 = out3[bb]
        import napari
        with napari.gui_qt():
            viewer = napari.Viewer()
            # compare elf's lazy ResizedVolume against the vigra and skimage results
            viewer.add_image(o1, name='vigra')
            viewer.add_image(o2, name='elf')
            viewer.add_image(o3, name='skimage')
            # viewer.add_labels(diff, name='pix-diff')
github napari / napari / examples / xarray_nD_image.py
try:
    import xarray as xr
except ImportError:
    raise ImportError("""This example uses xarray, but xarray is not
    installed. To install it, try 'pip install xarray'.""")

import numpy as np
import napari

data = np.random.random((20, 40, 50))
xdata = xr.DataArray(data, dims=['z', 'y', 'x'])

with napari.gui_qt():
    # create an empty viewer
    viewer = napari.Viewer()

    # add the xarray
    layer = viewer.add_image(xdata, name='xarray')
github napari / napari / examples / set_colormaps.py
"""

import numpy as np
import vispy.color
from skimage import data
import napari


# split the astronaut image into normalized red, green, blue channel arrays
histo = data.astronaut() / 255
rch, gch, bch = np.transpose(histo, (2, 0, 1))

# vispy colormaps that ramp from black to a single pure color
red = vispy.color.Colormap([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
green = vispy.color.Colormap([[0.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
blue = vispy.color.Colormap([[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]])

with napari.gui_qt():
    v = napari.Viewer()

    rlayer = v.add_image(rch, name='red channel')
    rlayer.blending = 'additive'
    rlayer.colormap = 'red', red  # a (name, colormap) tuple gives the colormap a name
    glayer = v.add_image(gch, name='green channel')
    glayer.blending = 'additive'
    glayer.colormap = green  # a bare colormap will appear as [unnamed colormap]
    blayer = v.add_image(bch, name='blue channel')
    blayer.blending = 'additive'
    blayer.colormap = {'blue': blue}  # a dict mapping name -> colormap
github zeiss-microscopy / OAD / notebooks / Read_And_Display_Images_using_Widgets_and_Napari / imgfileutils.py
def show_napari(array, metadata, verbose=True):

    import napari

    with napari.gui_qt():

        # create scale factors of all ones, one per array dimension
        scalefactors = [1] * len(array.shape)

        # initialize the napari viewer
        viewer = napari.Viewer()

        if metadata['ImageType'] == 'ometiff':

            # find position of dimensions
            posZ = metadata['DimOrder BF Array'].find('Z')
            posC = metadata['DimOrder BF Array'].find('C')
            posT = metadata['DimOrder BF Array'].find('T')

            # get the scalefactors from the metadata
            scalef = get_scalefactor(metadata)
            # adjust the z entry of the scale factors for napari
            scalefactors[posZ] = scalef['zx']

            if verbose:
                print('Dim PosT : ', posT)
                print('Dim PosC : ', posC)
github napari / napari / examples / add_vectors_image.py
"""
This example generates an image of vectors
Vector data is an array of shape (N, M, 2)
Each vector position is defined by an (x-proj, y-proj) element
    where x-proj and y-proj are the vector projections at each center
    where each vector is centered on a pixel of the NxM grid
"""

import napari
import numpy as np


with napari.gui_qt():
    # create the viewer and window
    viewer = napari.Viewer()

    n = 20
    m = 40

    image = 0.2 * np.random.random((n, m)) + 0.5
    layer = viewer.add_image(image, contrast_limits=[0, 1], name='background')

    # sample vector image-like data
    # n x m grid of slanted lines
    # random data on the open interval (-1, 1)
    pos = np.zeros(shape=(n, m, 2), dtype=np.float32)
    rand1 = 2 * (np.random.random_sample(n * m) - 0.5)
    rand2 = 2 * (np.random.random_sample(n * m) - 0.5)

    # assign projections for each vector
    pos[:, :, 0] = rand1.reshape((n, m))
    pos[:, :, 1] = rand2.reshape((n, m))

    # add the vectors layer on top of the background image
    vectors = viewer.add_vectors(pos, name='vectors')
github napari / napari / benchmarks / benchmark_qt_viewer_image.py
def setup(self, n):
    # a Qt application must exist before a napari.Viewer can be created
    _ = QApplication.instance() or QApplication([])
    np.random.seed(0)
    self.data = np.random.random((n, n))
    self.viewer = napari.Viewer()
github constantinpape / elf / example / segmentation / visualize_edges.py
large_values_are_attractive=True,
                                                                edge_direction=2)
    else:
        att1xy, rep1xy = visualise_attractive_and_repulsive_edges(rag, edge_feats,
                                                                  ignore_edges=z_edges,
                                                                  threshold=.5,
                                                                  large_values_are_attractive=False,
                                                                  edge_direction=0)
        att1z, rep1z = visualise_attractive_and_repulsive_edges(rag, edge_feats,
                                                                ignore_edges=xy_edges,
                                                                threshold=.5,
                                                                large_values_are_attractive=False,
                                                                edge_direction=2)

    with napari.gui_qt():
        viewer = napari.Viewer()
        viewer.add_image(raw, name='raw')
        viewer.add_image(boundaries, name='boundaries')

        viewer.add_image(att1xy, name='attractive-xy')
        viewer.add_image(rep1xy, name='repulsive-xy')
        viewer.add_image(att1z, name='attractive-z')
        viewer.add_image(rep1z, name='repulsive-z')
github constantinpape / elf / example / segmentation / visualize_edges.py
    affs = f['affinities'][:3, :]

    boundaries = np.mean(affs, axis=0)
    watershed, max_id = ws.stacked_watershed(boundaries, threshold=.5, sigma_seeds=2.)
    # compute the region adjacency graph
    rag = feats.compute_rag(watershed, n_labels=max_id + 1)

    # compute the edge weights
    edge_weights = feats.compute_boundary_features(rag, boundaries)[:, 0]
    z_edges = feats.compute_z_edge_mask(rag, watershed)
    xy_edges = ~z_edges
    xy_vals = visualise_edges(rag, edge_weights, ignore_edges=z_edges, edge_direction=0)
    z_vals = visualise_edges(rag, edge_weights, ignore_edges=xy_edges, edge_direction=2)

    with napari.gui_qt():
        viewer = napari.Viewer()
        viewer.add_image(raw, name='raw')
        viewer.add_image(boundaries, name='boundaries')

        viewer.add_image(xy_vals, name='xy-edges')
        viewer.add_image(z_vals, name='z-edges')
github napari / napari / examples / nD_volume.py
from skimage import data
import numpy as np
import napari


with napari.gui_qt():
    blobs = np.asarray(
        [
            data.binary_blobs(length=64, volume_fraction=0.1, n_dim=3).astype(
                float
            )
            for i in range(10)
        ]
    )
    viewer = napari.Viewer(ndisplay=3)

    # add the volume
    layer = viewer.add_image(blobs)
github constantinpape / elf / example / embeddings / ovules.py
    in_folder = '/home/pape/Work/data/data_science_bowl/dsb2018/test/images'
    input_images = os.listdir(in_folder)

    test_image = input_images[0]
    test_name = os.path.splitext(test_image)[0]
    im = np.asarray(imageio.imread(os.path.join(in_folder, test_image)))

    pred_file = './predictions.h5'
    with h5py.File(pred_file, 'r') as f:
        pred = f[test_name][:]

    pca = embed.embedding_pca(pred).transpose((1, 2, 0))
    seg = embed.embedding_slic(pred)

    with napari.gui_qt():
        viewer = napari.Viewer()
        viewer.add_image(im, name='image')
        viewer.add_image(pca, rgb=True, name='pca')
        viewer.add_labels(seg, name='segmentation')