How to use dipy - 10 common examples

To help you get started, we’ve selected a few dipy examples based on popular ways it is used in public projects.


github nipy / dipy / 1.0.0 / _downloads / 034a6637eb62a224f34de94d97ecf916 / tracking_stopping_criterion.py
- threshold: float

**Stopping States**

- 'ENDPOINT': stops at a position where metric_map < threshold; the streamline
reached the target stopping area.
- 'OUTSIDEIMAGE': stops at a position outside of metric_map; the streamline
reached an area outside the image where no direction data is available.
- 'TRACKPOINT': stops at a position because no direction is available; the
streamline is stopping where metric_map >= threshold, but there is no valid
direction to follow.
- 'INVALIDPOINT': N/A.
"""


# Fit a tensor model and compute the FA map used as the stopping metric.
tensor_model = TensorModel(gtab)
tenfit = tensor_model.fit(data, mask=labels > 0)
FA = fractional_anisotropy(tenfit.evals)

# Stop tracking wherever FA drops below 0.2.
threshold_criterion = ThresholdStoppingCriterion(FA, .2)

fig = plt.figure()
mask_fa = FA.copy()
mask_fa[mask_fa < 0.2] = 0
plt.xticks([])
plt.yticks([])
plt.imshow(mask_fa[:, :, data.shape[2] // 2].T, cmap='gray', origin='lower',
           interpolation='nearest')
fig.tight_layout()
fig.savefig('threshold_fa.png')

"""
github nipy / dipy / doc / examples / newApi.py
def prob_tracking_example(model, data, mask, N, hdr, filename):
    # Fit data to model
    fit = model.fit(data, mask)

    # Create objects to be passed to tracker
    pdg = ProbabilisticDirectionGetter.from_shfit(fit, 45., default_sphere)
    gfa = fit.gfa
    gfa = np.where(np.isnan(gfa), 0., gfa)
    ttc = ThresholdTissueClassifier(gfa, .2)

    # Create around N seeds
    seeds = utils.seeds_from_mask(gfa > .25, 2, affine=affine)
    seeds = seeds[::len(seeds) // N + 1]

    # Create streamline generator
    streamlines = LocalTracking(pdg, ttc, seeds, affine, .5, max_cross=1)
    trk_streamlines = utils.move_streamlines(streamlines,
                                             input_space=affine,
                                             output_space=trackvis_affine)

    trk = ((streamline, None, None) for streamline in trk_streamlines)
    # Save streamlines
    nib.trackvis.write(filename, trk, hdr)
github nipy / dipy / doc / examples / reconst_shore_metrics.py
fraw, fbval, fbvec = get_fnames('taiwan_ntu_dsi')

"""
load_nifti returns the raw diffusion data together with its affine, and gtab
will hold a GradientTable object with the gradient information (e.g.
b-values). For example, the b-values can be read with print(gtab.bvals).

Load the raw diffusion data and the affine.
"""

data, affine = load_nifti(fraw)
bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
# Re-normalize the non-b0 gradient directions to unit length.
bvecs[1:] = (bvecs[1:] /
             np.sqrt(np.sum(bvecs[1:] * bvecs[1:], axis=1))[:, None])
gtab = gradient_table(bvals, bvecs)
print('data.shape (%d, %d, %d, %d)' % data.shape)
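# The gradient table can be inspected directly; for example (the printed
# values depend on the dataset):
print(gtab.bvals)             # b-values for each volume
print(gtab.bvecs.shape)       # one unit gradient vector per volume
print(gtab.b0s_mask.sum())    # number of b0 volumes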

"""
Instantiate the Model.
"""

asm = ShoreModel(gtab)
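# For reference, ShoreModel also exposes its radial order and regularisation
# parameters as keyword arguments; the values below are the documented
# defaults and are shown only as an illustration, not as part of the original
# example.
asm = ShoreModel(gtab, radial_order=6, zeta=700, lambdaN=1e-8, lambdaL=1e-8)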

"""
Let's use only one slice of the data.
"""

dataslice = data[30:70, 20:80, data.shape[2] // 2]

"""
Fit the signal with the model and calculate the SHORE coefficients.
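"""

# A minimal sketch of the remaining steps; asm.fit and the ShoreFit accessors
# below follow the dipy API, but the exact continuation of the published
# example may differ.
asmfit = asm.fit(dataslice)

# Return-to-origin probability (from the signal and from the PDF) and the
# mean squared displacement.
rtop_signal = asmfit.rtop_signal()
rtop_pdf = asmfit.rtop_pdf()
msd = asmfit.msd()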
github nipy / dipy / dipy / reconst / qtdmri.py
def qtdmri_isotropic_signal_matrix(radial_order, time_order, us, ut, q, tau):
    ind_mat = qtdmri_isotropic_index_matrix(radial_order, time_order)
    qvals, theta, phi = cart2sphere(q[:, 0], q[:, 1], q[:, 2])

    n_dat = int(qvals.shape[0])
    n_elem = int(ind_mat.shape[0])

    num_j = int(np.max(ind_mat[:, 0]))
    num_o = int(time_order + 1)
    num_l = int(radial_order // 2 + 1)
    num_m = int(radial_order * 2 + 1)

    # Radial Basis
    radial_storage = np.zeros([num_j, num_l, n_dat])
    for j in range(1, num_j + 1):
        for ll in range(0, radial_order + 1, 2):
            radial_storage[j - 1, ll // 2, :] = radial_basis_opt(
                j, ll, us, qvals)
github nipy / dipy / dipy / reconst / mapmri.py
    ind_mat = mapmri_isotropic_index_matrix(radial_order)

    n_elem = ind_mat.shape[0]
    n_rgrad = rgrad.shape[0]
    K = np.zeros((n_rgrad, n_elem))

    counter = 0
    for n in range(0, radial_order + 1, 2):
        for j in range(1, 2 + n // 2):
            l = n + 2 - 2 * j
            const = (-1) ** (j - 1) *\
                (np.sqrt(2) * np.pi) ** (-1) *\
                (r ** 2 / 2) ** (l / 2)
            for m in range(-l, l+1):
                K[:, counter] = const * real_sph_harm(m, l, theta, phi)
                counter += 1
    return K
github nipy / dipy / dipy / reconst / mapmri.py
    qval, theta, phi = cart2sphere(q[:, 0], q[:, 1], q[:, 2])
    theta[np.isnan(theta)] = 0

    ind_mat = mapmri_isotropic_index_matrix(radial_order)

    n_elem = ind_mat.shape[0]
    n_qgrad = q.shape[0]
    M = np.zeros((n_qgrad, n_elem))

    counter = 0
    for n in range(0, radial_order + 1, 2):
        for j in range(1, 2 + n // 2):
            l = n + 2 - 2 * j
            const = mapmri_isotropic_radial_signal_basis(j, l, mu, qval)
            for m in range(-l, l+1):
                M[:, counter] = const * real_sph_harm(m, l, theta, phi)
                counter += 1
    return M
github nipy / dipy / doc / examples / newApi.py
def detr_tracking_example(model, data, mask, N, hdr, filename):
    csapeaks = peaks_from_model(model=csamodel,
                                data=data,
                                sphere=sphere,
                                relative_peak_threshold=.5,
                                min_separation_angle=45,
                                mask=mask,
                                return_odf=False,
                                normalize_peaks=True)
    gfa = csapeaks.gfa
    gfa = np.where(np.isnan(gfa), 0., gfa)
    ttc = ThresholdTissueClassifier(gfa, .2)

    # Create around N seeds
    seeds = utils.seeds_from_mask(gfa > .25, 2, affine=affine)
    seeds = seeds[::len(seeds) // N + 1]

    # Create streamline generator
    streamlines = LocalTracking(csapeaks, ttc, seeds, affine, .5, max_cross=1)
    trk_streamlines = utils.move_streamlines(streamlines,
                                             input_space=affine,
                                             output_space=trackvis_affine)
    trk = ((streamline, None, None) for streamline in trk_streamlines)

    # Save streamlines
    nib.trackvis.write(filename, trk, hdr)
github fury-gl / fury / tests / test_window.py
@npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it)
@xvfb_it
def test_renderer():

    ren = window.Renderer()

    npt.assert_equal(ren.size(), (0, 0))

    # background color for renderer (1, 0.5, 0)
    # 0.001 added here to remove numerical errors when moving from float
    # to int values
    bg_float = (1, 0.501, 0)

    # that will come in the image in the 0-255 uint scale
    bg_color = tuple((np.round(255 * np.array(bg_float))).astype('uint8'))

    ren.background(bg_float)
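
    # In the fury test suite this is typically followed by an off-screen
    # snapshot and a check that the requested background colour comes back.
    # A minimal sketch of that check; window.snapshot and
    # window.analyze_snapshot are fury helpers, and the assertion below is
    # illustrative rather than the exact original test.
    arr = window.snapshot(ren)
    report = window.analyze_snapshot(arr, bg_color=bg_color,
                                     colors=[bg_color])
    npt.assert_equal(report.colors_found, [True])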
github fury-gl / fury / tests / test_actors.py
def test_labels(interactive=False):

    text_actor = actor.label("Hello")

    renderer = window.Renderer()
    renderer.add(text_actor)
    renderer.reset_camera()
    renderer.reset_clipping_range()

    if interactive:
        window.show(renderer, reset_camera=False)

    npt.assert_equal(renderer.GetActors().GetNumberOfItems(), 1)
github fury-gl / fury / tests / test_actors.py
def test_odf_slicer(interactive=False):

    sphere = get_sphere('symmetric362')

    shape = (11, 11, 11, sphere.vertices.shape[0])

    fid, fname = mkstemp(suffix='_odf_slicer.mmap')
    print(fid)
    print(fname)

    odfs = np.memmap(fname, dtype=np.float64, mode='w+',
                     shape=shape)

    odfs[:] = 1

    affine = np.eye(4)
    renderer = window.Renderer()

    mask = np.ones(odfs.shape[:3])