How to use the dipy.core.gradients.gradient_table function in dipy

To help you get started, we’ve selected a few dipy examples based on popular ways this function is used in public projects.

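Across these snippets the pattern is the same: read b-values and b-vectors from text files (typically with read_bvals_bvecs) and pass them to gradient_table to build a GradientTable. A minimal sketch, with placeholder file names:

from dipy.io.gradients import read_bvals_bvecs
from dipy.core.gradients import gradient_table

bvals, bvecs = read_bvals_bvecs('dwi.bval', 'dwi.bvec')  # placeholder paths
gtab = gradient_table(bvals, bvecs)
print(gtab.bvals)     # one b-value per volume
print(gtab.b0s_mask)  # boolean mask marking the b=0 volumes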

github nipy / dipy / doc / examples / reconst_shore_metrics.py
fraw, fbval, fbvec = get_fnames('taiwan_ntu_dsi')

"""
data contains the voxel data and gtab contains a GradientTable object
(gradient information, e.g. b-values). For example, to read the b-values it is
possible to write print(gtab.bvals).

Load the raw diffusion data and the affine.
"""

data, affine = load_nifti(fraw)
bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
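# Normalize all but the first gradient direction to unit length.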
bvecs[1:] = (bvecs[1:] /
             np.sqrt(np.sum(bvecs[1:] * bvecs[1:], axis=1))[:, None])
gtab = gradient_table(bvals, bvecs)
print('data.shape (%d, %d, %d, %d)' % data.shape)

"""
Instantiate the Model.
"""

asm = ShoreModel(gtab)

"""
Let's use only one slice of the data.
"""

dataslice = data[30:70, 20:80, data.shape[2] // 2]

"""
Fit the signal with the model and calculate the SHORE coefficients.
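The excerpt ends before the fit itself. A minimal sketch of that step, using the asm model and dataslice defined above (the shore_coeff attribute name is assumed from DIPY's ShoreFit and is not shown in this excerpt):

asmfit = asm.fit(dataslice)
shore_coeff = asmfit.shore_coeff  # SHORE basis coefficients of the fitted signal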
github neuropoly / spinalcordtoolbox / scripts / sct_dmri_compute_dti.py
    :param prefix: output prefix. Example: "dti_"
    :param method: algo for computing dti
    :param evecs: bool: output diffusion tensor eigenvectors and eigenvalues
    :return: True/False
    """
    # Open file.
    from spinalcordtoolbox.image import Image
    nii = Image(fname_in)
    data = nii.data
    sct.printv('data.shape (%d, %d, %d, %d)' % data.shape)

    # open bvecs/bvals
    from dipy.io import read_bvals_bvecs
    bvals, bvecs = read_bvals_bvecs(fname_bvals, fname_bvecs)
    from dipy.core.gradients import gradient_table
    gtab = gradient_table(bvals, bvecs)

    # mask and crop the data. This is a quick way to avoid calculating Tensors on the background of the image.
    if not file_mask == '':
        sct.printv('Open mask file...', param.verbose)
        # open mask file
        nii_mask = Image(file_mask)
        mask = nii_mask.data

    # fit tensor model
    sct.printv('Computing tensor using "' + method + '" method...', param.verbose)
    import dipy.reconst.dti as dti
    if method == 'standard':
        tenmodel = dti.TensorModel(gtab)
        if file_mask == '':
            tenfit = tenmodel.fit(data)
        else:
github nipy / dipy / dipy / reconst / cross_validation.py
msg = "np.mod(%s, %s) is %s" % (data_b.shape[-1], folds, div_by_folds)
        raise ValueError(msg)

    data_0 = data[..., gtab.b0s_mask]
    S0 = np.mean(data_0, -1)
    n_in_fold = data_b.shape[-1] / folds
    prediction = np.zeros(data.shape)
    # We are going to leave out some randomly chosen samples in each iteration:
    order = np.random.permutation(data_b.shape[-1])

    nz_bval = gtab.bvals[~gtab.b0s_mask]
    nz_bvec = gtab.bvecs[~gtab.b0s_mask]

    # Pop the mask, if there is one, out here for use in every fold:
    mask = model_kwargs.pop('mask', None)
    gtgt = gt.gradient_table  # Shorthand
    for k in range(folds):
        fold_mask = np.ones(data_b.shape[-1], dtype=bool)
        fold_idx = order[int(k * n_in_fold): int((k + 1) * n_in_fold)]
        fold_mask[fold_idx] = False
        this_data = np.concatenate([data_0, data_b[..., fold_mask]], -1)

        this_gtab = gtgt(np.hstack([gtab.bvals[gtab.b0s_mask],
                                    nz_bval[fold_mask]]),
                         np.concatenate([gtab.bvecs[gtab.b0s_mask],
                                         nz_bvec[fold_mask]]))
        left_out_gtab = gtgt(np.hstack([gtab.bvals[gtab.b0s_mask],
                                        nz_bval[~fold_mask]]),
                             np.concatenate([gtab.bvecs[gtab.b0s_mask],
                                             nz_bvec[~fold_mask]]))
        this_model = model.__class__(this_gtab, *model_args, **model_kwargs)
        this_fit = this_model.fit(this_data, mask=mask)
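The excerpt stops at the per-fold fit. Conceptually (sketch only, not the library's exact code, and assuming the fit object exposes a predict() method), the held-out directions are then predicted with this fold's fit and written back into prediction:

# Predict the left-out, non-b0 volumes and store them at their original positions.
this_predict = S0[..., None] * this_fit.predict(left_out_gtab, S0=1)
left_out_idx = np.where(~gtab.b0s_mask)[0][~fold_mask]
prediction[..., left_out_idx] = this_predict[..., np.sum(gtab.b0s_mask):]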
github nipy / dipy / doc / examples / reconst_mapmri.py
"""
``data`` contains the voxel data and ``gtab`` contains a ``GradientTable``
object (gradient information e.g. b-values). For example, to show the b-values
it is possible to write::

   print(gtab.bvals)

For the values of the q-space indices to make sense it is necessary to
explicitly state the ``big_delta`` and ``small_delta`` parameters in the
gradient table.
"""

img, gtab = read_cfin_dwi()
big_delta = 0.0365  # seconds
small_delta = 0.0157  # seconds
gtab = gradient_table(bvals=gtab.bvals, bvecs=gtab.bvecs,
                      big_delta=big_delta,
                      small_delta=small_delta)
data = img.get_data()
data_small = data[40:65, 50:51]

print('data.shape (%d, %d, %d, %d)' % data.shape)

"""
The MAPMRI Model can now be instantiated. The ``radial_order`` determines the
expansion order of the basis, i.e., how many basis functions are used to
approximate the signal.

First, we must decide whether to use the anisotropic or the isotropic MAPMRI basis. As was
shown in [Fick2016a]_, the isotropic basis is best used for tractography
purposes, as the anisotropic basis has a bias towards smaller crossing angles
in the ODF. For signal fitting and estimation of scalar quantities the
github nipy / dipy / dipy / data / fetcher.py
def read_cfin_dwi():
    """Load CFIN multi b-value DWI data.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    fraw, fbval, fbvec, _ = get_fnames('cfin_multib')
    bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
    gtab = gradient_table(bvals, bvecs)
    img = nib.load(fraw)
    return img, gtab
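As in the reconst_mapmri.py example above, this helper is typically used as follows:

from dipy.data import read_cfin_dwi

img, gtab = read_cfin_dwi()
print(gtab.bvals)  # b-values parsed from the fetched .bval file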
github nipy / dipy / 1.0.0 / _downloads / 3a1a3270fd10e7db18e81044b7f3e7ec / simulate_dki.py
"""
DKI requires data from more than one non-zero b-value. Since the dataset
``small_64D`` was acquired with one non-zero b-value, we artificially produce a
second non-zero b-value.
"""

bvals = np.concatenate((bvals, bvals * 2), axis=0)
bvecs = np.concatenate((bvecs, bvecs), axis=0)

"""
The b-values and gradient directions are then converted to DIPY's
``GradientTable`` format.
"""

gtab = gradient_table(bvals, bvecs)

"""
In ``mevals`` we save the eigenvalues of each tensor. To simulate crossing
fibers with two different media (representing intra- and extra-cellular media),
a total of four components have to be taken into account (i.e. the first two
compartments correspond to the intra- and extra-cellular media of the first
fiber population, while the others correspond to the media of the second fiber
population).

mevals = np.array([[0.00099, 0, 0],
                   [0.00226, 0.00087, 0.00087],
                   [0.00099, 0, 0],
                   [0.00226, 0.00087, 0.00087]])

"""
github daducci / AMICO / amico / core.py
        if self.KERNELS is None :
            ERROR( 'Response functions not generated; call "generate_kernels()" and "load_kernels()" first' )
        if self.KERNELS['model'] != self.model.id :
            ERROR( 'Response functions were not created with the same model' )

        self.set_config('fit_time', None)
        totVoxels = np.count_nonzero(self.niiMASK_img)
        LOG( '\n-> Fitting "%s" model to %d voxels:' % ( self.model.name, totVoxels ) )

        # setup fitting directions
        peaks_filename = self.get_config('peaks_filename')
        if peaks_filename is None :
            DIRs = np.zeros( [self.get_config('dim')[0], self.get_config('dim')[1], self.get_config('dim')[2], 3], dtype=np.float32 )
            nDIR = 1
            if self.get_config('doMergeB0'):
                gtab = gradient_table( np.hstack((0,self.scheme.b[self.scheme.dwi_idx])), np.vstack((np.zeros((1,3)),self.scheme.raw[self.scheme.dwi_idx,:3])) )
            else:
                gtab = gradient_table( self.scheme.b, self.scheme.raw[:,:3] )
            DTI = dti.TensorModel( gtab )
        else :
            niiPEAKS = nibabel.load( pjoin( self.get_config('DATA_path'), peaks_filename) )
            DIRs = niiPEAKS.get_data().astype(np.float32)
            nDIR = np.floor( DIRs.shape[3]/3 )
            print('\t* peaks dim = %d x %d x %d x %d' % DIRs.shape[:4])
            if DIRs.shape[:3] != self.niiMASK_img.shape[:3] :
                ERROR( 'PEAKS geometry does not match with DWI data' )

        # setup other output files
        MAPs = np.zeros( [self.get_config('dim')[0], self.get_config('dim')[1],
                          self.get_config('dim')[2], len(self.model.maps_name)], dtype=np.float32 )

        if self.get_config('doComputeNRMSE') :
github nipy / dipy / doc / examples / streamline_registration.py
if not op.exists('lr-superiorfrontal.trk'):
    from streamline_tools import *
    vox_size = hardi_img.header.get_zooms()[0]
else:
    from dipy.core.gradients import gradient_table
    from dipy.data import get_fnames
    from dipy.io.gradients import read_bvals_bvecs
    from dipy.io.image import load_nifti_data, load_nifti, save_nifti

    hardi_fname, hardi_bval_fname, hardi_bvec_fname = get_fnames('stanford_hardi')

    data, affine, hardi_img = load_nifti(hardi_fname, return_img=True)
    vox_size = hardi_img.header.get_zooms()[0]
    bvals, bvecs = read_bvals_bvecs(hardi_bval_fname, hardi_bvec_fname)
    gtab = gradient_table(bvals, bvecs)

"""
The second one will be the T2-contrast MNI template image, which we'll need to
reslice to 2x2x2 mm isotropic voxel resolution to match the HARDI data.

"""

from dipy.data.fetcher import (fetch_mni_template, read_mni_template)
from dipy.align.reslice import reslice

fetch_mni_template()
img_t2_mni = read_mni_template("a", contrast="T2")

new_zooms = (2., 2., 2.)
data2, affine2 = reslice(np.asarray(img_t2_mni.dataobj), img_t2_mni.affine,
                         img_t2_mni.header.get_zooms(), new_zooms)
github nidata / nidata / nidata / diffusion / datasets.py
"""
    folder = pjoin(dipy_home, 'stanford_hardi')
    fraw = pjoin(folder, 'HARDI150.nii.gz')
    fbval = pjoin(folder, 'HARDI150.bval')
    fbvec = pjoin(folder, 'HARDI150.bvec')
    md5_dict = {'data': '0b18513b46132b4d1051ed3364f2acbc',
                'bval': '4e08ee9e2b1d2ec3fddb68c70ae23c36',
                'bvec': '4c63a586f29afc6a48a5809524a76cb4'}

    check_md5(fraw, md5_dict['data'])
    check_md5(fbval, md5_dict['bval'])
    check_md5(fbvec, md5_dict['bvec'])

    bvals, bvecs = read_bvals_bvecs(fbval, fbvec)

    gtab = gradient_table(bvals, bvecs)
    img = nib.load(fraw)
    return img, gtab
github nipy / dipy / dipy / workflows / reconst.py
        with Solid Angle Consideration.
        """
        io_it = self.get_io_iterator()

        for (dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir,
             opeaks_values, opeaks_indices, ogfa) in io_it:

            logging.info('Loading {0}'.format(dwi))
            data, affine = load_nifti(dwi)

            bvals, bvecs = read_bvals_bvecs(bval, bvec)
            if b0_threshold < bvals.min():
                warn("b0_threshold (value: {0}) is too low, increase your "
                     "b0_threshold. It should be higher than the first b0 value "
                     "({1}).".format(b0_threshold, bvals.min()))
            gtab = gradient_table(bvals, bvecs,
                                  b0_threshold=b0_threshold, atol=bvecs_tol)
            mask_vol = load_nifti_data(maskfile).astype(np.bool)

            peaks_sphere = default_sphere

            logging.info('Starting CSA computations {0}'.format(dwi))

            csa_model = CsaOdfModel(gtab, sh_order)

            peaks_csa = peaks_from_model(model=csa_model,
                                         data=data,
                                         sphere=peaks_sphere,
                                         relative_peak_threshold=.5,
                                         min_separation_angle=25,
                                         mask=mask_vol,
                                         return_sh=True,