How to use the dipy.io.gradients.read_bvals_bvecs function in dipy

To help you get started, we've selected a few dipy examples based on popular ways this function is used in public projects.
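Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: read the b-values and b-vectors from text files with read_bvals_bvecs, then build a GradientTable. The file names below are placeholders for your own data.

import nibabel as nib

from dipy.core.gradients import gradient_table
from dipy.io.gradients import read_bvals_bvecs

# Placeholder paths; point these at your own DWI volume and gradient files.
bvals, bvecs = read_bvals_bvecs('dwi.bval', 'dwi.bvec')
gtab = gradient_table(bvals, bvecs)

img = nib.load('dwi.nii.gz')
data = img.get_fdata()
print('Found {} b0 volumes out of {}.'.format(gtab.b0s_mask.sum(), len(bvals)))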

github samuelstjean / nlsam / script / nlsam.py
    param_D['mode'] = args.mode_D
    param_D['iter'] = args.iter
    param_D['K'] = args.nb_atoms_D
    param_D['posD'] = args.pos_D
    param_D['posAlpha'] = args.pos_alpha

    if args.mask is not None:
        mask = nib.load(args.mask).get_fdata().astype(bool)
    else:
        mask = np.ones(data.shape[:-1], dtype=bool)

    filename = args.savename

    # Testing neighbors stuff
    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)

    b0_thresh = 10
    b0_loc = tuple(np.where(bvals <= b0_thresh)[0])
    num_b0s = len(b0_loc)

    print("found " + str(num_b0s) + " b0s at position " + str(b0_loc))
    # Average multiple b0s, and just use the average for the rest of the script
    # patching them in at the end
    if num_b0s > 1:
        mean_b0 = np.mean(data[..., b0_loc], axis=-1)
        dwis = tuple(np.where(bvals > b0_thresh)[0])
        data = data[..., dwis]
        bvals = np.take(bvals, dwis, axis=0)
        bvecs = np.take(bvecs, dwis, axis=0)

        rest_of_b0s = b0_loc[1:]
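        # The excerpt is truncated here. Hedged continuation sketch (not part of
        # the original script): stack the averaged b0 back in front of the
        # remaining DWIs so the rest of the pipeline only sees a single b0.
        data = np.concatenate((mean_b0[..., None], data), axis=-1)
        bvals = np.insert(bvals, 0, 0.)
        bvecs = np.insert(bvecs, 0, [0., 0., 0.], axis=0)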
github nipy / dipy / dipy / data / fetcher.py
def read_cfin_dwi():
    """Load CFIN multi b-value DWI data.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    fraw, fbval, fbvec, _ = get_fnames('cfin_multib')
    bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
    gtab = gradient_table(bvals, bvecs)
    img = nib.load(fraw)
    return img, gtab
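A typical call site for this reader looks like the sketch below (the fetcher machinery downloads the CFIN dataset on first use, so an internet connection may be needed; the import path assumes the module shown in the header above).

from dipy.data.fetcher import read_cfin_dwi

# Load the CFIN multi b-value data and inspect its shape and gradients.
img, gtab = read_cfin_dwi()
data = img.get_fdata()
print(data.shape, gtab.bvals.shape)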
github nipy / dipy / doc / examples / tracking_introduction_eudx.py
"""

# Enables/disables interactive visualization
interactive = False

from dipy.core.gradients import gradient_table
from dipy.data import get_fnames
from dipy.io.gradients import read_bvals_bvecs
from dipy.io.image import load_nifti, load_nifti_data

hardi_fname, hardi_bval, hardi_bvec = get_fnames('stanford_hardi')
label_fname = get_fnames('stanford_labels')

data, affine, hardi_img = load_nifti(hardi_fname, return_img=True)
labels = load_nifti_data(label_fname)
bvals, bvecs = read_bvals_bvecs(hardi_bval, hardi_bvec)
gtab = gradient_table(bvals, bvecs)

"""
This dataset provides a label map in which all white matter tissues are
labeled either 1 or 2. Let's create a white matter mask to restrict tracking to
the white matter.
"""

white_matter = (labels == 1) | (labels == 2)

"""
1. The first thing we need to begin fiber tracking is a way of getting
directions from this diffusion data set. In order to do that, we can fit the
data to a Constant Solid Angle ODF Model. This model will estimate the
Orientation Distribution Function (ODF) at each voxel. The ODF is the
distribution of water diffusion as a function of direction. The peaks of an ODF
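The excerpt stops mid-sentence. As a hedged sketch of the step it is introducing, a CSA ODF fit and peak extraction in dipy typically looks like the following (the threshold and angle values are illustrative, not quoted from this excerpt):

from dipy.data import default_sphere
from dipy.direction import peaks_from_model
from dipy.reconst.shm import CsaOdfModel

# Fit the Constant Solid Angle model and keep the dominant ODF peaks
# inside the white-matter mask defined above.
csa_model = CsaOdfModel(gtab, sh_order=6)
csa_peaks = peaks_from_model(csa_model, data, default_sphere,
                             relative_peak_threshold=.8,
                             min_separation_angle=45,
                             mask=white_matter)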
github scilus / scilpy / scripts / scil_compute_fodf.py
        args.peaks = args.peaks or 'peaks.nii.gz'
        args.peak_indices = args.peak_indices or 'peak_indices.nii.gz'

    arglist = [args.fodf, args.peaks, args.peak_indices]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs,
                                 args.frf_file])
    assert_outputs_exist(parser, args, arglist)

    full_frf = np.loadtxt(args.frf_file)
    vol = nib.load(args.input)
    data = vol.dataobj
    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)

    if args.mask is None:
        mask = None
    else:
        mask = np.asanyarray(nib.load(args.mask).dataobj).astype(bool)

    # Computing fODF
    peaks_csd = compute_fodf(data, bvals, bvecs, full_frf,
                             sh_order=args.sh_order,
                             nbr_processes=args.nbr_processes,
                             mask=mask, sh_basis=args.sh_basis,
                             return_sh=True,
                             force_b0_threshold=args.force_b0_threshold)

    # Saving results
    if args.fodf:
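        # The excerpt is truncated here. Hedged guess at the save step, assuming
        # the result of compute_fodf exposes its SH coefficients as `shm_coeff`
        # (attribute name is an assumption based on dipy's peaks_from_model
        # output, not confirmed by this excerpt):
        nib.save(nib.Nifti1Image(peaks_csd.shm_coeff.astype(np.float32),
                                 vol.affine), args.fodf)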
github scilus / scilpy / scripts / scil_compute_sh_from_signal.py
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()

    assert_inputs_exist(parser, [args.dwi, args.bvals, args.bvecs])
    assert_outputs_exist(parser, args, args.output)

    vol = nib.load(args.dwi)
    dwi = vol.get_fdata()

    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)
    gtab = gradient_table(args.bvals, args.bvecs, b0_threshold=bvals.min())

    mask = None
    if args.mask:
        mask = nib.load(args.mask).get_fdata().astype(bool)

    sh = compute_sh_coefficients(dwi, gtab, args.sh_order, args.sh_basis,
                                 args.smooth,
                                 use_attenuation=args.use_attenuation,
                                 mask=mask)

    nib.save(nib.Nifti1Image(sh.astype(np.float32), vol.affine), args.output)
github nidata / nidata / nidata / diffusion / __init__.py
    folder = pjoin(dipy_home, 'taiwan_ntu_dsi')
    fraw = pjoin(folder, 'DSI203.nii.gz')
    fbval = pjoin(folder, 'DSI203.bval')
    fbvec = pjoin(folder, 'DSI203.bvec')
    md5_dict = {'data': '950408c0980a7154cb188666a885a91f',
                'bval': '602e5cb5fad2e7163e8025011d8a6755',
                'bvec': 'a95eb1be44748c20214dc7aa654f9e6b',
                'license': '7fa1d5e272533e832cc7453eeba23f44'}

    check_md5(fraw, md5_dict['data'])
    check_md5(fbval, md5_dict['bval'])
    check_md5(fbvec, md5_dict['bvec'])
    check_md5(pjoin(folder, 'DSI203_license.txt'), md5_dict['license'])

    from dipy.io.gradients import read_bvals_bvecs
    bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
    bvecs[1:] = bvecs[1:] / np.sqrt(np.sum(bvecs[1:] * bvecs[1:], axis=1))[:, None]

    from dipy.core.gradients import gradient_table
    gtab = gradient_table(bvals, bvecs)
    img = nib.load(fraw)
    return img, gtab
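Note that the normalization above divides every row but the first by its norm, which assumes exactly one unnormalized b0 vector sits in row 0. A more defensive variant (hypothetical, not part of the nidata snippet) skips any zero-norm rows instead:

import numpy as np

# Normalize b-vectors row-wise, leaving zero rows (b0 volumes) untouched.
norms = np.linalg.norm(bvecs, axis=1)
nonzero = norms > 0
bvecs[nonzero] /= norms[nonzero, None]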
github scilus / scilpy / scripts / scil_compute_kurtosis_metrics.py
    assert_inputs_exist(
        parser, [args.in_dwi, args.in_bval, args.in_bvec], args.mask)
    assert_outputs_exist(parser, args, outputs)

    img = nib.load(args.in_dwi)
    data = img.get_fdata(dtype=np.float32)
    affine = img.affine
    if args.mask is None:
        mask = None
    else:
        mask_img = nib.load(args.mask)
        mask = get_data_as_mask(mask_img, dtype=bool)

    # Validate bvals and bvecs
    bvals, bvecs = read_bvals_bvecs(args.in_bval, args.in_bvec)
    if not is_normalized_bvecs(bvecs):
        logging.warning('Your b-vectors do not seem normalized...')
        bvecs = normalize_bvecs(bvecs)

    # Find the volume indices that correspond to the shells to extract.
    tol = args.tolerance
    shells, _ = identify_shells(bvals, tol)
    if len(shells) < 3:
        parser.error('Data is not multi-shell. You need at least 2 non-zero '
                     'b-values.')

    if (shells > 2500).any():
        logging.warning('You seem to be using b > 2500 s/mm2 DWI data. '
                        'In theory, this is beyond the optimal range for DKI.')

    check_b0_threshold(args, bvals.min())
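After these sanity checks, the validated bvals and bvecs typically feed a kurtosis fit. A minimal sketch using dipy's public DKI API (the scilpy script itself wraps this step with additional options):

import dipy.reconst.dki as dki
from dipy.core.gradients import gradient_table

# Build the gradient table and fit the diffusion kurtosis model.
gtab = gradient_table(bvals, bvecs)
dki_model = dki.DiffusionKurtosisModel(gtab)
dki_fit = dki_model.fit(data, mask=mask)

fa = dki_fit.fa          # fractional anisotropy
md = dki_fit.md          # mean diffusivity
mk = dki_fit.mk(0, 3)    # mean kurtosis, clipped to the [0, 3] range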
github nipy / dipy / dipy / data / fetcher.py
def read_stanford_hardi():
    """Load Stanford HARDI dataset.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    fraw, fbval, fbvec = get_fnames('stanford_hardi')
    bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
    gtab = gradient_table(bvals, bvecs)
    img = nib.load(fraw)
    return img, gtab
github nipy / dipy / dipy / data / fetcher.py
def read_taiwan_ntu_dsi():
    """Load Taiwan NTU dataset.

    Returns
    -------
    img : obj,
        Nifti1Image
    gtab : obj,
        GradientTable

    """
    fraw, fbval, fbvec = get_fnames('taiwan_ntu_dsi')
    bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
    bvecs[1:] = (bvecs[1:] /
                 np.sqrt(np.sum(bvecs[1:] * bvecs[1:], axis=1))[:, None])
    gtab = gradient_table(bvals, bvecs)
    img = nib.load(fraw)
    return img, gtab
github scilus / scilpy / scripts / scil_extract_b0.py
def main():
    parser = _build_arg_parser()
    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.INFO)

    assert_inputs_exist(parser, [args.dwi, args.bvals, args.bvecs])

    # We don't assert the existence of any output here because there
    # are many possible inputs/outputs.

    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)
    bvals_min = bvals.min()

    # TODO refactor those checks
    # Should be min bval, then b0.
    if bvals_min < 0 or bvals_min > 20:
        raise ValueError(
            'The minimal b-value is less than 0 or greater than 20. This '
            'is highly suspicious. Please check your data to ensure '
            'everything is correct. Value found: {}'.format(bvals_min))

    b0_threshold = args.b0_thr
    if b0_threshold < 0 or b0_threshold > 20:
        raise ValueError('Invalid --b0_thr value (<0 or >20). This is highly '
                         'suspicious. Value found: {}'.format(b0_threshold))

    if not np.isclose(bvals_min, 0.0):
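        # The excerpt is truncated here. What follows is a hedged sketch, not
        # the original script: note the absence of an exact b=0 volume, then
        # fall through to the extraction itself.
        logging.info('No exact b=0 volume found; treating the minimal '
                     'b-value as b0.')

    # Core of the extraction: keep the volumes whose b-value is at or below
    # the b0 threshold. The output filename is a placeholder.
    dwi_img = nib.load(args.dwi)
    b0_idx = np.where(bvals <= b0_threshold)[0]
    b0_data = dwi_img.get_fdata()[..., b0_idx]
    nib.save(nib.Nifti1Image(b0_data, dwi_img.affine), 'b0.nii.gz')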