def create_clean_mask(self, num_std_dev=1.5):
"""
Create a subject-refined version of the clustering mask.
"""
import os
import numpy as np
import nibabel as nib
from pynets.core import utils
from nilearn.masking import intersect_masks
from nilearn.image import index_img, math_img, resample_img
mask_name = os.path.basename(self.clust_mask).split('.nii')[0]
self.atlas = "%s%s%s%s%s" % (mask_name, '_', self.clust_type, '_k', str(self.k))
print("%s%s%s%s%s%s%s" % ('\nCreating atlas using ', self.clust_type, ' at cluster level ', str(self.k),
' for ', str(self.atlas), '...\n'))
self._dir_path = utils.do_dir_path(self.atlas, self.func_file)
self.uatlas = "%s%s%s%s%s%s%s%s" % (self._dir_path, '/', mask_name, '_clust-', self.clust_type, '_k',
str(self.k), '.nii.gz')
# Load clustering mask
self._func_img.set_data_dtype(np.float32)
func_vol_img = index_img(self._func_img, 1)
func_vol_img.set_data_dtype(np.uint16)
clust_mask_res_img = resample_img(nib.load(self.clust_mask), target_affine=func_vol_img.affine,
target_shape=func_vol_img.shape, interpolation='nearest')
clust_mask_res_img.set_data_dtype(np.uint16)
func_data = np.asarray(func_vol_img.dataobj).astype('float32')
func_int_thr = np.round(np.mean(func_data[func_data > 0]) - np.std(func_data[func_data > 0]) * num_std_dev, 3)
if self.mask is not None:
self._mask_img = nib.load(self.mask)
self._mask_img.set_data_dtype(np.uint16)
mask_res_img = resample_img(self._mask_img, target_affine=func_vol_img.affine,
                            target_shape=func_vol_img.shape, interpolation='nearest')
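# --- Illustrative sketch (added; not part of the original module): the cutoff
# computed above keeps voxels brighter than mean - num_std_dev * std of the
# nonzero functional signal. The same arithmetic on synthetic data:
import numpy as np

rng = np.random.default_rng(0)
func_data = np.abs(rng.normal(100, 15, size=(10, 10, 10))).astype('float32')
num_std_dev = 1.5
nonzero = func_data[func_data > 0]
func_int_thr = np.round(np.mean(nonzero) - np.std(nonzero) * num_std_dev, 3)
clean_mask = func_data > func_int_thr  # boolean coverage mask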
print("Label reference file not found. Attempting AAL naming...")
if use_AAL_naming is True:
    try:
        labels = nodemaker.AAL_naming(coords)
    except Exception:
        print('AAL reference labeling failed!')
        labels = list(range(1, len(coords) + 1))
else:
    print('Using generic numbering labels...')
    labels = list(range(1, len(coords) + 1))
else:
print('WARNING: No labels available since atlas name is not specified!')
print("%s%s" % ('Labels:\n', labels))
atlas_name = atlas
dir_path = utils.do_dir_path(atlas, in_file)
if len(coords) != len(labels):
    print('Warning: the number of coordinates does not match the number of '
          'labels. Falling back to NaN placeholder labels...')
    labels = len(coords) * [np.nan]
return labels, coords, atlas_name, networks_list, parcel_list, par_max, uatlas, dir_path
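# --- Sketch (added; coordinate values are hypothetical): the generic-numbering
# fallback above is just a 1-based integer range over the detected coordinates:
coords = [(0, 0, 0), (10, -20, 30), (5, 5, 5)]  # hypothetical MNI coordinates
labels = list(range(1, len(coords) + 1))        # -> [1, 2, 3]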
parcel_list : list
List of 3D boolean numpy arrays or binarized Nifti1Images corresponding to ROI masks.
par_max : int
The maximum label intensity in the parcellation image.
uatlas : str
File path to atlas parcellation Nifti1Image in MNI template space.
dir_path : str
Path to directory containing subject derivative data for given run.
"""
from pathlib import Path
import os.path as op
import time
import numpy as np
import nibabel as nib
import pandas as pd
from pynets.core import utils, nodemaker
base_path = utils.get_file()
# Test if atlas is a nilearn atlas. If so, fetch coords, labels, and/or networks.
nilearn_parc_atlases = ['atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009', 'atlas_talairach_gyrus',
'atlas_talairach_ba', 'atlas_talairach_lobe']
nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
if uatlas is None and atlas in nilearn_parc_atlases:
[labels, networks_list, uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
if uatlas:
if not isinstance(uatlas, str):
nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
[coords, _, par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
if parc is True:
parcel_list = nodemaker.gen_img_list(uatlas)
else:
parcel_list = None
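# --- Sketch (added; assumes nilearn is installed): roughly what
# nodemaker.nilearn_atlas_helper wraps for a parcellation atlas such as
# 'atlas_aal': fetch the atlas, then take its map path and labels.
from nilearn import datasets

aal = datasets.fetch_atlas_aal()  # downloads on first use
uatlas_example, labels_example = aal.maps, aal.labels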
thr_perc = 100 * float(thr)  # threshold expressed as a percentage, for display
if dens_thresh is False:
    thr_type = 'prop'
    edge_threshold = "%s%s" % (str(np.abs(thr_perc)), '%')
print("%s%.2f%s" % ('\nThresholding proportionally at: ', thr_perc, '% ...\n'))
conn_matrix_thr = thresholding.threshold_proportional(conn_matrix, float(thr))
else:
thr_type = 'dens'
edge_threshold = None
print("%s%.2f%s" % ('\nThresholding to achieve density of: ', thr_perc, '% ...\n'))
conn_matrix_thr = thresholding.density_thresholding(conn_matrix, float(thr))
if not nx.is_connected(nx.from_numpy_array(conn_matrix_thr)):  # from_numpy_matrix was removed in NetworkX 3.0
print('Warning: Fragmented graph')
# Save thresholded mat
est_path = utils.create_est_path_func(ID, network, conn_model, thr, roi, dir_path, node_size, smooth, c_boot,
thr_type, hpass, parc)
utils.save_mat(conn_matrix_thr, est_path)
gc.collect()
return conn_matrix_thr, edge_threshold, est_path, thr, node_size, network, conn_model, roi, smooth, prune, ID, dir_path, atlas, uatlas, labels, coords, c_boot, norm, binary, hpass
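# --- Sketch (added; a standalone stand-in, not pynets' implementation):
# proportional thresholding retains the strongest fraction `thr` of
# off-diagonal edges and zeroes the rest.
import numpy as np

def threshold_proportional_sketch(W, thr):
    W = W.copy()
    np.fill_diagonal(W, 0)
    w = np.abs(W[np.triu_indices_from(W, k=1)])
    n_keep = int(np.round(thr * w.size))
    # cutoff = weight of the n_keep-th strongest edge (ties are kept)
    cutoff = np.sort(w)[::-1][n_keep - 1] if n_keep > 0 else np.inf
    W[np.abs(W) < cutoff] = 0
    return W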
print("%s%s%s%s" % (Fore.GREEN, 'Direction-getting type: ', Fore.BLUE, 'Bootstrapped'))
elif directget == 'closest':
print("%s%s%s%s" % (Fore.GREEN, 'Direction-getting type: ', Fore.BLUE, 'Closest Peak'))
elif directget == 'det':
print("%s%s%s%s" % (Fore.GREEN, 'Direction-getting type: ', Fore.BLUE, 'Deterministic Maximum'))
print(Style.RESET_ALL)
# Commence Ensemble Tractography
streamlines = track_ensemble(dwi_data, target_samples, atlas_data_wm_gm_int, parcels, mod_fit,
prep_tissues(B0_mask, gm_in_dwi, vent_csf_in_dwi, wm_in_dwi, tiss_class),
get_sphere(sphere), directget, curv_thr_list, step_list, track_type,
maxcrossing, max_length, roi_neighborhood_tol, min_length, waymask)
print('Tracking Complete')
# Create streamline density map
[streams, dir_path, dm_path] = create_density_map(dwi_img, utils.do_dir_path(atlas, dwi_file), streamlines,
conn_model, target_samples, node_size, curv_thr_list, step_list,
network, roi, directget, max_length)
del streamlines, dwi_data, atlas_data_wm_gm_int, atlas_data, mod_fit, parcels
dwi_img.uncache()
gc.collect()
return streams, track_type, target_samples, conn_model, dir_path, network, node_size, dens_thresh, ID, roi, min_span_tree, disp_filt, parc, prune, atlas, uatlas, labels, coords, norm, binary, atlas_mni, curv_thr_list, step_list, fa_path, dm_path, directget, labels_im_file, roi_neighborhood_tol, max_length
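# --- Sketch (added; assumes dipy >= 1.0 and is not the package's
# create_density_map): a streamline density map simply counts, per voxel,
# how many streamlines pass through it, on the reference image's grid.
import nibabel as nib
from dipy.tracking.utils import density_map

def density_map_sketch(streamlines, ref_img, out_path):
    dm = density_map(streamlines, ref_img.affine, ref_img.shape[:3])
    nib.save(nib.Nifti1Image(dm.astype('uint32'), ref_img.affine), out_path)
    return out_path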
import time
from collections import defaultdict
import numpy as np
import networkx as nx
import nibabel as nib
from dipy.io.stateful_tractogram import Space, Origin
from dipy.io.streamline import load_tractogram
from dipy.tracking.streamline import Streamlines, values_from_volume
from dipy.tracking._utils import _mapping_to_voxel
from pynets.core import utils
# Load parcellation
roi_img = nib.load(atlas_mni)
atlas_data = np.around(roi_img.get_fdata())
roi_zooms = roi_img.header.get_zooms()
roi_shape = roi_img.shape
# Read Streamlines
streamlines = Streamlines(load_tractogram(streams, roi_img, to_space=Space.RASMM, to_origin=Origin.TRACKVIS,
bbox_valid_check=False).streamlines)
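# Note: Space.RASMM with Origin.TRACKVIS loads the streamlines directly into
# RAS+ world coordinates with a corner-of-voxel origin; bbox_valid_check=False
# skips dipy's bounding-box sanity check, which would otherwise reject
# tractograms containing points outside the reference image's FOV.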
roi_img.uncache()
fa_weights = values_from_volume(nib.load(warped_fa).get_fdata(), streamlines, np.eye(4))
global_fa_weights = list(utils.flatten(fa_weights))
min_global_fa_wei = min(global_fa_weights)
max_global_fa_wei = max(global_fa_weights)
fa_weights_norm = []
for val_list in fa_weights:
fa_weights_norm.append((val_list - min_global_fa_wei) / (max_global_fa_wei - min_global_fa_wei))
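# Note: this is a global min-max rescaling, so each streamline's FA profile is
# mapped into [0, 1] relative to the whole tractogram rather than normalized
# per-streamline.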
# Instantiate empty networkX graph object & dictionary and create voxel-affine mapping
lin_T, offset = _mapping_to_voxel(np.eye(4))
mx = len(np.unique(atlas_data.astype('uint16'))) - 1  # parcel count, excluding the 0 background label
g = nx.Graph(ecount=0, vcount=mx)
edge_dict = defaultdict(int)
node_dict = dict(zip(np.unique(atlas_data.astype('uint16')) + 1, np.arange(mx) + 1))
# Add empty vertices
for node in range(1, mx + 1):
g.add_node(node)
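# --- Sketch (added; names and the surrounding step are assumed, mirroring
# common dipy/networkx connectome code): the usual next step accumulates an
# edge weight for every pair of parcels that a streamline connects, then
# loads the counts into the graph built above.
from collections import defaultdict
import networkx as nx

def add_streamline_edges_sketch(g, endpoint_label_pairs):
    edge_dict = defaultdict(int)
    for u, v in endpoint_label_pairs:
        edge_dict[tuple(sorted((u, v)))] += 1  # undirected streamline count
    g.add_weighted_edges_from((u, v, w) for (u, v), w in edge_dict.items())
    return g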