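# --- Whole-brain parcel workflow with ROI masking (test fragment for pynets.core.nodemaker) ---
# Assumes the usual test setup is in scope: `time`, `numpy as np`, and `nodemaker` are
# imported, and `parlistfile`, `roi`, `dir_path`, `ID`, `perc_overlap`, `parc`, and
# `atlas` are defined earlier in the enclosing test.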
start_time = time.time()
[WB_coords, _, _] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
print("%s%s%s" % ('get_names_and_coords_of_parcels (Masking whole-brain version) --> finished: ',
str(np.round(time.time() - start_time, 1)), 's'))
WB_labels = np.arange(1, len(WB_coords) + 1).tolist()  # integer labels 1..N; 0 is reserved for background
start_time = time.time()
WB_parcel_list = nodemaker.gen_img_list(parlistfile)
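# parcel_masker drops parcels whose overlap with the ROI falls below the perc_overlap
# threshold and returns the surviving coords, labels, and parcel images.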
[_, _, WB_parcel_list_masked] = nodemaker.parcel_masker(roi, WB_coords, WB_parcel_list, WB_labels, dir_path,
ID, perc_overlap)
print("%s%s%s" % ('parcel_masker (Masking whole-brain version) --> finished: ',
np.round(time.time() - start_time, 1), 's'))
start_time = time.time()
[WB_parcels_map_nifti, parcel_list_exp] = nodemaker.create_parcel_atlas(WB_parcel_list_masked)
print("%s%s%s" % ('create_parcel_atlas (Masking whole-brain version) --> finished: ',
np.round(time.time() - start_time, 1), 's'))
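# node_gen builds the unmasked node set; node_gen_masking (next step) repeats the
# generation while restricting nodes to the ROI.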
start_time = time.time()
[WB_net_parcels_map_nifti_unmasked, WB_coords_unmasked, _, WB_atlas, WB_uatlas,
 dir_path] = nodemaker.node_gen(WB_coords, WB_parcel_list, WB_labels, dir_path, ID, parc, atlas, parlistfile)
print("%s%s%s" % ('node_gen (Masking whole-brain version) --> finished: ',
np.round(time.time() - start_time, 1), 's'))
start_time = time.time()
[WB_net_parcels_map_nifti_masked, WB_coords_masked, WB_labels_masked, WB_atlas, WB_uatlas,
 dir_path] = nodemaker.node_gen_masking(roi, WB_coords, WB_parcel_list, WB_labels, dir_path, ID,
                                        parc, atlas, parlistfile)
parcel_list = nodemaker.gen_img_list(parlistfile)
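# get_node_membership restricts the whole-brain nodes to the requested resting-state
# network, returning the subset of coords, parcel images, and labels.
# Assumes `network`, `func_file`, `coords`, and `labels` are defined earlier in the test.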
[net_coords, net_parcel_list, net_labels, network] = nodemaker.get_node_membership(network, func_file, coords,
                                                                                    labels, parc, parcel_list)
print("%s%s%s" % ('get_node_membership --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))
start_time = time.time()
[net_coords_masked, net_labels_masked, net_parcel_list_masked] = nodemaker.parcel_masker(
    roi, net_coords, net_parcel_list, net_labels, dir_path, ID, perc_overlap)
print("%s%s%s" % ('parcel_masker --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))
start_time = time.time()
[net_parcels_map_nifti, parcel_list_exp] = nodemaker.create_parcel_atlas(net_parcel_list_masked)
print("%s%s%s" % ('create_parcel_atlas --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))
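# gen_network_parcels writes a parcellation NIfTI containing only the parcels that
# survived masking for this network and returns the output path.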
start_time = time.time()
out_path = nodemaker.gen_network_parcels(parlistfile, network, net_labels_masked, dir_path)
print("%s%s%s" % ('gen_network_parcels --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))
assert coords is not None
assert net_coords is not None
assert net_labels is not None
assert net_parcel_list is not None
assert net_coords_masked is not None
assert net_labels_masked is not None
assert net_parcel_list_masked is not None
assert out_path is not None
assert net_parcels_map_nifti is not None
assert parcel_list_exp is not None
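# --- Network (RSN) node workflow without ROI masking (test fragment) ---
# The timing print below refers to a get_names_and_coords_of_parcels call made earlier
# in the test (not shown here); `coords` and `start_time` come from that step.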
print("%s%s%s" % ('get_names_and_coords_of_parcels --> finished: ',
str(np.round(time.time() - start_time, 1)), 's'))
labels = np.arange(1, len(coords) + 1).tolist()  # integer labels 1..N; 0 is reserved for background
start_time = time.time()
parcel_list = nodemaker.gen_img_list(parlistfile)
[net_coords, net_parcel_list, net_labels, network] = nodemaker.get_node_membership(network, func_file, coords,
                                                                                    labels, parc, parcel_list)
print("%s%s%s" % ('get_node_membership --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))
start_time = time.time()
[net_parcels_map_nifti, parcel_list_exp] = nodemaker.create_parcel_atlas(net_parcel_list)
print("%s%s%s" % ('create_parcel_atlas --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))
start_time = time.time()
out_path = nodemaker.gen_network_parcels(parlistfile, network, net_labels, dir_path)
print("%s%s%s" % ('gen_network_parcels --> finished: ', str(np.round(time.time() - start_time, 1)), 's'))
assert coords is not None
assert net_coords is not None
assert net_labels is not None
assert net_parcel_list is not None
assert out_path is not None
assert net_parcels_map_nifti is not None
assert parcel_list_exp is not None
assert network is not None
"""
# Set example inputs
base_dir = str(Path(__file__).parent/"examples")
parlistfile = base_dir + '/whole_brain_cluster_labels_PCA200.nii.gz'
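# --- User-atlas whole-brain workflow (no ROI masking) ---
# Assumes `Path` from pathlib, `time`, `numpy as np`, and `nodemaker` are in scope.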
start_time = time.time()
[WB_coords, _, _] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
print("%s%s%s" % ('get_names_and_coords_of_parcels (User-atlas whole-brain version) --> finished: ',
str(np.round(time.time() - start_time, 1)), 's'))
WB_labels = np.arange(1, len(WB_coords) + 1).tolist()  # integer labels 1..N; 0 is reserved for background
start_time = time.time()
WB_parcel_list = nodemaker.gen_img_list(parlistfile)
[WB_parcels_map_nifti, parcel_list_exp] = nodemaker.create_parcel_atlas(WB_parcel_list)
print("%s%s%s" % ('create_parcel_atlas (User-atlas whole-brain version) --> finished: ',
str(np.round(time.time() - start_time, 1)), 's'))
assert WB_coords is not None
assert WB_labels is not None
assert WB_parcel_list is not None
assert WB_parcels_map_nifti is not None
assert parcel_list_exp is not None
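# --- Inside nodemaker.parcel_masker: prune parcels that fail the ROI overlap test ---
# Working copies of the labels, coords, and parcel images are trimmed using `indices`,
# the positions of parcels that fell below the overlap threshold (computed earlier in
# the function and not shown here). `mask_img`/`mask_data` are the loaded ROI image and
# its data; `resample_img` (nilearn.image) and `nib` (nibabel) are assumed imported.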
labels_adj = list(labels)
coords_adj = list(tuple(x) for x in coords)
parcel_list_adj = parcel_list
try:
    for ix in sorted(indices, reverse=True):
        print("%s%s%s%s" % ('Removing: ', labels_adj[ix], ' at ', coords_adj[ix]))
        labels_adj.pop(ix)
        coords_adj.pop(ix)
        parcel_list_adj.pop(ix)
except IndexError:
    print('ERROR: Restrictive masking. No parcels remain after masking with brain mask/roi...')
# Create a resampled 3D atlas that can be viewed alongside mask img for QA
resampled_parcels_nii_path = "%s%s%s%s%s%s" % (dir_path, '/', ID, '_parcels_resampled2roimask_',
op.basename(roi).split('.')[0], '.nii.gz')
resampled_parcels_atlas, _ = nodemaker.create_parcel_atlas(parcel_list_adj)
resampled_parcels_map_nifti = resample_img(resampled_parcels_atlas, target_affine=mask_img.affine,
target_shape=mask_data.shape)
nib.save(resampled_parcels_map_nifti, resampled_parcels_nii_path)
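# uncache() drops nibabel's in-memory array caches once the images are saved, freeing RAM.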
mask_img.uncache()
resampled_parcels_map_nifti.uncache()
if not coords_adj:
    raise ValueError('\nERROR: ROI mask was likely too restrictive and yielded < 2 remaining parcels')
return coords_adj, labels_adj, parcel_list_adj
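# --- Inside nodemaker.node_gen_masking: tail of the docstring and the masking body ---
# `roi`, `coords`, `parcel_list`, `labels`, `ID`, `parc`, `atlas`, `uatlas`,
# `perc_overlap`, and `error` are parameters of the enclosing function (signature not
# shown in this fragment).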
dir_path : str
    Path to directory containing subject derivative data for given run.
"""
from pynets.core import nodemaker
import os.path as op
# Prefer the C-accelerated pickle implementation (cPickle on Python 2, _pickle on Python 3)
try:
    import cPickle as pickle
except ImportError:
    import _pickle as pickle
# Mask Parcels
if parc is True:
    # For parcel masking, specify overlap thresh and error cushion in mm voxels
    [coords, labels, parcel_list_masked] = nodemaker.parcel_masker(roi, coords, parcel_list, labels,
                                                                   dir_path, ID, perc_overlap)
    # Rebuild a single atlas image from the parcels that survived masking
    [net_parcels_map_nifti, _] = nodemaker.create_parcel_atlas(parcel_list_masked)
# Mask Coordinates
else:
    [coords, labels] = nodemaker.coords_masker(roi, coords, labels, error)
    # Save coords to pickle
    coords_path = "%s%s%s%s" % (dir_path, '/atlas_coords_', op.basename(roi).split('.')[0], '.pkl')
    with open(coords_path, 'wb') as f:
        pickle.dump(coords, f, protocol=2)  # protocol 2 keeps the pickle readable from Python 2
    net_parcels_map_nifti = None
# Save labels to pickle
labels_path = "%s%s%s%s" % (dir_path, '/atlas_labelnames_', op.basename(roi).split('.')[0], '.pkl')
with open(labels_path, 'wb') as f:
    pickle.dump(labels, f, protocol=2)
return net_parcels_map_nifti, coords, labels, atlas, uatlas, dir_path