else:
    LGR.info('Using supplied mixing matrix from ICA')
    mmix_orig = pd.read_table(op.join(out_dir, 'ica_mixing.tsv')).values
    comptable, metric_maps, betas, mmix = metrics.dependence_metrics(
        catd, data_oc, mmix_orig, t2s_limited, tes,
        ref_img, label='meica_', out_dir=out_dir,
        algorithm='kundu_v2', verbose=verbose)
    betas_oc = utils.unmask(computefeats2(data_oc, mmix, mask), mask)
    io.filewrite(betas_oc,
                 op.join(out_dir, 'ica_components.nii.gz'),
                 ref_img)

    if ctab is None:
        comptable = metrics.kundu_metrics(comptable, metric_maps)
        comptable = selection.kundu_selection_v2(comptable, n_echos, n_vols)
    else:
        comptable = pd.read_csv(ctab, sep='\t', index_col='component')
        comptable = selection.manual_selection(comptable, acc=manacc)

# Save decomposition
data_type = 'optimally combined data' if source_tes == -1 else 'z-concatenated data'
comptable['Description'] = 'ICA fit to dimensionally reduced {0}.'.format(data_type)
mmix_dict = {}
mmix_dict['Method'] = ('Independent components analysis with FastICA '
                       'algorithm implemented by sklearn. Components '
sphis -= sphis.mean()
io.filewrite(utils.unmask(sphis, Gmask), 'T1gs', ref_img)

# find the time course of the spatial global signal and append it to the
# Legendre detrending basis
glsig = np.linalg.lstsq(np.atleast_2d(sphis).T, dat, rcond=None)[0]
glsig = stats.zscore(glsig, axis=None)
np.savetxt('glsig.1D', glsig)
glbase = np.hstack([Lmix, glsig.T])

# Project global signal out of optimally combined data
sol = np.linalg.lstsq(np.atleast_2d(glbase), dat.T, rcond=None)[0]
tsoc_nogs = dat - np.dot(np.atleast_2d(sol[dtrank]).T,
                         np.atleast_2d(glbase.T[dtrank])) + Gmu[Gmask][:, np.newaxis]
io.filewrite(optcom, 'tsoc_orig', ref_img)
dm_optcom = utils.unmask(tsoc_nogs, Gmask)
io.filewrite(dm_optcom, 'tsoc_nogs', ref_img)

# Project glbase out of each echo
dm_catd = catd.copy()  # don't overwrite catd
for echo in range(n_echos):
    dat = dm_catd[:, echo, :][Gmask]
    sol = np.linalg.lstsq(np.atleast_2d(glbase), dat.T, rcond=None)[0]
    e_nogs = dat - np.dot(np.atleast_2d(sol[dtrank]).T,
                          np.atleast_2d(glbase.T[dtrank]))
    dm_catd[:, echo, :] = utils.unmask(e_nogs, Gmask)

return dm_catd, dm_optcom
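
# Illustrative usage sketch (not part of the original module): in the tedana
# workflow this routine is called on the echo-wise data and the optimally
# combined time series, e.g.
#     catd, data_oc = gsc.gscontrol_raw(catd, data_oc, n_echos, ref_img)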
LGR.info('Computing T2* map')
t2s_limited, s0_limited, t2s_full, s0_full = decay.fit_decay(
    catd, tes, mask, masksum, fittype)
# set a hard cap for the T2* map:
# anything more than 10x the 99.5th percentile is reset to the 99.5th percentile
cap_t2s = stats.scoreatpercentile(t2s_limited.flatten(), 99.5,
                                  interpolation_method='lower')
LGR.debug('Setting cap on T2* map at {:.5f}'.format(cap_t2s * 10))
t2s_limited[t2s_limited > cap_t2s * 10] = cap_t2s
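# Worked example with illustrative numbers only: if cap_t2s = 0.08 s, a voxel
# whose fitted T2* came out above 0.8 s (10 * cap_t2s) is reset to 0.08 s;
# values at or below 0.8 s are left unchanged.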
io.filewrite(t2s_limited, op.join(out_dir, 't2sv.nii'), ref_img)
io.filewrite(s0_limited, op.join(out_dir, 's0v.nii'), ref_img)
if verbose:
    io.filewrite(t2s_full, op.join(out_dir, 't2svG.nii'), ref_img)
    io.filewrite(s0_full, op.join(out_dir, 's0vG.nii'), ref_img)
# optimally combine data
data_oc = combine.make_optcom(catd, tes, mask, t2s=t2s_full, combmode=combmode)
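# The optimal combination averages the echoes with voxelwise weights derived
# from the T2* map, so data_oc holds a single combined time series per voxel
# (the weighting scheme is selected by combmode).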
# regress out the global signal if requested via gscontrol
if 'gsr' in gscontrol:
    catd, data_oc = gsc.gscontrol_raw(catd, data_oc, n_echos, ref_img)
if mixm is None:
    # Identify and remove thermal noise from data
    dd, n_components = decomposition.tedpca(catd, data_oc, combmode, mask,
                                            t2s_limited, t2s_full, ref_img,
                                            tes=tes, algorithm=tedpca,
                                            source_tes=source_tes,
                                            kdaw=10., rdaw=1.,
                                            out_dir=out_dir,
LGR.info('Computing optimal combination')
# optimally combine data
OCcatd = combine.make_optcom(catd, tes, mask, t2s=t2s_full,
                             combmode=combmode)
# clean up numerical errors
for arr in (OCcatd, s0_limited, t2s_limited):
    np.nan_to_num(arr, copy=False)
s0_limited[s0_limited < 0] = 0
t2s_limited[t2s_limited < 0] = 0
io.filewrite(t2s_limited, op.join(out_dir, 't2sv.nii'), ref_img)
io.filewrite(s0_limited, op.join(out_dir, 's0v.nii'), ref_img)
io.filewrite(t2s_full, op.join(out_dir, 't2svG.nii'), ref_img)
io.filewrite(s0_full, op.join(out_dir, 's0vG.nii'), ref_img)
io.filewrite(OCcatd, op.join(out_dir, 'ts_OC.nii'), ref_img)
    LGR.warning('Could not save PCA solution')
else:  # if loading existing state
    voxel_comp_weights = pcastate['voxel_comp_weights']
    varex = pcastate['varex']
    comp_ts = pcastate['comp_ts']
    comptable = pcastate['comptable']

np.savetxt('mepca_mix.1D', comp_ts.T)

# write component maps to 4D image
comp_maps = np.zeros((OCcatd.shape[0], comp_ts.shape[0]))
for i_comp in range(comp_ts.shape[0]):
    temp_comp_ts = comp_ts[i_comp, :][:, None]
    comp_map = utils.unmask(model.computefeats2(OCcatd, temp_comp_ts, mask), mask)
    comp_maps[:, i_comp] = np.squeeze(comp_map)
io.filewrite(comp_maps, 'mepca_OC_components.nii', ref_img)
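# comp_maps holds one column of voxelwise weights per retained PCA component
# (n_voxels x n_components); as the comment above notes, it is written out as
# a 4D image, one volume per component.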
# Add new columns to comptable for classification
comptable['classification'] = 'accepted'
comptable['rationale'] = ''
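# Every component starts out 'accepted'; the selection step below may
# reclassify components and record the reason for each decision in 'rationale'.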
# Select components using decision tree
if method == 'kundu':
    comptable = kundu_tedpca(comptable, n_echos, kdaw, rdaw, stabilize=False)
elif method == 'kundu-stabilize':
    comptable = kundu_tedpca(comptable, n_echos, kdaw, rdaw, stabilize=True)
elif method == 'mle':
    LGR.info('Selected {0} components with MLE dimensionality '
             'detection'.format(comptable.shape[0]))
comptable['rationale'] = comptable['rationale'].str.rstrip(';')
comptable.to_csv('comp_table_pca.txt', sep='\t', index=True,