How to use the tedana.utils.unmask function in tedana

To help you get started, we've selected a few examples showing popular ways tedana.utils.unmask is used in public projects.

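Before diving into the examples, it helps to know what unmask does: given the values for the in-mask voxels and a boolean mask over the full set of voxels, tedana.utils.unmask scatters those values back into a full-size array, leaving zeros outside the mask. A minimal sketch with made-up shapes and values:

import numpy as np
from tedana import utils

# boolean mask over 5 voxels; 3 of them are "in brain"
mask = np.array([True, False, True, True, False])

# one value per in-mask voxel (e.g. a T2* estimate)
masked_vals = np.array([10., 20., 30.])

# scatter the values back to full size; out-of-mask voxels are zero-filled
full_vals = utils.unmask(masked_vals, mask)
# full_vals -> array([10., 0., 20., 30., 0.])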

Example from ME-ICA/tedana (tedana/decay.py):
    # ('loglin' branch inferred; the excerpt starts mid-call)
    if fittype == 'loglin':
        t2s_limited, s0_limited, t2s_full, s0_full = fit_loglinear(
            data_masked, tes, adaptive_mask_masked)
    elif fittype == 'curvefit':
        t2s_limited, s0_limited, t2s_full, s0_full = fit_monoexponential(
            data_masked, tes, adaptive_mask_masked)
    else:
        raise ValueError('Unknown fittype option: {}'.format(fittype))

    t2s_limited[np.isinf(t2s_limited)] = 500.  # why 500?
    # let's get rid of negative values, but keep zeros where limited != full
    t2s_limited[(adaptive_mask_masked > 1) & (t2s_limited <= 0)] = 1.
    s0_limited[np.isnan(s0_limited)] = 0.  # why 0?
    t2s_full[np.isinf(t2s_full)] = 500.  # why 500?
    t2s_full[t2s_full <= 0] = 1.  # let's get rid of negative values!
    s0_full[np.isnan(s0_full)] = 0.  # why 0?

    t2s_limited = utils.unmask(t2s_limited, mask)
    s0_limited = utils.unmask(s0_limited, mask)
    t2s_full = utils.unmask(t2s_full, mask)
    s0_full = utils.unmask(s0_full, mask)

    return t2s_limited, s0_limited, t2s_full, s0_full

Example from ME-ICA/tedana (tedana/interfaces/tedana.py):
    ref_img : str or img_like
        Reference image to dictate how outputs are saved to disk
    suffix : str, optional
        Appended to name of saved files (before extension). Default: ''

    Returns
    -------
    varexpl : float
        Percent variance of data explained by extracted + retained components
    """

    # mask and de-mean data
    mdata = data[mask]
    dmdata = mdata.T - mdata.T.mean(axis=0)

    # get variance explained by retained components
    betas = get_coeffs(unmask(dmdata.T, mask), mask, mmix)[mask]
    varexpl = (1 - ((dmdata.T - betas.dot(mmix.T))**2.).sum() / (dmdata**2.).sum()) * 100
    lgr.info('++ Variance explained: {:.02f}%'.format(varexpl))

    # create component and de-noised time series and save to files
    hikts = betas[:, acc].dot(mmix.T[acc, :])
    midkts = betas[:, midk].dot(mmix.T[midk, :])
    lowkts = betas[:, rej].dot(mmix.T[rej, :])
    dnts = data[mask] - lowkts - midkts
    if len(acc) != 0:
        filewrite(unmask(hikts, mask), 'hik_ts_{0}'.format(suffix), ref_img)
    if len(midk) != 0:
        filewrite(unmask(midkts, mask), 'midk_ts_{0}'.format(suffix), ref_img)
    if len(rej) != 0:
        filewrite(unmask(lowkts, mask), 'lowk_ts_{0}'.format(suffix), ref_img)
    filewrite(unmask(dnts, mask), 'dn_ts_{0}'.format(suffix), ref_img)
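
In the snippet above, the data are first restricted to in-mask voxels with data[mask], and the derived time series are put back into whole-brain space with unmask before being written to disk. Masking and unmasking are inverses over the in-mask voxels, which also holds for 2-D (voxels x time) arrays. A small round-trip sketch with illustrative sizes:

import numpy as np
from tedana import utils

n_voxels, n_trs = 6, 4
data = np.arange(n_voxels * n_trs, dtype=float).reshape(n_voxels, n_trs)
mask = np.array([True, True, False, True, False, True])

masked = data[mask]                    # shape (4, 4): in-mask voxels only
restored = utils.unmask(masked, mask)  # shape (6, 4): zeros outside the mask

assert np.array_equal(restored[mask], data[mask])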

Example from ME-ICA/tedana (tedana/interfaces/tedana.py):
        # R2 Model
        coeffs_R2 = (B * X2).sum(axis=0) / (X2**2).sum(axis=0)
        SSE_R2 = (B - X2 * np.tile(coeffs_R2, (n_echos, 1)))**2
        SSE_R2 = SSE_R2.sum(axis=0)
        F_R2 = (alpha - SSE_R2) * 2 / (SSE_R2)
        F_R2_maps[:, i] = F_R2

        # compute weights as Z-values
        wtsZ = (WTS[:, i] - WTS[:, i].mean()) / WTS[:, i].std()
        wtsZ[np.abs(wtsZ) > Z_MAX] = (Z_MAX * (np.abs(wtsZ) / wtsZ))[np.abs(wtsZ) > Z_MAX]
        Z_maps[:, i] = wtsZ

        # compute Kappa and Rho
        F_S0[F_S0 > F_MAX] = F_MAX
        F_R2[F_R2 > F_MAX] = F_MAX
        norm_weights = np.abs(np.squeeze(unmask(wtsZ, mask)[t2s != 0]**2.))
        Kappas[i] = np.average(F_R2, weights=norm_weights)
        Rhos[i] = np.average(F_S0, weights=norm_weights)

    # tabulate component values
    comptab_pre = np.vstack([np.arange(n_components), Kappas, Rhos, varex, varex_norm]).T
    if reindex:
        # re-index all components in Kappa order
        comptab = comptab_pre[comptab_pre[:, 1].argsort()[::-1], :]
        Kappas = comptab[:, 1]
        Rhos = comptab[:, 2]
        varex = comptab[:, 3]
        varex_norm = comptab[:, 4]
        nnc = np.array(comptab[:, 0], dtype=np.int)
        mmix_new = mmix[:, nnc]
        F_S0_maps = F_S0_maps[:, nnc]
        F_R2_maps = F_R2_maps[:, nnc]
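
A detail worth noting in the Kappa/Rho computation above: the Z-scored weights (wtsZ) live in masked space, while t2s is a whole-volume map, so the weights are unmasked first and then indexed with t2s != 0. That is the general trick for combining masked arrays with whole-volume arrays. A short sketch with invented values:

import numpy as np
from tedana import utils

mask = np.array([True, True, False, True])
wts = np.array([0.5, -1.2, 2.0])       # one weight per in-mask voxel
t2s = np.array([40., 0., 0., 55.])     # whole-volume T2* map; 0 means no estimate

# move the weights into whole-volume space, then keep voxels with a valid T2*
weights_full = utils.unmask(wts, mask)
norm_weights = np.abs(weights_full[t2s != 0]) ** 2
# norm_weights -> array([0.25, 4.])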

Example from ME-ICA/tedana (tedana/io.py):
    Returns
    -------
    fname : :obj:`str`
        Filepath to saved file

    Notes
    -----
    This function writes out a file:

    ======================    =================================================
    Filename                  Content
    ======================    =================================================
    feats_[suffix].nii        Z-normalized spatial component maps.
    ======================    =================================================
    """

    # write feature versions of components
    feats = utils.unmask(computefeats2(data, mmix, mask), mask)
    fname = filewrite(feats, 'feats_{0}'.format(suffix), ref_img)

    return fname

Example from ME-ICA/tedana (tedana/utils/io.py):
    if len(acc) != 0:
        fout = utils.filewrite(utils.unmask(hikts, mask),
                               'hik_ts_{0}'.format(suffix), ref_img)
        LGR.info('Writing high-Kappa time series: {}'.format(op.abspath(fout)))

    if len(midk) != 0:
        fout = utils.filewrite(utils.unmask(midkts, mask),
                               'midk_ts_{0}'.format(suffix), ref_img)
        LGR.info('Writing mid-Kappa time series: {}'.format(op.abspath(fout)))

    if len(rej) != 0:
        fout = utils.filewrite(utils.unmask(lowkts, mask),
                               'lowk_ts_{0}'.format(suffix), ref_img)
        LGR.info('Writing low-Kappa time series: {}'.format(op.abspath(fout)))

    fout = utils.filewrite(utils.unmask(dnts, mask),
                           'dn_ts_{0}'.format(suffix), ref_img)
    LGR.info('Writing denoised time series: {}'.format(op.abspath(fout)))

    return varexpl

Example from ME-ICA/tedana (tedana/decay.py):
        log_data = np.log(np.abs(data_2d) + 1)

        # make IV matrix: intercept/TEs x (time series * echos)
        x = np.column_stack([np.ones(echo_num), [-te for te in echo_times[:echo_num]]])
        X = np.repeat(x, n_vols, axis=0)

        # Log-linear fit
        betas = np.linalg.lstsq(X, log_data, rcond=None)[0]
        t2s = 1. / betas[1, :].T
        s0 = np.exp(betas[0, :]).T

        t2s_asc_maps[voxel_idx, i_echo] = t2s
        s0_asc_maps[voxel_idx, i_echo] = s0

    # create limited T2* and S0 maps
    t2s_limited = utils.unmask(t2s_asc_maps[echo_masks], adaptive_mask > 1)
    s0_limited = utils.unmask(s0_asc_maps[echo_masks], adaptive_mask > 1)

    # create full T2* maps with S0 estimation errors
    t2s_full, s0_full = t2s_limited.copy(), s0_limited.copy()
    t2s_full[adaptive_mask == 1] = t2s_asc_maps[adaptive_mask == 1, 0]
    s0_full[adaptive_mask == 1] = s0_asc_maps[adaptive_mask == 1, 0]

    return t2s_limited, s0_limited, t2s_full, s0_full
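
Note that unmask is called here with adaptive_mask > 1 rather than the brain mask itself: the second argument can be any boolean array over the full set of voxels, as long as the data provide one row per True entry. A hedged sketch of that usage (the adaptive-mask values are made up):

import numpy as np
from tedana import utils

# per-voxel count of echoes with usable signal (illustrative values)
adaptive_mask = np.array([0, 1, 2, 3, 2, 0])

# estimates were computed only for voxels with at least two good echoes
good = adaptive_mask > 1
t2s_values = np.array([45., 60., 52.])    # one estimate per "good" voxel

t2s_map = utils.unmask(t2s_values, good)
# t2s_map -> array([ 0.,  0., 45., 60., 52.,  0.])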

Example from ME-ICA/tedana (tedana/workflows/tedana.py):
    if mixm is None:
        # Identify and remove thermal noise from data
        dd, n_components = decomposition.tedpca(catd, data_oc, combmode, mask,
                                                t2s_limited, t2s_full, ref_img,
                                                tes=tes, algorithm=tedpca,
                                                source_tes=source_tes,
                                                kdaw=10., rdaw=1.,
                                                out_dir=out_dir,
                                                verbose=verbose,
                                                low_mem=low_mem)
        mmix_orig = decomposition.tedica(dd, n_components, fixed_seed,
                                         maxit, maxrestart)

        if verbose and (source_tes == -1):
            io.filewrite(utils.unmask(dd, mask),
                         op.join(out_dir, 'ts_OC_whitened.nii'), ref_img)

        LGR.info('Making second component selection guess from ICA results')
        # Estimate betas and compute selection metrics for mixing matrix
        # generated from dimensionally reduced data using full data (i.e., data
        # with thermal noise)
        comptable, metric_maps, betas, mmix = metrics.dependence_metrics(
                    catd, data_oc, mmix_orig, t2s_limited, tes,
                    ref_img, reindex=True, label='meica_', out_dir=out_dir,
                    algorithm='kundu_v2', verbose=verbose)
        comp_names = [io.add_decomp_prefix(comp, prefix='ica', max_value=comptable.index.max())
                      for comp in comptable.index.values]
        mixing_df = pd.DataFrame(data=mmix, columns=comp_names)
        mixing_df.to_csv('ica_mixing.tsv', sep='\t', index=False)
        betas_oc = utils.unmask(computefeats2(data_oc, mmix, mask), mask)
        io.filewrite(betas_oc,

Example from ME-ICA/tedana (tedana/interfaces/tedana.py):
    sol = np.linalg.lstsq(np.atleast_2d(glbase), dat.T)[0]
    tsoc_nogs = dat - np.dot(np.atleast_2d(sol[dtrank]).T,
                             np.atleast_2d(glbase.T[dtrank])) + Gmu[Gmask][:, np.newaxis]

    filewrite(optcom, 'tsoc_orig', ref_img)
    dm_optcom = unmask(tsoc_nogs, Gmask)
    filewrite(dm_optcom, 'tsoc_nogs', ref_img)

    # Project glbase out of each echo
    dm_catd = catd.copy()  # don't overwrite catd
    for echo in range(n_echos):
        dat = dm_catd[:, echo, :][Gmask]
        sol = np.linalg.lstsq(np.atleast_2d(glbase), dat.T)[0]
        e_nogs = dat - np.dot(np.atleast_2d(sol[dtrank]).T,
                              np.atleast_2d(glbase.T[dtrank]))
        dm_catd[:, echo, :] = unmask(e_nogs, Gmask)

    return dm_catd, dm_optcom
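
Because unmask restores the full first dimension, its output can be assigned directly into a slice of a larger array, as done above for each echo of the (samples x echoes x time) data. A minimal sketch of that assignment with illustrative sizes (cleaned stands in for e_nogs):

import numpy as np
from tedana import utils

n_samp, n_echos, n_trs = 5, 3, 10
catd = np.zeros((n_samp, n_echos, n_trs))
mask = np.array([True, False, True, True, False])

for echo in range(n_echos):
    cleaned = np.random.rand(mask.sum(), n_trs)     # stand-in for the regressed data
    catd[:, echo, :] = utils.unmask(cleaned, mask)  # back to a full (5, 10) slice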

Example from ME-ICA/tedana (tedana/interfaces/tedana.py):
is components and `T` is the same as in `data`
    mask : (S,) array_like
        Boolean mask array
    ref_img : str or img_like
        Reference image to dictate how outputs are saved to disk
    suffix : str, optional
        Appended to name of saved files (before extension). Default: ''

    Returns
    -------
    fname : str
        Filepath to saved file
    """

    # write feature versions of components
    feats = unmask(computefeats2(data, mmix, mask), mask)
    fname = filewrite(feats, 'feats_{0}'.format(suffix), ref_img)

    return fname