How to use the pynets.core.thresholding.normalize function in pynets

To help you get started, we’ve selected a few pynets examples based on popular ways this function is used in public projects.


dPys/PyNets: tests/test_thresholding.py (view on GitHub)
import numpy as np
from pynets.core import thresholding


def test_normalize(x, thr, cp):
    x = thresholding.threshold_proportional(x, 1, copy=True)  # p=1 keeps all edges and zeroes the diagonal
    s = thresholding.normalize(x)
    assert np.max(s) <= 1 and np.min(s) >= 0
    assert np.max(s) == 1 and np.min(s) == round(min(x.flatten()) / max(x.flatten()), 1)
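
The test above captures the core behaviour of normalize: after the call, edge weights lie in [0, 1] and the maximum weight is mapped to 1. A minimal usage sketch on a toy connectivity matrix (assuming pynets is installed; the random matrix is made up for illustration):

import numpy as np
from pynets.core import thresholding

# Toy symmetric "connectivity" matrix with a zeroed diagonal
rng = np.random.default_rng(0)
x = rng.random((10, 10))
x = (x + x.T) / 2
np.fill_diagonal(x, 0)

# A proportional threshold of 1 keeps all edges; normalize rescales to [0, 1]
x = thresholding.threshold_proportional(x, 1, copy=True)
s = thresholding.normalize(x)
print(s.min(), s.max())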
dPys/PyNets: pynets/stats/netstats.py (view on GitHub)
        type of hierarchical clustering. 'single' for single-linkage,
        'complete' for complete-linkage. Default value='single'

    Returns
    -------
    M : CxN np.ndarray
        nodal community affiliation matrix.

    References
    ----------
    Adapted from bctpy
    '''
    from pynets.core.thresholding import normalize

    n = len(W)
    W = normalize(W)

    if type_clustering not in ('single', 'complete'):
        print('Error: Unrecognized clustering type')

    # Set diagonal to mean weights
    np.fill_diagonal(W, 0)
    W[range(n), range(n)] = (np.sum(W, axis=0) / np.sum(np.logical_not(W), axis=0) + np.sum(W.T, axis=0) /
                             np.sum(np.logical_not(W.T), axis=0)) / 2

    # Out/in norm squared
    No = np.sum(W ** 2, axis=1)
    Ni = np.sum(W ** 2, axis=0)

    # Weighted in/out jaccard
    Jo = np.zeros((n, n))
    Ji = np.zeros((n, n))
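
The excerpt above (adapted from bctpy's community-detection code) cuts off before the Jaccard computation, but it shows where normalize fits: weights are rescaled first, then the diagonal is filled with mean weights and the squared out/in norms are computed. A minimal sketch of that preprocessing on a small directed toy matrix (the matrix values are made up for illustration):

import numpy as np
from pynets.core.thresholding import normalize

# Small directed, weighted toy matrix
W = np.array([[0.0, 0.8, 0.0],
              [0.4, 0.0, 0.2],
              [0.0, 0.6, 0.0]])
n = len(W)

# Rescale edge weights to [0, 1]
W = normalize(W)

# Set the diagonal to mean weights, as in the excerpt above
np.fill_diagonal(W, 0)
W[range(n), range(n)] = (np.sum(W, axis=0) / np.sum(np.logical_not(W), axis=0) +
                         np.sum(W.T, axis=0) / np.sum(np.logical_not(W.T), axis=0)) / 2

# Squared out/in norms used later in the weighted Jaccard computation
No = np.sum(W ** 2, axis=1)
Ni = np.sum(W ** 2, axis=0)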
dPys/PyNets: pynets/stats/netstats.py (view on GitHub)
def normalize_graph(self):

        # Apply the inverse hyperbolic tangent (i.e., Fisher r-to-z transform) if the matrix is correlation-based (non-covariance)
        if (self.conn_model == 'corr') or (self.conn_model == 'partcorr'):
            self.in_mat = np.arctanh(self.in_mat)

        # Normalize connectivity matrix
        if self.norm == 3 or self.norm == 4 or self.norm == 5:
            from graspy.utils import pass_to_ranks

        # By maximum edge weight
        if self.norm == 1:
            self.in_mat = thresholding.normalize(self.in_mat)
        # Apply log10
        elif self.norm == 2:
            self.in_mat = np.log10(self.in_mat)
        # Apply PTR simple-nonzero
        elif self.norm == 3:
            self.in_mat = pass_to_ranks(self.in_mat, method="simple-nonzero")
        # Apply PTR simple-all
        elif self.norm == 4:
            self.in_mat = pass_to_ranks(self.in_mat, method="simple-all")
        # Apply PTR zero-boost
        elif self.norm == 5:
            self.in_mat = pass_to_ranks(self.in_mat, method="zero-boost")
        # Apply standardization [0, 1]
        elif self.norm == 6:
            self.in_mat = thresholding.standardize(self.in_mat)
        else:
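
The excerpt is truncated at the final else branch, but the norm codes it dispatches on map to the following transforms: 1 rescales by the maximum edge weight (thresholding.normalize), 2 applies log10, 3 through 5 apply the graspy pass-to-ranks variants ("simple-nonzero", "simple-all", "zero-boost"), and 6 standardizes to [0, 1] (thresholding.standardize). Below is a minimal sketch of the same dispatch outside the class; apply_norm is a hypothetical helper written for illustration and assumes a square connectivity matrix plus an installed graspy.

import numpy as np
from pynets.core import thresholding


def apply_norm(in_mat, norm):
    """Hypothetical helper mirroring the norm codes in normalize_graph above."""
    if norm == 1:
        # Rescale by the maximum edge weight
        return thresholding.normalize(in_mat)
    elif norm == 2:
        # Log10-transform the weights
        return np.log10(in_mat)
    elif norm in (3, 4, 5):
        # Pass-to-ranks variants (import graspy only when needed)
        from graspy.utils import pass_to_ranks
        method = {3: "simple-nonzero", 4: "simple-all", 5: "zero-boost"}[norm]
        return pass_to_ranks(in_mat, method=method)
    elif norm == 6:
        # Standardize weights to [0, 1]
        return thresholding.standardize(in_mat)
    return in_mat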
dPys/PyNets: pynets/plotting/plot_gen.py (view on GitHub)
    from nipype.utils.filemanip import save_json

    # Advanced Settings
    comm = 'nodes'
    pruned = False
    #color_scheme = 'interpolateCool'
    #color_scheme = 'interpolateGnBu'
    #color_scheme = 'interpolateOrRd'
    #color_scheme = 'interpolatePuRd'
    #color_scheme = 'interpolateYlOrRd'
    #color_scheme = 'interpolateReds'
    #color_scheme = 'interpolateGreens'
    color_scheme = 'interpolateBlues'
    # Advanced Settings

    conn_matrix = normalize(conn_matrix)
    G = nx.from_numpy_matrix(np.abs(conn_matrix))
    if pruned is True:
        [G, pruned_nodes] = most_important(G)
        conn_matrix = nx.to_numpy_array(G)

        pruned_nodes.sort(reverse=True)
        for j in pruned_nodes:
            del labels[labels.index(labels[j])]

    # def _doClust(X, clust_levels):
    #     """
    #     Create Ward cluster linkages.
    #     """
    #     # get the linkage diagram
    #     Z = linkage(X, 'ward')
    #     # choose # cluster levels
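
The commented-out _doClust helper above is truncated in this excerpt. Earlier in the routine, normalize rescales the connectivity matrix before it is converted into a weighted NetworkX graph for the connectogram. A minimal, self-contained sketch of that step (the toy matrix and labels are made up for illustration; nx.from_numpy_array is used here because nx.from_numpy_matrix was removed in NetworkX 3.x):

import numpy as np
import networkx as nx
from pynets.core.thresholding import normalize

# Toy symmetric connectivity matrix and matching node labels
conn_matrix = np.array([[0.0, 0.9, 0.1],
                        [0.9, 0.0, 0.5],
                        [0.1, 0.5, 0.0]])
labels = ['A', 'B', 'C']

# Rescale edge weights to [0, 1], then build an undirected weighted graph
conn_matrix = normalize(conn_matrix)
G = nx.from_numpy_array(np.abs(conn_matrix))

print(nx.get_edge_attributes(G, 'weight'))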