How to use the tensorly.backend module in tensorly

To help you get started, we’ve selected a few tensorly examples, based on popular ways the library is used in public projects.


From the tensorly/tensorly repo, file tensorly/base.py (view on GitHub):
def vec_to_tensor(vec, shape):
    """Folds a vectorised tensor back into a tensor of shape `shape`
    
    Parameters
    ----------
    vec : 1D-array
        vectorised tensor of shape ``(i_1 * i_2 * ... * i_n)``
    shape : tuple
        shape of the full tensor
    
    Returns
    -------
    ndarray
        tensor of shape `shape` = ``(i_1, ..., i_n)``
    """
    return T.reshape(vec, shape)
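
vec_to_tensor is the inverse of flattening: reshaping the vector back into the original shape recovers the tensor. Below is a minimal round-trip sketch, assuming the usual "import tensorly as tl" alias and that tensor_to_vec and vec_to_tensor are importable from tensorly.base (module paths can shift between releases).

import numpy as np
import tensorly as tl
from tensorly.base import tensor_to_vec, vec_to_tensor

# Build a small third-order tensor
tensor = tl.tensor(np.arange(24.0).reshape((2, 3, 4)))

# Flatten it to a vector of length 2 * 3 * 4 = 24 ...
vec = tensor_to_vec(tensor)

# ... then fold it back into the original shape
reconstructed = vec_to_tensor(vec, (2, 3, 4))
assert tl.to_numpy(reconstructed).shape == (2, 3, 4)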
From the tensorly/tensorly repo, file tensorly/kruskal_tensor.py (view on GitHub):
def __init__(self, kruskal_tensor):
    super().__init__()

    shape, rank = _validate_kruskal_tensor(kruskal_tensor)
    weights, factors = kruskal_tensor

    # Should we allow None weights?
    if weights is None:
        weights = T.ones(rank, **T.context(factors[0]))

    self.shape = shape
    self.rank = rank
    self.factors = factors
    self.weights = weights
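
Because of the default above, passing None as the weights yields a unit-weight Kruskal (CP) tensor. A minimal construction sketch follows; note that KruskalTensor was renamed CPTensor in later tensorly releases, so this import assumes an older version.

import numpy as np
import tensorly as tl
from tensorly.kruskal_tensor import KruskalTensor  # renamed CPTensor in newer releases

rank = 3
# One factor matrix per mode, each with `rank` columns
factors = [tl.tensor(np.random.random_sample((dim, rank))) for dim in (4, 5, 6)]

# None weights trigger the default: a vector of ones of length rank
kt = KruskalTensor((None, factors))
print(kt.shape, kt.rank)  # (4, 5, 6) 3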
From the tensorly/tensorly repo, file tensorly/tenalg/_khatri_rao.py (view on GitHub):
    References
    ----------
    .. [1] T.G.Kolda and B.W.Bader, "Tensor Decompositions and Applications",
       SIAM REVIEW, vol. 51, n. 3, pp. 455-500, 2009.
    """
    if skip_matrix is not None:
        matrices = [matrices[i] for i in range(len(matrices)) if i != skip_matrix]

    # Khatri-rao of only one matrix: just return that matrix
    if len(matrices) == 1:
        return matrices[0]

    n_columns = matrices[0].shape[1]

    # Optional check: verify that each matrix is 2D with the expected number of columns
    for i, matrix in enumerate(matrices):
        if T.ndim(matrix) != 2:
            raise ValueError('All the matrices must have exactly 2 dimensions! '
                             'Matrix {} has dimension {} != 2.'.format(
                                 i, T.ndim(matrix)))
        if matrix.shape[1] != n_columns:
            raise ValueError('All matrices must have the same number of columns! '
                             'Matrix {} has {} columns != {}.'.format(
                                 i, matrix.shape[1], n_columns))

    if reverse:
        matrices = matrices[::-1]
        # Note: we do NOT use list.reverse(), which reverses in place and would affect the caller's list
        
    return T.kr(matrices, weights=weights, mask=mask)
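
The Khatri-Rao product is a column-wise Kronecker product: inputs of shapes (I, R) and (J, R) produce an (I * J, R) matrix. A minimal usage sketch, assuming the public khatri_rao wrapper in tensorly.tenalg:

import numpy as np
import tensorly as tl
from tensorly.tenalg import khatri_rao

A = tl.tensor(np.random.random_sample((4, 3)))
B = tl.tensor(np.random.random_sample((5, 3)))

# Column-wise Kronecker product: result has shape (4 * 5, 3)
result = khatri_rao([A, B])
print(tl.shape(result))  # (20, 3)

# skip_matrix drops one input; with only B left, the single-matrix
# shortcut above returns B unchanged
print(tl.shape(khatri_rao([A, B], skip_matrix=0)))  # (5, 3)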
From the tensorly/tensorly repo, file tensorly/regression/tucker_regression.py (view on GitHub):
        Parameters
        ----------
        X : ndarray of shape (n_samples, N1, ..., NS)
            tensor data
        y : array of shape (n_samples)
            labels associated with each sample

        Returns
        -------
        self
        """
        rng = check_random_state(self.random_state)

        # Randomly initialise the weights
        G = T.tensor(rng.randn(*self.weight_ranks), **T.context(X))
        W = []
        for i in range(1, T.ndim(X)):  # First dimension of X = number of samples
            W.append(T.tensor(rng.randn(X.shape[i], G.shape[i - 1]), **T.context(X)))

        # Norm of the weight tensor at each iteration
        norm_W = []

        for iteration in range(self.n_iter_max):

            # Optimise modes of W
            for i in range(len(W)):
                phi = partial_tensor_to_vec(
                            T.dot(partial_unfold(X, i),
                                  T.dot(kronecker(W, skip_matrix=i),
                                          T.transpose(unfold(G, i)))))
                # Regress phi on y: we could call a package here, e.g. scikit-learn
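
The loop above alternates over the modes of the weight tensor, solving one regression per factor. Here is a short end-to-end sketch with the estimator that wraps this fit method; the parameter names follow tensorly's TuckerRegressor, whose exact signature may vary across versions.

import numpy as np
import tensorly as tl
from tensorly.regression.tucker_regression import TuckerRegressor

# 100 samples, each a 5 x 5 matrix-valued observation
X = tl.tensor(np.random.random_sample((100, 5, 5)))
y = tl.tensor(np.random.random_sample(100))

# Constrain the regression weights to a rank-(2, 2) Tucker structure
estimator = TuckerRegressor(weight_ranks=[2, 2], n_iter_max=100,
                            random_state=0, verbose=0)
estimator.fit(X, y)
predictions = estimator.predict(X)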
From the tensorly/tensorly repo, file tensorly/random/base.py (view on GitHub):
    rns = check_random_state(random_state)

    if isinstance(rank, int):
        rank = [rank for _ in shape]

    if orthogonal:
        for i, (s, r) in enumerate(zip(shape, rank)):
            if r > s:
                warnings.warn('Selected orthogonal=True, but selected a rank larger than the tensor size for mode {0}: '
                             'rank[{0}]={1} > shape[{0}]={2}.'.format(i, r, s))

    factors = []
    for (s, r) in zip(shape, rank):
        if orthogonal:
            factor = T.tensor(rns.random_sample((s, s)), **context)
            Q, _ = T.qr(factor)
            factors.append(T.tensor(Q[:, :r]))
        else:
            factors.append(T.tensor(rns.random_sample((s, r)), **context))

    core = T.tensor(rns.random_sample(rank), **context)
    if full:
        return tucker_to_tensor((core, factors))
    else:
        return core, factors
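
This generator returns a core of shape rank plus one factor matrix per mode, with optionally orthogonal factor columns; full=True multiplies them out into a dense tensor instead. A minimal sketch, assuming this is exposed as random_tucker in tensorly.random (newer releases return a TuckerTensor object, which still unpacks the same way):

import tensorly as tl
from tensorly.random import random_tucker

# Core of shape (3, 2, 2); factors of shapes (5, 3), (4, 2), (6, 2)
core, factors = random_tucker(shape=(5, 4, 6), rank=(3, 2, 2), orthogonal=True)

# full=True returns the dense reconstruction instead
dense = random_tucker(shape=(5, 4, 6), rank=(3, 2, 2), full=True)
print(tl.shape(dense))  # (5, 4, 6)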