How to use the tensorly.backend.shape function in tensorly

To help you get started, we’ve selected a few tensorly examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.

Source: github.com/tensorly/tensorly — tensorly/kruskal_tensor.py (view on GitHub, external)
Returns
    -------
    (shape, rank) : (int tuple, int)
        size of the full tensor and rank of the Kruskal tensor
    """
    # Fast path: a KruskalTensor instance was already checked when it was
    # constructed, so its cached shape and rank can be returned directly.
    if isinstance(kruskal_tensor, KruskalTensor):
        # it's already been validated at creation
        return kruskal_tensor.shape, kruskal_tensor.rank

    # Otherwise the input is expected to be a raw (weights, factors) pair
    # -- presumably weights is a 1-D array or None; TODO confirm with caller.
    weights, factors = kruskal_tensor
            
    # A Kruskal (CP) decomposition needs at least two factor matrices.
    if len(factors) < 2:
        raise ValueError('A Kruskal tensor should be composed of at least two factors.'
                         'However, {} factor was given.'.format(len(factors)))

    # The rank is the number of columns of the factors; read it off the first
    # factor and require every other factor to agree with it below.
    rank = int(T.shape(factors[0])[1])
    shape = []
    for i, factor in enumerate(factors):
        # Each factor is unpacked as a 2-D (mode_size, rank) matrix
        # -- assumes T.shape returns a length-2 tuple here; TODO confirm.
        current_mode_size, current_rank = T.shape(factor)
        if current_rank != rank:
            raise ValueError('All the factors of a Kruskal tensor should have the same number of column.'
                             'However, factors[0].shape[1]={} but factors[{}].shape[1]={}.'.format(
                                 rank, i, T.shape(factor)[1]))
        shape.append(current_mode_size)

    # When weights are provided there must be exactly one weight per
    # rank-one component.
    if weights is not None and len(weights) != rank:
        raise ValueError('Given factors for a rank-{} Kruskal tensor but len(weights)={}.'.format(
            rank, len(weights)))
        
    return tuple(shape), rank
Source: github.com/tensorly/tensorly — tensorly/tenalg/generalised_inner_product.py (view on GitHub, external)
Returns
    -------
    inner_product : float if n_modes is None, tensor otherwise
    """
    # Traditional inner product
    # With no common modes specified, both tensors must have exactly the
    # same shape and the result is a scalar.
    if n_modes is None:
        if tensor1.shape != tensor2.shape:
            raise ValueError('Taking a generalised product between two tensors without specifying common modes'
                             ' is equivalent to taking inner product.'
                             'This requires tensor1.shape == tensor2.shape.'
                             'However, got tensor1.shape={} and tensor2.shape={}'.format(tensor1.shape, tensor2.shape))
        # Element-wise product summed over every entry.
        return T.sum(tensor1*tensor2)

    # Inner product along `n_modes` common modes
    shape_t1 = list(T.shape(tensor1))
    shape_t2 = list(T.shape(tensor2))
    # The last `n_modes` modes of tensor1 are contracted against the first
    # `n_modes` modes of tensor2.
    common_modes = shape_t1[len(shape_t1) - n_modes:]
    common_size = int(np.prod(common_modes))
    # The uncontracted modes of each tensor, concatenated, form the output.
    output_shape = shape_t1[:-n_modes] + shape_t2[n_modes:]

    if common_modes != shape_t2[:n_modes]:
        raise ValueError('Incorrect shapes for inner product along {} common modes.'
                         'tensor_1.shape={}, tensor_2.shape={}'.format(n_modes, shape_t1, shape_t2))
    # Contract by flattening the common modes on each side and taking a
    # single matrix product, then restore the expected output shape.
    inner_product = T.dot(T.reshape(tensor1, (-1, common_size)),
                          T.reshape(tensor2, (common_size, -1)))
    return T.reshape(inner_product, output_shape)
Source: github.com/tensorly/tensorly — tensorly/kruskal_tensor.py (view on GitHub, external)
size of the full tensor and rank of the Kruskal tensor
    """
    # A KruskalTensor instance was validated when constructed; return its
    # cached shape and rank without re-checking the factors.
    if isinstance(kruskal_tensor, KruskalTensor):
        # it's already been validated at creation
        return kruskal_tensor.shape, kruskal_tensor.rank

    # Raw input: expected to unpack as a (weights, factors) pair.
    weights, factors = kruskal_tensor
            
    # At least two factor matrices are required for a Kruskal (CP) tensor.
    if len(factors) < 2:
        raise ValueError('A Kruskal tensor should be composed of at least two factors.'
                         'However, {} factor was given.'.format(len(factors)))

    # Rank = number of columns; taken from the first factor and enforced on
    # all the others in the loop below.
    rank = int(T.shape(factors[0])[1])
    shape = []
    for i, factor in enumerate(factors):
        # Unpacking assumes each factor is 2-D (mode_size, rank)
        # -- TODO confirm T.shape yields exactly two values here.
        current_mode_size, current_rank = T.shape(factor)
        if current_rank != rank:
            raise ValueError('All the factors of a Kruskal tensor should have the same number of column.'
                             'However, factors[0].shape[1]={} but factors[{}].shape[1]={}.'.format(
                                 rank, i, T.shape(factor)[1]))
        shape.append(current_mode_size)

    # One weight per rank-one component when weights are supplied.
    if weights is not None and len(weights) != rank:
        raise ValueError('Given factors for a rank-{} Kruskal tensor but len(weights)={}.'.format(
            rank, len(weights)))
        
    return tuple(shape), rank
Source: github.com/tensorly/tensorly — tensorly/metrics/regression.py (view on GitHub, external)
def covariance(y_true, y_pred, axis=None):
    """Return the empirical covariance between `y_true` and `y_pred`.

    When `axis` is None the result is a scalar; otherwise the covariance is
    computed along `axis` and the reduced means are reshaped so that they
    broadcast back against the inputs.
    """
    mean_true = T.mean(y_true, axis=axis)
    mean_pred = T.mean(y_pred, axis=axis)

    if axis is not None:
        # TODO: write a function to do this..
        # Keep the reduced axis as size 1 so each mean broadcasts.
        broadcast_shape = list(T.shape(y_true))
        broadcast_shape[axis] = 1
        mean_true = T.reshape(mean_true, broadcast_shape)
        broadcast_shape = list(T.shape(y_pred))
        broadcast_shape[axis] = 1
        mean_pred = T.reshape(mean_pred, broadcast_shape)

    centered_product = (y_true - mean_true) * (y_pred - mean_pred)
    return T.mean(centered_product, axis=axis)
Source: github.com/tensorly/tensorly — tensorly/tucker_tensor.py (view on GitHub, external)
'However, {} factor was given.'.format(len(factors)))

    # A Tucker tensor needs exactly one factor matrix per mode of the core.
    # NOTE(review): the message below says "per more"; likely meant "per mode".
    if len(factors) != tl.ndim(core):
        raise ValueError('Tucker decompositions should have one factor per more of the core tensor.'
                         'However, core has {} modes but {} factors have been provided'.format(
                         tl.ndim(core), len(factors)))

    shape = []
    rank = []
    for i, factor in enumerate(factors):
        # Unpacking assumes each factor is 2-D: (full mode size, rank of
        # mode i) -- TODO confirm tl.shape returns a length-2 tuple here.
        current_shape, current_rank = tl.shape(factor)
        # Columns of factor i must match the size of mode i of the core.
        if current_rank != tl.shape(core)[i]:
            raise ValueError('Factor `n` of Tucker decomposition should verify:\n'
                             'factors[n].shape[1] = core.shape[n].'
                             'However, factors[{0}].shape[1]={1} but core.shape[{0}]={2}.'.format(
                                 i, tl.shape(factor)[1], tl.shape(core)[i]))
        shape.append(current_shape)
        rank.append(current_rank)

    # Full tensor shape and the multilinear (Tucker) rank, one entry per mode.
    return tuple(shape), tuple(rank)
Source: github.com/tensorly/tensorly — tensorly/metrics/regression.py (view on GitHub, external)
def covariance(y_true, y_pred, axis=None):
    """Compute the empirical covariance of `y_true` and `y_pred`.

    Reduces over `axis` (scalar result when `axis` is None), centering each
    input by its mean before averaging their element-wise product.
    """
    def _mean_for_broadcast(tensor):
        # Mean over `axis`, reshaped (when an axis is given) so the reduced
        # axis is kept with size 1 and the result broadcasts against `tensor`.
        reduced = T.mean(tensor, axis=axis)
        if axis is None:
            return reduced
        # TODO: write a function to do this..
        target_shape = list(T.shape(tensor))
        target_shape[axis] = 1
        return T.reshape(reduced, target_shape)

    deviations = (y_true - _mean_for_broadcast(y_true)) * (y_pred - _mean_for_broadcast(y_pred))
    return T.mean(deviations, axis=axis)