How to use the geomstats.backend.expand_dims function in geomstats

To help you get started, we've selected a few examples of geomstats.backend.expand_dims, based on popular ways it is used in public projects.

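gs.expand_dims mirrors numpy.expand_dims: it inserts a new axis of length 1 at the given position. A minimal sketch (variable names and values are illustrative; every sketch below assumes this import of the backend as gs):

import geomstats.backend as gs

vec = gs.array([1., 2., 3.])          # shape (3,)
row = gs.expand_dims(vec, axis=0)     # shape (1, 3)
col = gs.expand_dims(vec, axis=-1)    # shape (3, 1)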

github geomstats / geomstats / geomstats / geometry / poincare_ball.py
        term_2 = \
            gs.exp(prod_alpha_sigma ** 2) * (1 + gs.erf(prod_alpha_sigma))
        term_1 = gs.sqrt(gs.pi / 2.) * (1. / (2 ** (self.dim - 1)))
        term_2 = gs.einsum('ij,j->ij', term_2, beta)
        norm_factor = \
            term_1 * variances * gs.sum(term_2, axis=-1, keepdims=True)
        grad_term_1 = 1 / variances

        grad_term_21 = 1 / gs.sum(term_2, axis=-1, keepdims=True)

        grad_term_211 = \
            gs.exp(prod_alpha_sigma ** 2) \
            * (1 + gs.erf(prod_alpha_sigma)) \
            * gs.einsum('ij,j->ij', sigma_repeated, alpha ** 2) * 2

        # Promote alpha to shape (1, dim), then tile it across the batch
        # so it lines up with grad_term_211 row for row.
        grad_term_212 = gs.repeat(gs.expand_dims((2 / gs.sqrt(gs.pi))
                                                 * alpha, axis=0),
                                  variances.shape[0], axis=0)

        grad_term_22 = grad_term_211 + grad_term_212
        grad_term_22 = gs.einsum('ij,j->ij', grad_term_22, beta)
        grad_term_22 = gs.sum(grad_term_22, axis=-1, keepdims=True)

        norm_factor_gradient = grad_term_1 + (grad_term_21 * grad_term_22)

        return gs.squeeze(norm_factor), gs.squeeze(norm_factor_gradient)
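The snippet above uses gs.expand_dims(..., axis=0) followed by gs.repeat to tile a 1-D array of per-dimension values into a batch matching variances.shape[0]. A minimal sketch of that pattern, with illustrative names and values:

alpha = gs.array([.5, 1., 1.5])            # shape (dim,) = (3,)
batched = gs.expand_dims(alpha, axis=0)    # shape (1, 3)
tiled = gs.repeat(batched, 4, axis=0)      # shape (4, 3): one row per variance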
github geomstats / geomstats / geomstats / geometry / hyperbolic_space.py
        Convert point from Poincare ball coordinates to extrinsic
        coordinates.

        Parameters
        ----------
        point : array-like, shape=[n_samples, dimension]
            Point in Poincare ball coordinates.

        Returns
        -------
        extrinsic : array-like, shape=[n_samples, dimension + 1]
            Point in extrinsic coordinates.
        """
        squared_norm = gs.sum(point ** 2, -1)
        denominator = 1 - squared_norm
        t = gs.to_ndarray((1 + squared_norm) / denominator, to_ndim=2, axis=1)
        # Shape (n_samples,) -> (n_samples, 1) so the division broadcasts.
        expanded_denominator = gs.expand_dims(denominator, -1)
        expanded_denominator = gs.repeat(expanded_denominator,
                                         point.shape[-1], -1)
        # Spatial part of the extrinsic (hyperboloid) point.
        intrinsic = (2 * point) / expanded_denominator
        return gs.concatenate([t, intrinsic], -1)
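Here gs.expand_dims(denominator, -1) turns a shape (n_samples,) array into (n_samples, 1) so it can divide a (n_samples, dimension) array row by row; gs.repeat then materialises the broadcast for backends that want explicit shapes. A self-contained sketch with illustrative values:

point = gs.array([[.1, .2], [.3, .4]])          # shape (2, 2)
denominator = 1 - gs.sum(point ** 2, -1)        # shape (2,)
column = gs.expand_dims(denominator, -1)        # shape (2, 1)
scaled = 2 * point / column                     # broadcasts to shape (2, 2)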
github geomstats / geomstats / geomstats / learning / embedding_data.py
        context_embedding : array-like, shape=[dim]
            Current context embedding.
        negative_embedding : array-like, shape=[dim]
            Current negative sample embedding.

        Returns
        -------
        total_loss : float
            The current value of the loss function.
        example_grad : array-like, shape=[dim]
            The gradient of the loss function at the embedding
            of the current data sample.
        """
        n_edges, dim = \
            negative_embedding.shape[0], example_embedding.shape[-1]
        # Add a batch axis: shape (dim,) -> (1, dim).
        example_embedding = gs.expand_dims(example_embedding, 0)
        context_embedding = gs.expand_dims(context_embedding, 0)

        positive_distance = \
            self.manifold.metric.squared_dist(
                example_embedding, context_embedding)
        positive_loss = \
            self.log_sigmoid(-positive_distance)

        # One copy of the example embedding per negative sample:
        # shape (n_edges, dim).
        reshaped_example_embedding = \
            gs.repeat(example_embedding, n_edges, axis=0)

        negative_distance = \
            self.manifold.metric.squared_dist(
                reshaped_example_embedding, negative_embedding)
        negative_loss = self.log_sigmoid(negative_distance)

        total_loss = -(positive_loss + negative_loss.sum())
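The two gs.expand_dims(..., 0) calls add a batch axis so single embeddings can go through the batched metric.squared_dist. The same pattern on an example manifold (Hypersphere is used here purely for illustration; it is not part of the snippet above):

from geomstats.geometry.hypersphere import Hypersphere

sphere = Hypersphere(dim=2)
point_a = gs.expand_dims(gs.array([1., 0., 0.]), 0)   # shape (1, 3)
point_b = gs.expand_dims(gs.array([0., 1., 0.]), 0)   # shape (1, 3)
dist = sphere.metric.squared_dist(point_a, point_b)   # batched call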
github geomstats / geomstats / geomstats / learning / kmedoids.py
def _update_medoid_indexes(self, distances, labels, medoid_indices):

        for cluster in range(self.n_clusters):

            cluster_index = gs.where(labels == cluster)[0]

            if len(cluster_index) == 0:
                logging.warning('One cluster is empty.')
                continue

            # Pair the index row with its (n, 1) column version so fancy
            # indexing selects the within-cluster distance submatrix.
            in_cluster_distances = distances[
                cluster_index, gs.expand_dims(cluster_index, axis=-1)]

            in_cluster_all_costs = gs.sum(in_cluster_distances, axis=1)

            min_cost_index = gs.argmin(in_cluster_all_costs)

            min_cost = in_cluster_all_costs[min_cost_index]

            current_cost = in_cluster_all_costs[
                gs.argmax(cluster_index == medoid_indices[cluster])]

            if min_cost < current_cost:
                medoid_indices[cluster] = cluster_index[min_cost_index]
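gs.expand_dims(cluster_index, axis=-1) turns the index vector into a column, so pairing it with the row version selects the square within-cluster block of the distance matrix. A sketch with the numpy backend and illustrative data:

distances = gs.array([[0., 1., 2.],
                      [1., 0., 3.],
                      [2., 3., 0.]])
index = gs.array([0, 2])
block = distances[index, gs.expand_dims(index, axis=-1)]   # shape (2, 2) submatrix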
github geomstats / geomstats / geomstats / geometry / special_orthogonal.py
        skew_mat : array-like, shape=[..., n, n]
            Skew-symmetric matrix.

        Returns
        -------
        vec : array-like, shape=[..., dim]
            Vector.
        """
        n_skew_mats, _, _ = skew_mat.shape

        vec_dim = self.dim
        vec = gs.zeros((n_skew_mats, vec_dim))

        if self.n == 2:  # SO(2)
            vec = skew_mat[:, 0, 1]
            # Keep the output 2-D: shape (n,) -> (n, 1).
            vec = gs.expand_dims(vec, axis=1)

        elif self.n == 3:  # SO(3)
            vec_1 = gs.to_ndarray(skew_mat[:, 2, 1], to_ndim=2, axis=1)
            vec_2 = gs.to_ndarray(skew_mat[:, 0, 2], to_ndim=2, axis=1)
            vec_3 = gs.to_ndarray(skew_mat[:, 1, 0], to_ndim=2, axis=1)
            vec = gs.concatenate([vec_1, vec_2, vec_3], axis=1)

        return vec
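In the SO(2) branch, slicing out the single rotation angle per matrix leaves shape (n_skew_mats,), and gs.expand_dims restores the trailing axis so the function always returns shape (n_skew_mats, dim). A sketch with illustrative angles:

angles = gs.array([.1, .7, -.3])        # shape (3,)
vec = gs.expand_dims(angles, axis=1)    # shape (3, 1)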
github geomstats / geomstats / examples / plot_expectation_maximisation_manifolds.py
                                       variances,
                                       plot_precision=DEFAULT_PLOT_PRECISION,
                                       save_path='',
                                       metric=None):
    """Plot Gaussian Mixture Model."""
    x_axis_samples = gs.linspace(-1, 1, plot_precision)
    y_axis_samples = gs.linspace(-1, 1, plot_precision)
    x_axis_samples, y_axis_samples = gs.meshgrid(x_axis_samples,
                                                 y_axis_samples)

    z_axis_samples = gs.zeros((plot_precision, plot_precision))

    for z_index, _ in enumerate(z_axis_samples):

        x_y_plane_mesh = gs.concatenate((
            gs.expand_dims(x_axis_samples[z_index], -1),
            gs.expand_dims(y_axis_samples[z_index], -1)),
            axis=-1)

        mesh_probabilities = PoincareBall.weighted_gmm_pdf(
            mixture_coefficients,
            x_y_plane_mesh,
            means,
            variances,
            metric)

        z_axis_samples[z_index] = mesh_probabilities.sum(-1)

    fig = plt.figure('Learned Gaussian Mixture Model '
                     'via Expectation Maximisation on Poincaré Disc')

    ax = fig.gca(projection='3d')
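Each loop iteration pairs one row of x values with the matching row of y values; gs.expand_dims(..., -1) gives both shape (n, 1) so gs.concatenate along the last axis produces (n, 2) mesh points. A sketch with illustrative rows:

x_row = gs.array([0., .5, 1.])
y_row = gs.array([.2, .2, .2])
mesh_points = gs.concatenate(
    (gs.expand_dims(x_row, -1), gs.expand_dims(y_row, -1)), axis=-1)   # shape (3, 2)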
github geomstats / geomstats / geomstats / visualization.py
def convert_to_klein_coordinates(points):
        poincare_coords = points[:, 1:] / (1 + points[:, :1])
        poincare_radius = gs.linalg.norm(
            poincare_coords, axis=1)
        poincare_angle = gs.arctan2(
            poincare_coords[:, 1], poincare_coords[:, 0])

        klein_radius = 2 * poincare_radius / (1 + poincare_radius ** 2)
        klein_angle = poincare_angle

        # Build (n, 1) columns so concatenation yields (n, 2) coordinates.
        coords_0 = gs.expand_dims(
            klein_radius * gs.cos(klein_angle), axis=1)
        coords_1 = gs.expand_dims(
            klein_radius * gs.sin(klein_angle), axis=1)
        klein_coords = gs.concatenate([coords_0, coords_1], axis=1)
        return klein_coords
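Expanding two 1-D arrays and concatenating them, as above, can also be written with gs.stack, which inserts the new axis itself. An equivalent sketch with illustrative values:

radius = gs.array([.5, .8])
angle = gs.array([0., gs.pi / 2])
coords = gs.stack([radius * gs.cos(angle),
                   radius * gs.sin(angle)], axis=1)   # shape (2, 2)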
github geomstats / geomstats / geomstats / special_orthogonal_group.py
        determined by the metric, to be less than pi.
        """
        if point_type is None:
            point_type = self.default_point_type

        if point_type == 'vector':
            tangent_vec = gs.to_ndarray(tangent_vec, to_ndim=2)

            if self.n == 3:
                if metric is None:
                    metric = self.left_canonical_metric
                tangent_vec_metric_norm = metric.norm(tangent_vec)
                tangent_vec_canonical_norm = gs.linalg.norm(
                                                  tangent_vec, axis=1)
                if gs.ndim(tangent_vec_canonical_norm) == 1:
                    # Restore the axis dropped by gs.linalg.norm:
                    # shape (n,) -> (n, 1), matching the metric norm.
                    tangent_vec_canonical_norm = gs.expand_dims(
                        tangent_vec_canonical_norm, axis=1)

                mask_norm_0 = gs.isclose(tangent_vec_metric_norm, 0.)
                mask_canonical_norm_0 = gs.isclose(
                    tangent_vec_canonical_norm, 0.)

                mask_0 = mask_norm_0 | mask_canonical_norm_0
                mask_else = ~mask_0

                mask_0 = gs.squeeze(mask_0, axis=1)
                mask_else = gs.squeeze(mask_else, axis=1)

                coef = gs.empty_like(tangent_vec_metric_norm)
                regularized_vec = tangent_vec

                regularized_vec[mask_0] = tangent_vec[mask_0]
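gs.linalg.norm with axis=1 drops that axis, so gs.expand_dims puts it back; the (n, 1) shape then lines up with the metric norm for the elementwise masks above. A sketch with illustrative vectors:

vecs = gs.array([[3., 4., 0.], [0., 0., 0.]])   # shape (2, 3)
norms = gs.linalg.norm(vecs, axis=1)            # shape (2,)
norms = gs.expand_dims(norms, axis=1)           # shape (2, 1)
mask_0 = gs.isclose(norms, 0.)                  # shape (2, 1) boolean mask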