How to use the meshcat.geometry.PointCloud function in meshcat

To help you get started, we've selected a few examples showing popular ways meshcat.geometry.PointCloud is used in public projects.
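Before the project snippets, here is a minimal, self-contained sketch of the basic call pattern, assuming only the public meshcat-python API (the path "intro_cloud" and the random data are placeholders, not drawn from any of the projects below). PointCloud takes 3xN position and color arrays, with color channels as floats on [0, 1]:

import numpy as np
import meshcat
import meshcat.geometry as g

# Start (or connect to) a meshcat visualizer; this serves a browser-based 3D viewer.
vis = meshcat.Visualizer()

# 3xN arrays: one column per point; color channels are floats on [0, 1].
points = np.random.random((3, 1000)).astype(np.float32)
colors = points.copy()  # color each point by its own (x, y, z) position

vis["intro_cloud"].set_object(g.PointCloud(points, colors, size=0.01))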


RobotLocomotion/drake: bindings/pydrake/systems/meshcat_visualizer.py (view on GitHub)
        p_PQs = point_cloud_P.xyzs()
        # Use only valid points.
        valid = np.logical_not(np.isnan(p_PQs))
        valid = np.all(valid, axis=0)  # Reduce along XYZ axis.
        p_PQs = p_PQs[:, valid]
        if point_cloud_P.has_rgbs():
            rgbs = point_cloud_P.rgbs()[:, valid]
        else:
            # Need manual broadcasting.
            count = p_PQs.shape[1]
            rgbs = np.tile(np.array([self._default_rgb]).T, (1, count))
        # pydrake `PointCloud.rgbs()` are on [0..255], while meshcat
        # `PointCloud` colors are on [0..1].
        rgbs = rgbs / 255.  # Do not use in-place so we can promote types.
        # Send to meshcat.
        self._meshcat_viz[self._name].set_object(g.PointCloud(p_PQs, rgbs))
        self._meshcat_viz[self._name].set_transform(self._X_WP.matrix())
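One detail worth noting in the drake snippet above: sensor RGB values arrive as integers on [0, 255], while meshcat PointCloud colors are floats on [0, 1], so the colors are rescaled before set_object is called. A stripped-down sketch of that conversion (the arrays here are synthetic stand-ins, not drake data):

import numpy as np
import meshcat.geometry as g

xyz = np.random.random((3, 500))                                    # 3xN positions
rgb_u8 = np.random.randint(0, 256, size=(3, 500), dtype=np.uint8)   # 3xN colors on [0, 255]

# Divide out of place so the uint8 array is promoted to float on [0, 1].
rgb = rgb_u8 / 255.0

cloud = g.PointCloud(xyz, rgb, size=0.005)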
RobotLocomotion/spartan: modules/spartan/perception/tabletop_segmenter.py (view on GitHub)
                normal_sets.append(inlier_normals)
                points_accounted_for[nearby_inds][inlier_inds] = True

        if self.vis is not None:
            n_to_vis = 100
            best_run_k = np.argsort(scores)[-n_to_vis:]
            colors = iter(plt.cm.rainbow(np.linspace(0, 1, n_to_vis)))
            self.vis["perception"]["tabletopsegmenter"]["flippable"].delete()
            for k in best_run_k:
                print("Num %d: Score %f with %d points" %
                      (k, scores[k], point_sets[k].shape[1]))
                color = np.tile(next(colors), [point_sets[k].shape[1], 1]).T
                print("Color shape: ", color.shape)
                self.vis["perception"]["tabletopsegmenter"]["flippable"]\
                    ["%d" % k].set_object(
                        meshcat_g.PointCloud(position=point_sets[k],
                                             color=color,
                                             size=0.001))
RobotLocomotion/spartan: modules/spartan/perception/tabletop_segmenter.py (view on GitHub)
def draw_polydata_in_meshcat(vis, polyData, name, color=None, size=0.001,
                             with_normals=False, color_by_curvature=False):
    points = vtkNumpy.getNumpyFromVtk(polyData).T
    if color_by_curvature:
        curv = vtkNumpy.getNumpyFromVtk(polyData, "Curvatures")
        curv -= np.min(curv)
        curv /= np.max(curv)
        colors = plt.cm.rainbow(curv)
    elif color is not None:
        colors = np.tile(color[0:3], [points.shape[1], 1]).T
    else:
        colors = np.zeros(points.shape) + 1.
    vis["perception"]["tabletopsegmenter"][name].set_object(
        meshcat_g.PointCloud(position=points, color=colors, size=size))
    if with_normals:
        normals = vtkNumpy.getNumpyFromVtk(polyData, "Normals").T
        colors = np.tile([1., 0., 0.], [points.shape[1], 1]).T
        segments = np.zeros((3, normals.shape[1]*2))
        for k in range(normals.shape[1]):
            segments[:, 2*k] = points[:, k]
            segments[:, 2*k+1] = points[:, k] + normals[:, k]*0.05
        vis["perception"]["tabletopsegmenter"][name]["normals"].set_object(
            meshcat_g.LineSegments(position=segments,
                                   color=colors, linewidth=size/2.))
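The spartan snippets above color clouds with matplotlib colormaps (by score, curvature, or occupancy). A hedged sketch of that pattern, coloring points by height: plt.cm.rainbow returns N x 4 RGBA rows, so only the RGB channels are kept and the result is transposed back to 3xN (the path "colored_by_height" is illustrative):

import numpy as np
import matplotlib.pyplot as plt
import meshcat
import meshcat.geometry as g

vis = meshcat.Visualizer()

points = np.random.random((3, 2000))             # 3xN positions
z = points[2, :]
scalar = (z - z.min()) / (z.max() - z.min())     # normalize the scalar to [0, 1]

# rainbow(scalar) is (N, 4) RGBA; keep RGB and transpose to the 3xN layout.
colors = plt.cm.rainbow(scalar)[:, :3].T

vis["colored_by_height"].set_object(g.PointCloud(points, colors, size=0.01))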
RobotLocomotion/spartan: modules/spartan/perception/tabletop_segmenter.py (view on GitHub)
                vtkNumpy.getNumpyFromVtk(pd).astype(np.float64),
                np.zeros(3))
        output_cloud = np.zeros((tree.size(), 3))
        output_color = np.zeros((tree.size(), 4))
        k = 0
        for node in tree.begin_tree():
            if node.isLeaf() and tree.isNodeOccupied(node):
                output_cloud[k, :] = np.array(node.getCoordinate())
                output_color[k, :] = plt.cm.rainbow(node.getOccupancy())
                k += 1
        output_cloud.resize((k, 3))
        output_color.resize((k, 4))

        if self.vis is not None:
            self.vis["perception"]["tabletopsegmenter"]["fused_cloud"]\
                .set_object(meshcat_g.PointCloud(position=output_cloud.T,
                                                 color=output_color.T,
                                                 size=cell_size))
        return vtkNumpy.numpyToPolyData(output_cloud)
RobotLocomotion/spartan: modules/spartan/perception/deformable_carrot_fit.py (view on GitHub)
            pts_tf = R.dot(points.T).T + T

            # As a speed hack, do a broadphase collision check first
            distances_broadphase = np.linalg.norm(pts_tf, axis=1)
            thresh = 0.05
            selector_broadphase = distances_broadphase <= thresh
            while np.sum(selector_broadphase) == 0:
                thresh *= 2.
                selector_broadphase = distances_broadphase <= thresh
            # Only bother with those within a pretty close distance
            pts_tf = pts_tf[selector_broadphase]
            # And then further randomly downselect
            np.random.shuffle(pts_tf)
            pts_tf = pts_tf[0:min(400, pts_tf.shape[0]), :]
            vis["perception"]["fit"]["points_selected"].set_object(
                meshcat_g.PointCloud(
                    position=(self.tf[:3, :3].dot(pts_tf.T).T +
                              self.tf[:3, 3]).T,
                    color=pts_tf.T,
                    size=0.001))

            closest, distance, faces = self.trimesh.nearest.on_surface(pts_tf)

            # Align against the closest half of the points (integer division
            # keeps the slice index an int under Python 3).
            selection = np.argsort(distance)[:pts_tf.shape[0] // 2]
            closest = closest[selection]
            pts_tf_closest = pts_tf[selection, :]
            #for i, (x1, x2) in enumerate(zip(closest, pts_tf_closest)):
            #    meshcat_draw_line_with_dots(
            #        "perception/fit/corresp/%d" % i,
            #        self.tf[:3, :3].dot(x1)+self.tf[:3, 3],
            #        self.tf[:3, :3].dot(x2)+self.tf[:3, 3])
RobotLocomotion/spartan: modules/spartan/perception/deformable_carrot_fit.py (view on GitHub)
import numpy as np

import pydrake.forwarddiff as forwarddiff
from pydrake.autodiffutils import AutoDiffXd

import meshcat
import meshcat.geometry as meshcat_g
import meshcat.transformations as meshcat_tf

vis = meshcat.Visualizer(zmq_url="tcp://127.0.0.1:6000")
vis["perception"]["fit"].delete()

points = np.load("cloud.npy")
print("Loaded %d points from cloud.npy" % points.shape[0])
colors = points.copy()
colors -= np.min(colors, axis=0)
colors /= np.max(colors, axis=0)
vis["perception"]["fit"]["points"].set_object(
    meshcat_g.PointCloud(position=points.T,
                         color=colors.T,
                         size=0.001))


def vectorized_ad_helper(function, x):
    x_ad = np.empty(x.shape, dtype=AutoDiffXd)
    for i in range(x.size):
        der = np.zeros(x.size)
        der[i] = 1
        x_ad.flat[i] = AutoDiffXd(x.flat[i], der)
    y_ad = function(x_ad)
    return y_ad


def meshcat_draw_line_with_dots(path, x1, x2, N=50, size=0.0001,
                                color=[1., 1., 1.]):