How to use the trimesh.load function in trimesh

To help you get started, we’ve selected a few trimesh.load examples based on popular ways it is used in public projects.

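trimesh.load reads a mesh file (OBJ, STL, PLY, GLB, and other supported formats) and returns either a single trimesh.Trimesh or a trimesh.Scene, depending on what the file contains. A minimal sketch, where "model.obj" is a placeholder path:

import trimesh

# "model.obj" is a placeholder; any supported mesh format works here.
mesh = trimesh.load("model.obj")
print(type(mesh))  # trimesh.Trimesh for single-body files, trimesh.Scene for multi-geometry files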

github cselab / Mirheo / tests / rigids / create_from_mesh.py
def create_from_mesh_file(density, fname, niter):
    import trimesh
    m = trimesh.load(fname)
    # TODO diagonalize
    inertia = [row[i] for i, row in enumerate(m.moment_inertia)]
    return create_from_mesh(density, m.vertices.tolist(), m.faces.tolist(), inertia, niter)
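
The example above pulls the diagonal out of the mesh's inertia tensor. A hedged sketch of the related trimesh properties (the path is a placeholder; principal_inertia_components is the eigenvalue form, which is effectively what the TODO about diagonalizing asks for):

import trimesh

m = trimesh.load("body.obj")              # placeholder path
print(m.moment_inertia)                   # full 3x3 inertia tensor
print(m.principal_inertia_components)     # its eigenvalues, i.e. the diagonalized inertia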
github mmatl / pyrender / tests / unit / test_offscreen.py
def test_offscreen_renderer(tmpdir):

    # Fuze trimesh
    fuze_trimesh = trimesh.load('examples/models/fuze.obj')
    fuze_mesh = Mesh.from_trimesh(fuze_trimesh)

    # Drill trimesh
    drill_trimesh = trimesh.load('examples/models/drill.obj')
    drill_mesh = Mesh.from_trimesh(drill_trimesh)
    drill_pose = np.eye(4)
    drill_pose[0,3] = 0.1
    drill_pose[2,3] = -np.min(drill_trimesh.vertices[:,2])

    # Wood trimesh
    wood_trimesh = trimesh.load('examples/models/wood.obj')
    wood_mesh = Mesh.from_trimesh(wood_trimesh)

    # Water bottle trimesh
    bottle_gltf = trimesh.load('examples/models/WaterBottle.glb')
    bottle_trimesh = bottle_gltf.geometry[list(bottle_gltf.geometry.keys())[0]]
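
Binary glTF (.glb) files load as a trimesh.Scene rather than a single Trimesh, which is why the snippet indexes into bottle_gltf.geometry. A minimal sketch of the same idea with a placeholder path:

import trimesh

scene = trimesh.load("WaterBottle.glb")           # loads as a trimesh.Scene
first_mesh = next(iter(scene.geometry.values()))  # scene.geometry maps names to Trimesh objects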
github ThibaultGROUEIX / 3D-CODED / inference / correspondences.py
def compute_correspondances(self, source_p, source_reconstructed_p, target_p, target_reconstructed_p, path):
        """
        Given 2 meshes and their reconstructions, compute correspondences between the 2 meshes through nearest neighbors
        :param source_p: path for source mesh
        :param source_reconstructed_p: path for source mesh reconstructed
        :param target_p: path for target mesh
        :param target_reconstructed_p: path for target mesh reconstructed
        :return: None, but saves a file with correspondences
        """
        # inputs are all filepaths
        with torch.no_grad():
            source = trimesh.load(source_p, process=False)
            source_reconstructed = trimesh.load(source_reconstructed_p, process=False)
            target = trimesh.load(target_p, process=False)
            target_reconstructed = trimesh.load(target_reconstructed_p, process=False)

            # project on source_reconstructed
            self.neigh.fit(source_reconstructed.vertices)
            idx_knn = self.neigh.kneighbors(source.vertices, return_distance=False)

            # correspondences through the template
            closest_points = target_reconstructed.vertices[idx_knn]
            closest_points = np.mean(closest_points, 1, keepdims=False)

            # project on target
            if self.project_on_target:
                print("projection on target...")
                self.neigh.fit(target.vertices)
                idx_knn = self.neigh.kneighbors(closest_points, return_distance=False)
                closest_points = target.vertices[idx_knn]
                closest_points = np.mean(closest_points, 1, keepdims=False)
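
Note the process=False argument in these calls: by default trimesh.load post-processes the mesh (merging duplicate vertices, dropping degenerate faces), which can change the vertex count and ordering. Passing process=False keeps the vertices exactly as stored in the file, which matters here because vertex indices must correspond across meshes. A minimal sketch with a placeholder path:

import trimesh

mesh = trimesh.load("source.ply", process=False)  # preserve the file's vertex order and count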
github mmatl / urdfpy / urdfpy / utils.py
def load_meshes(filename):
    """Loads triangular meshes from a file.

    Parameters
    ----------
    filename : str
        Path to the mesh file.

    Returns
    -------
    meshes : list of :class:`~trimesh.base.Trimesh`
        The meshes loaded from the file.
    """
    meshes = trimesh.load(filename)

    # If we got a scene, dump the meshes
    if isinstance(meshes, trimesh.Scene):
        meshes = list(meshes.dump())
        meshes = [g for g in meshes if isinstance(g, trimesh.Trimesh)]

    if isinstance(meshes, (list, tuple, set)):
        meshes = list(meshes)
        if len(meshes) == 0:
            raise ValueError('At least one mesh must be present in file')
        for r in meshes:
            if not isinstance(r, trimesh.Trimesh):
                raise TypeError('Could not load meshes from file')
    elif isinstance(meshes, trimesh.Trimesh):
        meshes = [meshes]
    else:
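
Because trimesh.load may return either a Scene or a single Trimesh, code that needs a flat list of meshes normalizes the result as above. A condensed sketch of the same pattern with a placeholder path:

import trimesh

loaded = trimesh.load("part.stl")
if isinstance(loaded, trimesh.Scene):
    # dump() returns the scene's geometry with node transforms applied
    meshes = [g for g in loaded.dump() if isinstance(g, trimesh.Trimesh)]
else:
    meshes = [loaded]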
github musyoku / gqn-dataset-renderer / opengl / rooms_free_camera_with_object_rotations.py
    scene = Scene(
        bg_color=np.array([153 / 255, 226 / 255, 249 / 255]),
        ambient_light=np.array([0.5, 0.5, 0.5, 1.0]))

    floor_trimesh = trimesh.load("objects/floor.obj")
    mesh = Mesh.from_trimesh(floor_trimesh)
    node = Node(
        mesh=mesh,
        rotation=pyrender.quaternion.from_pitch(-math.pi / 2),
        translation=np.array([0, 0, 0]))
    texture_path = random.choice(floor_textures)
    set_random_texture(node, texture_path, intensity=0.8)
    scene.add_node(node)

    texture_path = random.choice(wall_textures)

    wall_trimesh = trimesh.load("objects/wall.obj")
    mesh = Mesh.from_trimesh(wall_trimesh)
    node = Node(mesh=mesh, translation=np.array([0, 1.15, -3.5]))
    set_random_texture(node, texture_path)
    scene.add_node(node)

    mesh = Mesh.from_trimesh(wall_trimesh)
    node = Node(
        mesh=mesh,
        rotation=pyrender.quaternion.from_yaw(math.pi),
        translation=np.array([0, 1.15, 3.5]))
    set_random_texture(node, texture_path)
    scene.add_node(node)

    mesh = Mesh.from_trimesh(wall_trimesh)
    node = Node(
        mesh=mesh,
github musyoku / gqn-dataset-renderer / opengl / mnist_dice_ring_camera.py
def place_dice(scene, mnist_images, discrete_position=False,
               rotate_dice=False):
    dice_trimesh = trimesh.load("{}/dice.obj".format(object_directory))
    mesh = Mesh.from_trimesh(dice_trimesh, smooth=False)
    node = Node(
        mesh=mesh,
        scale=np.array([0.75, 0.75, 0.75]),
        translation=np.array([0, 0.75, 0]))
    texture_image = generate_mnist_texture(mnist_images)
    primitive = node.mesh.primitives[0]
    primitive.material.baseColorTexture.source = texture_image
    primitive.material.baseColorTexture.sampler.minFilter = GL_LINEAR_MIPMAP_LINEAR

    directions = [-1.0, 0.0, 1.0]
    available_positions = []
    for z in directions:
        for x in directions:
            available_positions.append((x, z))
    xz = np.array(random.choice(available_positions))
github musyoku / gqn-dataset-renderer / opengl / rooms_free_camera_no_object_rotations.py
def build_scene(colors, floor_textures, wall_textures, objects):
    scene = Scene(
        bg_color=np.array([153 / 255, 226 / 255, 249 / 255]),
        ambient_light=np.array([0.5, 0.5, 0.5, 1.0]))

    floor_trimesh = trimesh.load("objects/floor.obj")
    mesh = Mesh.from_trimesh(floor_trimesh)
    node = Node(
        mesh=mesh,
        rotation=pyrender.quaternion.from_pitch(-math.pi / 2),
        translation=np.array([0, 0, 0]))
    texture_path = random.choice(floor_textures)
    set_random_texture(node, texture_path, intensity=0.8)
    scene.add_node(node)

    texture_path = random.choice(wall_textures)

    wall_trimesh = trimesh.load("objects/wall.obj")
    mesh = Mesh.from_trimesh(wall_trimesh)
    node = Node(mesh=mesh, translation=np.array([0, 1.15, -3.5]))
    set_random_texture(node, texture_path)
    scene.add_node(node)
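
These scene builders call trimesh.load on the same OBJ files every time a scene is constructed, and trimesh.load re-parses the file on each call. A hedged sketch of a simple optimization, loading shared assets once and reusing them (paths are placeholders):

import trimesh

# Parse shared assets a single time and reuse the Trimesh objects across scene builds.
FLOOR_TRIMESH = trimesh.load("objects/floor.obj")
WALL_TRIMESH = trimesh.load("objects/wall.obj")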
github ThibaultGROUEIX / 3D-CODED / inference / correspondences.py
def reconstruct(self, input_p):
        """
        Reconstruct a 3D shape by deforming a template
        :param input_p: input path
        :return: None (but saves the reconstruction)
        """
        print("Reconstructing ", input_p)
        input = trimesh.load(input_p, process=False)
        scalefactor = 1.0
        if self.scale:
            input, scalefactor = my_utils.scale(input,
                                                self.mesh_ref_LR)  # scale input to have the same volume as mesh_ref_LR
        if self.uniformize:
            input = my_utils.uniformize(input)
        if self.clean:
            input = my_utils.clean(input)  # remove points that don't belong to any edge


        my_utils.test_orientation(input)
        mesh, meshReg = self.run(input, scalefactor, input_p)

        if not self.HR:
            red = self.red_LR
            green = self.green_LR
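
The reconstruct method saves its result to disk; with trimesh the counterpart of load is Trimesh.export, which infers the output format from the file extension. A minimal sketch with placeholder paths:

import trimesh

mesh = trimesh.load("input.ply", process=False)
# ... deform or recolor the mesh here ...
mesh.export("reconstruction.ply")  # format inferred from the ".ply" extension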
github RobotLocomotion / spartan / modules / spartan / utils / decompose_mesh_to_urdf.py
def do_convex_decomposition_to_urdf(obj_filename, obj_mass, output_directory, do_visualization=False, scale=1.0, color=[0.75, 0.75, 0.75], **kwargs):
  mesh = trimesh.load(obj_filename)
  mesh.apply_scale(scale) # applies physical property scaling
  
  if (do_visualization):
    print("Showing input mesh...")
    mesh.show()

  mesh.density = obj_mass / mesh.volume
  decomposed_mesh = export_urdf(mesh, output_directory, color=color, **kwargs)

  print("Input mesh had ", len(mesh.faces), " faces and ", len(mesh.vertices), " verts")
  print("Output mesh has ", len(decomposed_mesh.faces), " faces and ", len(decomposed_mesh.vertices), " verts")

  if (do_visualization):
    print("Showing output mesh...")
    decomposed_mesh.show()
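
The decomposition helper scales the mesh and derives a density from a target mass. A hedged sketch of those trimesh calls in isolation (path and numbers are placeholders):

import trimesh

mesh = trimesh.load("part.obj")
mesh.apply_scale(0.001)             # e.g. millimetres to metres; modifies the mesh in place
mesh.density = 0.5 / mesh.volume    # pick density so the total mass comes out to 0.5
print(mesh.mass, mesh.center_mass)  # mass properties follow from density and geometry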
github mvandermerwe / PointSDF / mise.py
# Center mesh.
    vertices[:,0] -= voxel_size * (((final_voxel_resolution) / 2) + 1)
    vertices[:,1] -= voxel_size * (((final_voxel_resolution) / 2) + 1)
    vertices[:,2] -= voxel_size * (((final_voxel_resolution) / 2) + 1)

    vertices[:,0] -= centroid_diff[0]
    vertices[:,1] -= centroid_diff[1]
    vertices[:,2] -= centroid_diff[2]
    
    #save_file = os.path.join(save_path, view + '.off')
    mcubes.export_obj(vertices, triangles, save_path)

    # Display mesh.
    if verbose:
        gen_mesh = trimesh.load(save_path)
        gen_mesh.show()

    return None # convert_to_sparse_voxel_grid(voxels, threshold=0.5)
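
The final block reloads the exported file purely as a sanity check before display. A slightly extended sketch of that check (placeholder path; trimesh's interactive viewer requires pyglet):

import trimesh

gen_mesh = trimesh.load("generated.obj")
print(gen_mesh.is_watertight, len(gen_mesh.vertices), len(gen_mesh.faces))
gen_mesh.show()  # opens the interactive viewer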