How to use the laspy.file module in laspy

To help you get started, we’ve selected a few laspy.file examples based on popular ways it is used in public projects.

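All of the snippets below share the same basic pattern: open a file with laspy.file.File, read one or more point dimensions, and close it. A minimal sketch of that pattern against the laspy 1.x API (the file name is a placeholder):

import laspy.file

f = laspy.file.File("points.las", mode="r")
print(f.header.version, len(f))  # LAS version and point count
x, y, z = f.x, f.y, f.z          # scaled, real-world coordinates
X, Y, Z = f.X, f.Y, f.Z          # raw integer record values
f.close()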

github Oslandia / py3dtiles / py3dtiles / points / task / las_reader.py
import math

import laspy

def run(_id, filename, offset_scale, portion, queue, projection, verbose):
    '''
    Reads points from a las file
    '''
    try:
        f = laspy.file.File(filename, mode='r')

        point_count = portion[1] - portion[0]

        step = min(point_count, max(point_count // 10, 100000))

        indices = list(range(math.ceil(point_count / step)))

        color_scale = offset_scale[3]

        file_points = f.get_points()['point']
        X = file_points['X']
        Y = file_points['Y']
        Z = file_points['Z']
        # todo: attributes
        if 'red' in f.point_format.lookup:
            RED = file_points['red']
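Note that get_points()['point'] yields the raw integer X, Y and Z record values. Recovering real-world coordinates means applying the header's scale and offset, roughly as follows (a sketch against the laspy 1.x API, not part of the original function):

scale, offset = f.header.scale, f.header.offset
x = X * scale[0] + offset[0]
y = Y * scale[1] + offset[1]
z = Z * scale[2] + offset[2]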
github strawlab / python-pcl / examples / external / laspy / visualization_test_rgb.py
import numpy as np
from laspy import file

def main():
    # RGB: NG (no usable color in this file)
    # f = file.File('28XXX10000075-18.las', mode='r')
    f = file.File('28W0608011101-1.las', mode='r')
    # f = file.File('28XXX00020001-1.las', mode='r')
    # f = file.File('simple1_4.las', mode='r')

    # Check the LAS point data format: formats 2, 3 and 5 contain RGB
    if f._header.data_format_id in (2, 3, 5):
        red = f.red
        green = f.green
        blue = f.blue
        # Convert 16-bit color to 8-bit (case where the data sits in the high 8 bits)
        red = np.right_shift(red, 8).astype(np.uint8)
        green = np.right_shift(green, 8).astype(np.uint8)
        blue = np.right_shift(blue, 8).astype(np.uint8)
        # (case where the data sits in the low 8 bits)
        # red = red.astype(np.uint8)
        # green = green.astype(np.uint8)
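Whether the usable color lives in the high or the low byte varies between producers, which is why the script keeps both conversion paths. A hedged heuristic that picks between them automatically (an assumption, not part of the original script):

def to_uint8(channel):
    channel = np.asarray(channel)
    if channel.max() > 255:  # values use the full 16-bit range, keep the high byte
        return np.right_shift(channel, 8).astype(np.uint8)
    return channel.astype(np.uint8)  # values already fit in 8 bits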
github laspy / laspy / misc / select_classification.py
import sys

import numpy as np

def f():
    sys.path.append("../")

    from laspy import file as File
    inFile = File.File(sys.argv[1], mode="r")
    outFile = File.File(sys.argv[2], mode="w", header=inFile.header)
    cls = [int(x) for x in sys.argv[3].split(",")]
    #outFile.writer.set_padding(outFile.header.data_offset)

    vf = np.vectorize(lambda x: x in cls)
    print("Writing")
    outData = inFile.reader.get_points()[vf(inFile.raw_classification)]
    outFile.writer.set_points(outData)
    #outFile.writer.data_provider._mmap.write(inFile.reader.get_raw_point(i))
    print("Closing")
    inFile.close()
    outFile.close(ignore_header_changes=True)
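For reference, the script takes the input path, the output path and a comma-separated class list on the command line, so a call like the following (hypothetical file names) keeps only points whose raw_classification is 2 or 3:

python select_classification.py input.las output.las 2,3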
github laspy / laspy / laspy / tools / lascopy.py
            if diff > 0:
                print("Extra Bytes Detected.")

            new_header.data_record_length = laspy.util.Format(point_format).rec_len + ((diff > 0)*diff)
            evlrs = inFile.header.evlrs
            if file_version != "1.4" and old_file_version == "1.4":
                print("Warning: input file has version 1.4, and output file does not. This may cause truncation of header data.")
                new_header.point_return_count = inFile.header.legacy_point_return_count
                new_header.point_records_count = inFile.header.legacy_point_records_count
            if file_version not in ["1.3", "1.4"] and old_file_version in ["1.3", "1.4"]:
                print("Stripping any EVLRs")
                evlrs = []
            if file_version == "1.3" and len(inFile.header.evlrs) > 1:
                print("Too many EVLRs for format 1.3, keeping the first one.")
                evlrs = [inFile.header.evlrs[0]]  # keep it as a list for the File constructor
            outFile = laspy.file.File(self.args.out_file[0], header = new_header, mode = "w", vlrs = inFile.header.vlrs, evlrs = evlrs)
            if outFile.point_format.rec_len != outFile.header.data_record_length:
                pass
        except Exception as error:
            print("There was an error instantiating the output file.")
            print(error)
            quit()

        ## Copy point dimensions. 
        try:
            for dimension in inFile.point_format.specs:
                if dimension.name in outFile.point_format.lookup:
                    ## Skip sub_byte field record bytes if incompatible
                    if (not SUB_BYTE_COMPATIBLE and dimension.name in ("raw_classification", 
                        "classification_flags", "classification_byte", "flag_byte")):
                        continue
                    outFile.writer.set_dimension(dimension.name, inFile.reader.get_dimension(dimension.name))
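When the sub-byte bookkeeping above is not needed, the same dimension copy can be written against the public attribute API rather than the reader/writer internals (a sketch, assuming laspy 1.x):

# Copy every dimension that the output point format also defines.
for spec in inFile.point_format.specs:
    if spec.name in outFile.point_format.lookup:
        setattr(outFile, spec.name, getattr(inFile, spec.name))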
github laspy / laspy / misc / profiler.py
#!/usr/bin/env python
import sys
import cProfile
sys.path.append("../")

from laspy import file as File
inFile = File.File(sys.argv[1], mode="r")
print("File length: " + str(len(inFile)) + " points to be copied.")
outFile = File.File(sys.argv[2], mode="w", header=inFile.header)

spec = inFile.reader.point_format.lookup.keys()

def f():
    outFile.X = inFile.X
    outFile.Y = inFile.Y

cProfile.run("f()")

#for x in spec:
#    print(x)
#    outFile.writer.set_dimension(x, inFile.reader.get_dimension(x))


inFile.close()
outFile.close()
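A variant that saves the profile and sorts it by cumulative time makes the hot spots easier to read (standard-library cProfile and pstats; not part of the original script):

import cProfile
import pstats

cProfile.run("f()", "copy.prof")  # write the stats to a file
pstats.Stats("copy.prof").sort_stats("cumulative").print_stats(10)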
github DTMilodowski / LiDAR_canopy / src / scottish_understory / generate_LiDAR_metrics_raster_carbomap_highlands_site1.py
PAI = np.zeros((rows,cols))*np.nan
PAI_1_2m = np.zeros((rows,cols))*np.nan
PAI_2_5m = np.zeros((rows,cols))*np.nan
PAI_5m_up = np.zeros((rows,cols))*np.nan
PAD = np.zeros((rows,cols,layers))*np.nan
pulse_dens = np.zeros((rows,cols))
n_ground = np.zeros((rows,cols))

# Phase three - loop through las tiles and gradually fill the array
laz_files = io.find_las_files_by_polygon(las_list,bbox)
n_files = len(laz_files)
for i in range(0, n_files):
    print("Processing tile %i of %i" % (i + 1, n_files))
    # get bbox of specific tile
    lasFile = las.file.File(laz_files[i], mode='r-')
    max_xyz = lasFile.header.max
    min_xyz = lasFile.header.min
    lasFile.close()

    # buffer this bounding box with the search radius
    E = max_xyz[0]+radius
    N = max_xyz[1]+radius
    W = min_xyz[0]-radius
    S = min_xyz[1]-radius

    # Read in LiDAR points for region of interest
    polygon = np.asarray([[W,N],[E,N],[E,S],[W,S]])
    lidar_pts, starting_ids_for_trees, trees = io.load_lidar_data_by_polygon(las_list,
                                polygon,laz_files=False,max_pts_per_tree = 5*10**5)
    N_trees = len(trees)
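The header-only read in this loop generalizes to a small helper; mode 'r-' opens the file without mapping the point records, which is what keeps repeated bounding-box lookups cheap. A sketch under that assumption, reusing the script's las alias for laspy:

def tile_bbox(path, radius=0.0):
    f = las.file.File(path, mode='r-')
    max_xyz, min_xyz = f.header.max, f.header.min
    f.close()
    # (W, S, E, N) buffered by the search radius
    return (min_xyz[0] - radius, min_xyz[1] - radius,
            max_xyz[0] + radius, max_xyz[1] + radius)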
github brycefrank / pyfor / normalize.py
def df_to_las(df, out_path, header, zcol='z'):
    """Exports normalized points to new las."""
    import laspy

    outfile = laspy.file.File(out_path, mode="w", header=header)
    outfile.x = df['x']
    outfile.y = df['y']
    outfile.z = df[zcol]
    outfile.intensity = df['int']
    outfile.return_num = df['ret']
    outfile.close()  # flush the points to disk
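A typical call reuses the header of the file the points came from, so the scale, offset and point format stay consistent (the DataFrame and file names here are hypothetical):

src = laspy.file.File("source.las", mode="r")
df_to_las(df, "normalized.las", src.header, zcol="z_norm")
src.close()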
github driving-behavior / DBNet / provider.py
x_out2.append(self.x_train2[index])
                y_out.append(self.Y_train[index])
                self.train_pointer += batch_size

        elif description == "val":
            if not self.cache_val:
                print ("Loading validation data ...")
                for i in range(0, self.num_val):
                    with Image.open(self.X_val1[i]) as img:
                        self.x_val1.append(scipy.misc.imresize(img, shape) / 255.0)
                    # too many opened files
                    """
                    self.x_val1.append(scipy.misc.imresize(scipy.misc.imread(
                                self.X_val1[i]), shape) / 255.0)
                    """
                    infile = laspy.file.File(self.X_val2[i])
                    data = np.vstack([infile.X, infile.Y, infile.Z]).transpose()
                    infile.close()
                    self.x_val2.append(data)
                self.cache_val = True
                print("Finished loading!")
                    
            for i in range(0, batch_size):
                index = (self.val_pointer + i) % len(self.X_val1)
                x_out1.append(self.x_val1[index])
                x_out2.append(self.x_val2[index])
                y_out.append(self.Y_val[index])
                self.val_pointer += batch_size

        elif description == "test":
            if not self.cache_test:
                print ("Loading testing data ...")
github brycefrank / pyfor / pyfor / collection.py
def _get_bounding_box(self, las_path):
        """
        Get the bounding box of an individual las file from its header.

        :param las_path: The path of the las file to retrieve a bounding box from.
        :return: A tuple (minx, maxx, miny, maxy) of bounding box coordinates.
        """
        # open the file and read its extent from the header
        pc = laspy.file.File(las_path)
        min_x, max_x = pc.header.min[0], pc.header.max[0]
        min_y, max_y = pc.header.min[1], pc.header.max[1]
        pc.header.reader.close()
        return (min_x, max_x, min_y, max_y)
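Mapped over a collection's tile paths, the helper yields per-tile extents that can be merged into an overall bounding box (a sketch with a hypothetical collection instance and glob pattern):

import glob

boxes = [collection._get_bounding_box(p) for p in glob.glob("tiles/*.las")]
min_x = min(b[0] for b in boxes)
max_x = max(b[1] for b in boxes)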
github DTMilodowski / LiDAR_canopy / src / thetford_canopy_structure / generate_LiDAR_metrics_raster_Thetford.py
rows_ii = np.arange(y_coords.size)
cols_jj = np.arange(x_coords.size)

PAI = np.zeros((rows,cols))*np.nan
PAD = np.zeros((rows,cols,layers))*np.nan
Shannon = np.zeros((rows,cols))*np.nan
pulse_dens = np.zeros((rows,cols))
n_ground = np.zeros((rows,cols))

# Phase three - loop through las tiles and gradually fill the array
laz_files = io.find_las_files_by_polygon(laz_list,bbox)
n_files = len(laz_files)
for i in range(0, n_files):
    print("Processing tile %i of %i" % (i + 1, n_files))
    # get bbox of specific tile
    lasFile = las.file.File(laz_files[i], mode='r-')
    max_xyz = lasFile.header.max
    min_xyz = lasFile.header.min
    lasFile.close()
    """
    plt.plot([max_xyz[0],min_xyz[0],min_xyz[0],max_xyz[0],max_xyz[0]],[max_xyz[1],max_xyz[1],min_xyz[1],min_xyz[1],max_xyz[1]],'-')
    plt.plot([W_,W_,E_,E_,W_],[N_,S_,S_,N_,N_],'--')
    plt.axis('equal');plt.show()
    """
    # buffer this bounding box with the search radius
    E = max_xyz[0]+radius
    N = max_xyz[1]+radius
    W = min_xyz[0]-radius
    S = min_xyz[1]-radius

    # Read in LiDAR points for region of interest
    polygon = np.asarray([[W,N],[E,N],[E,S],[W,S]])
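Both DTMilodowski scripts build the same buffered query polygon, which can be factored out (a sketch; corners ordered NW, NE, SE, SW to match the scripts above):

def bbox_polygon(W, S, E, N):
    return np.asarray([[W, N], [E, N], [E, S], [W, S]])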