def create_df_hd5(file_dir, filename, column_names):
    """Read a .las file and store its points as a compressed HDF5 file.

    Parameters
    ----------
    file_dir : str
        Directory holding the .las file (must end with a path separator,
        since it is concatenated directly with the filename).
    filename : str
        Name of the .las file; characters [34:40] are used as the id in
        the output file name — assumes a fixed naming scheme, TODO confirm.
    column_names : list
        Column names for the DataFrame, dependent on the LAS version.
    """
    inFile = File(file_dir + filename, mode='r')
    try:
        raw = inFile.get_points()
        df = raw_to_df(raw, column_names)
        # append scaled/offset xyz coordinates alongside the raw columns
        df = scale_and_offset(df, inFile.header, append_to_df=True)
    finally:
        # fix: the LAS file handle was previously never closed (leak)
        inFile.close()
    hdf_name = 'las_points_' + filename[34:40] + '.lz'
    df.to_hdf(file_dir + hdf_name, key='df', complevel=1, complib='lzo')
def create_df_pickle(file_dir, filename, column_names):
    """Read a .las file and store its points as a pickled DataFrame.

    Parameters
    ----------
    file_dir : str
        Directory holding the .las file (must end with a path separator).
    filename : str
        Name of the .las file; characters [34:40] are used as the id in
        the output file name — assumes a fixed naming scheme, TODO confirm.
    column_names : list
        Column names for the DataFrame, dependent on the LAS version.
    """
    inFile = File(file_dir + filename, mode='r')
    try:
        raw = inFile.get_points()
        df = raw_to_df(raw, column_names)
        # append scaled/offset xyz coordinates alongside the raw columns
        df = scale_and_offset(df, inFile.header, append_to_df=True)
    finally:
        # fix: the LAS file handle was previously never closed (leak)
        inFile.close()
    pickle_name = 'las_points_' + filename[34:40] + '.pkl'
    df.to_pickle(file_dir + pickle_name)
Example #3
0
def init(files, color_scale=None, srs_in=None, srs_out=None, fraction=100):
    """Scan LAS files and collect global metadata for later processing.

    Parameters
    ----------
    files : list of str
        Paths of the .las files to scan; unreadable files are skipped
        with a message.
    color_scale : float or None
        Scale for color values; auto-detected from the first readable
        file's red channel (or intensity) when None.
    srs_in, srs_out : optional
        Spatial reference systems; passed through unchanged here.
    fraction : int
        Percentage (0-100) of each file's points to schedule for reading.

    Returns
    -------
    dict
        Keys: 'portions' (list of (filename, (start, end)) chunks),
        'aabb' (global bounding box), 'color_scale', 'srs_in',
        'point_count' and 'avg_min'.
    """
    aabb = None
    total_point_count = 0
    pointcloud_file_portions = []
    avg_min = np.array([0., 0., 0.])

    for filename in files:
        try:
            f = File(filename, mode='r')
        except Exception as e:
            # Fix: the original format string had no placeholder and always
            # printed "(unknown)" instead of the failing file's name.
            print('Error opening {}. Skipping.'.format(filename))
            print(e)
            continue
        avg_min += (np.array(f.header.min) / len(files))

        # grow the global axis-aligned bounding box
        if aabb is None:
            aabb = np.array([f.header.get_min(), f.header.get_max()])
        else:
            bb = np.array([f.header.get_min(), f.header.get_max()])
            aabb[0] = np.minimum(aabb[0], bb[0])
            aabb[1] = np.maximum(aabb[1], bb[1])

        count = int(f.header.count * fraction / 100)
        total_point_count += count

        # read the first points' red channel to guess the color scale
        if color_scale is None:
            if 'red' in f.point_format.lookup:
                color_test_field = 'red'
                if np.max(f.get_points()['point'][color_test_field]
                          [0:min(10000, f.header.count)]) > 255:
                    color_scale = 1.0 / 255
            else:
                color_test_field = 'intensity'
                color_scale = 1.0 / 255

        # Split the file into portions of at most 1M points. Guard against
        # empty files, which previously caused a ZeroDivisionError.
        if count > 0:
            _1M = min(count, 1000000)
            steps = math.ceil(count / _1M)
            portions = [(i * _1M, min(count, (i + 1) * _1M))
                        for i in range(steps)]
            for p in portions:
                pointcloud_file_portions += [(filename, p)]

    return {
        'portions': pointcloud_file_portions,
        'aabb': aabb,
        'color_scale': color_scale,
        'srs_in': srs_in,
        'point_count': total_point_count,
        'avg_min': avg_min
    }
Example #4
0
def init(files, color_scale=None, srs_in=None, srs_out=None, fraction=100):
    """Scan LAS files and collect global metadata for later processing.

    Parameters
    ----------
    files : list of str
        Paths of the .las files to scan; unreadable files are skipped
        with a message.
    color_scale : float or None
        Scale for color values; auto-detected from the first readable
        file's red channel (or intensity) when None.
    srs_in : optional
        Input SRS; resolved from the file header via liblas/pyproj when
        srs_out is given and srs_in is None.
    srs_out : optional
        Output SRS; only used to decide whether srs_in must be resolved.
    fraction : int
        Percentage (0-100) of each file's points to schedule for reading.

    Returns
    -------
    dict
        Keys: 'portions', 'aabb', 'color_scale', 'srs_in',
        'point_count' and 'avg_min'.

    Raises
    ------
    Exception
        If srs_out is set, srs_in is None and a file has no SRS info.
    """
    aabb = None
    total_point_count = 0
    pointcloud_file_portions = []
    avg_min = np.array([0., 0., 0.])

    for filename in files:
        try:
            f = File(filename, mode='r')
        except Exception as e:
            # Fix: the original format string had no placeholder and always
            # printed "(unknown)" instead of the failing file's name.
            print('Error opening {}. Skipping.'.format(filename))
            print(e)
            continue
        avg_min += (np.array(f.header.min) / len(files))

        # grow the global axis-aligned bounding box
        if aabb is None:
            aabb = np.array([f.header.get_min(), f.header.get_max()])
        else:
            bb = np.array([f.header.get_min(), f.header.get_max()])
            aabb[0] = np.minimum(aabb[0], bb[0])
            aabb[1] = np.maximum(aabb[1], bb[1])

        count = int(f.header.count * fraction / 100)
        total_point_count += count

        # read the first points' red channel to guess the color scale
        if color_scale is None:
            if 'red' in f.point_format.lookup:
                color_test_field = 'red'
                if np.max(f.get_points()['point'][color_test_field]
                          [0:min(10000, f.header.count)]) > 255:
                    color_scale = 1.0 / 255
            else:
                color_test_field = 'intensity'
                color_scale = 1.0 / 255

        # Split the file into portions of at most 1M points. Guard against
        # empty files, which previously caused a ZeroDivisionError.
        if count > 0:
            _1M = min(count, 1000000)
            steps = math.ceil(count / _1M)
            portions = [(i * _1M, min(count, (i + 1) * _1M))
                        for i in range(steps)]
            for p in portions:
                pointcloud_file_portions += [(filename, p)]

        if (srs_out is not None and srs_in is None):
            # resolve the input SRS from the file header
            f = liblas.file.File(filename)
            if (f.header.srs.proj4 is not None
                    and f.header.srs.proj4 != ''):
                srs_in = pyproj.Proj(f.header.srs.proj4)
            else:
                raise Exception('\'{}\' file doesn\'t contain srs information. Please use the --srs_in option to declare it.'.format(filename))

    return {
        'portions': pointcloud_file_portions,
        'aabb': aabb,
        'color_scale': color_scale,
        'srs_in': srs_in,
        'point_count': total_point_count,
        'avg_min': avg_min
    }
Example #5
0
def read_las_file(file_dir, filename, column_names):
    '''
    takes .las file as input, generates dataframe
    Inputs:
    file_dir, filename: corresponding to the .las file
    column_names: dependent on the LAS version

    Output:
    df: Dataframe containing original columns plus scaled xyz coords
    '''
    inFile = File(file_dir + filename, mode='r')
    try:
        raw = inFile.get_points()
        df = raw_to_df(raw, column_names)
        # append scaled/offset xyz coordinates alongside the raw columns
        df = scale_and_offset(df, inFile.header, append_to_df=True)
    finally:
        # fix: the LAS file handle was previously never closed (leak)
        inFile.close()
    return df
Example #6
0
    def _update(self):
        """Reload point data from the LAS file into ``self.data_``.

        Produces an (N, 6) array of x, y, z, r, g, b. RGB is taken from
        the file when a 'red' field exists (rescaled from 16-bit to
        8-bit), otherwise a uniform gray (128) is used.
        """
        f = FileLAS(self.filename_, mode='r')
        points = f.get_points()
        # Fix: dict_keys is not subscriptable in Python 3 — name[0] below
        # raised TypeError. Materialize the keys into a list first.
        name = list(points.dtype.fields.keys())

        x = f.get_x_scaled()
        y = f.get_y_scaled()
        z = f.get_z_scaled()

        # Check if the LAS file contains a red property
        if 'red' in points.dtype.fields[name[0]][0].fields.keys():
            red = np.int32(255.0 * f.red / 65536.0)
            green = np.int32(255.0 * f.green / 65536.0)
            blue = np.int32(255.0 * f.blue / 65536.0)
            self.data_ = np.c_[x, y, z, red, green, blue]
        else:
            N = f.X.shape[0]
            color = 128 * np.ones((N, ), dtype=np.int32)
            self.data_ = np.c_[x, y, z, color, color, color]
Example #7
0
    def _update(self):
        """Reload point data from the LAS file into ``self.data_``.

        Produces an (N, 6) array of x, y, z, r, g, b. RGB is taken from
        the file when a 'red' field exists (rescaled from 16-bit to
        8-bit), otherwise a uniform gray (128) is used.
        """
        f = FileLAS(self.filename_, mode='r')
        points = f.get_points()
        # Fix: dict_keys is not subscriptable in Python 3 — name[0] below
        # raised TypeError. Materialize the keys into a list first.
        name = list(points.dtype.fields.keys())

        x = f.get_x_scaled()
        y = f.get_y_scaled()
        z = f.get_z_scaled()

        # Check if the LAS file contains a red property
        if 'red' in points.dtype.fields[name[0]][0].fields.keys():
            red = np.int32(255.0 * f.red / 65536.0)
            green = np.int32(255.0 * f.green / 65536.0)
            blue = np.int32(255.0 * f.blue / 65536.0)
            self.data_ = np.c_[x, y, z, red, green, blue]
        else:
            N = f.X.shape[0]
            color = 128 * np.ones((N,), dtype=np.int32)
            self.data_ = np.c_[x, y, z, color, color, color]
Example #8
0
import numpy as np
import laspy
from laspy.file import File

# Inspect the structure and per-point attributes of a LAS point cloud.
file_name = 'points.las'
inFile = File(file_name, mode='r')
print("inFile type", type(inFile))

# boolean mask of points classified as ground (ASPRS class 2)
I = inFile.Classification == 2

h = inFile.header
print("Version=", h.major_version, ".", h.minor_version)

pts = inFile.get_points()
print("points type", type(pts))
print("points shape", pts.shape)

intensity = inFile.get_intensity()
print("intensity points shape", intensity.shape)

flag_byte = inFile.get_flag_byte()
print("flag_byte shape", flag_byte.shape)

pt_ret_count = inFile.point_return_count()
print("type of point return count", type(pt_ret_count))

ret_num = inFile.get_return_num()
print("ret_num shape", ret_num.shape)
print("ret_num shape 100", ret_num[100].shape)
# fix: this line was accidentally duplicated in the original
print("First returned number", ret_num[0], "\n")
Example #9
0
def main():
    """Read the .las file at ``file_dir``/``filename`` and dump it to CSV.

    Relies on module-level globals: ``file_dir``, ``filename`` and
    ``columns_point_cloud``. Output path "filename.txt" is a literal.
    """
    inFile = File(file_dir + filename, mode='r')
    try:
        raw = inFile.get_points()
        df = raw_to_df(raw, columns_point_cloud)
    finally:
        # fix: the LAS file handle was previously never closed (leak);
        # also converted tab indentation to 4 spaces (PEP 8)
        inFile.close()

    df.to_csv("filename.txt")
Example #10
0
class LiDAR(object):
    """Read a LAS point cloud and derive per-class coordinate arrays,
    the file extent and point densities for terrain and/or first-return
    surface processing.
    """
    def __init__(self,
                 in_lidar_path,
                 out_path,
                 partials_create,
                 terrain=False,
                 surfaces=False):
        """ Init variables
        """
        self.terrain = terrain
        self.surfaces = surfaces
        if self.terrain:
            # ASPRS classification code 2 == ground
            self.class_flag = 2
        elif self.surfaces:
            # TODO
            pass

        self.in_lidar_path = in_lidar_path
        self.path, full_name = os.path.split(in_lidar_path)
        filename, ext = os.path.splitext(full_name)
        if ext.lower() == '.las':
            self.laz = False
            self.in_las_path = self.in_lidar_path
        else:
            # NOTE(review): non-.las input (e.g. .laz) is a silent no-op and
            # self.in_las_path is never set — confirm intended behaviour.
            pass

        self.files_utils = files_and_dirs_funs.DirAndPaths()
        self.name, self.extension = self.files_utils.init(full_name)
        self.partials_create = partials_create
        self.templates_dict = self.files_utils.file_templates(self.name)
        self.out_path = out_path
        # Eagerly read the file and pre-compute everything process() needs.
        self.read_las_file()
        self.get_all_points()
        self.get_scaled_points()
        self.get_file_extent()
        self.get_file_density()
        self.get_points_arrays()

    def process(self):
        """Write partial results when requested and return the computed
        arrays, the file extent and the ground point density.
        """
        if self.partials_create and not self.surfaces:
            self.out_dir = os.path.join(self.out_path, 'intermediate_results',
                                        'las')
            self.files_utils.create_dir(self.out_dir)

            self.write_las_file()

        # An empty (0, 2) xy array means no point matched the class filter.
        if self.lidar_arrays_list[0].shape == (0, 2):
            raise ValueError(u"Error: An error has occurred. The selected" +
                             u" file is not valid for the process, it is" +
                             u" possibly not classified.\nPlease, solve" +
                             u" it and restart the process!")

        return [
            self.lidar_arrays_list, self.las_file_extent,
            self.density['ground_dens_class']
        ]

    def read_las_file(self):
        """ Read the input LiDAR file in las format. Not laz format
        """
        self.in_file = File(self.in_las_path, mode='r')
        # Scale/offset turn the raw integer coordinates into real units.
        self.scale = self.in_file.header.scale
        self.offset = self.in_file.header.offset

    def get_all_points(self):
        """ Get points for file (points information and coordinates)
        """
        self.points_array = self.in_file.get_points()
        self.points_number = len(self.in_file)

    def get_scaled_points(self):
        """ Get the coordinates scaled
        """
        x = self.in_file.X
        y = self.in_file.Y
        z = self.in_file.Z

        # real coordinate = raw integer * scale + offset (per axis)
        self.x_dimension = x * self.scale[0] + self.offset[0]
        self.y_dimension = y * self.scale[1] + self.offset[1]
        self.z_dimension = z * self.scale[-1] + self.offset[-1]

    def get_file_extent(self):
        """ Get extent of the lidar file
        """
        # Corner order: (max,max), (max,min), (min,max), (min,min)
        self.las_file_extent = [(max(self.x_dimension), max(self.y_dimension)),
                                (max(self.x_dimension), min(self.y_dimension)),
                                (min(self.x_dimension), max(self.y_dimension)),
                                (min(self.x_dimension), min(self.y_dimension))]

    # for raster_geotransform= (min(self.x_dimension, max(self.y_dimension)))
    # or the same self.las_file_extent[2]

    def get_ground_points(self):
        """ Function to get the number of ground points. 
            Source: laspy documentation
        """
        # Points whose return number equals their total number of returns,
        # i.e. last returns, used here as a proxy for ground hits.
        num_returns = self.in_file.num_returns
        return_num = self.in_file.return_num
        ground_points = self.in_file.points[num_returns == return_num]
        self.ground_points_number = len(ground_points)

    def get_file_density(self):
        """ Compute points density only with ground points -class: 2-. 
        """
        self.get_ground_points()

        self.density = {}
        # Planar area of the bounding box, in squared coordinate units.
        self.file_sup_m2 = (max(self.x_dimension) - min(self.x_dimension)) *\
                            (max(self.y_dimension) - min(self.y_dimension))
        # density of all lidar returns
        self.density['all_dens'] = self.points_number / self.file_sup_m2

        class_2_points, _ = self.get_points_by_class(2)
        class_0_points, _ = self.get_points_by_class(0)
        class_1_points, _ = self.get_points_by_class(1)
        class_7_points, _ = self.get_points_by_class(7)
        class_8_points, _ = self.get_points_by_class(8)
        # density of only ground points filtered by returns
        self.density['ground_dens_ret'] = (self.ground_points_number /
                                           self.file_sup_m2)
        # # density of only ground points filtered by class: 2
        self.density['ground_dens_class'] = ((len(class_2_points)) /
                                             self.file_sup_m2)
        # density of lidar file excluding classes 0, 1, 7 and 8. Where is the overlap class?
        self.density['util_points'] = (
            (self.points_number - len(class_0_points) - len(class_1_points) -
             len(class_7_points) - len(class_8_points)) / self.file_sup_m2)

        # compare ground points density (filtered points by class vs filtered points by returns)
        if self.density['ground_dens_ret'] == self.density[
                'ground_dens_class']:
            return True
        else:
            return False  # pass

    def get_points_by_class(self, classif=2):
        """ Get points array with the given classification id (ASPRS classes)
        """
        class_points_bool = self.in_file.Classification == classif
        return self.points_array[class_points_bool], class_points_bool

    def get_points_arrays(self):
        """ Creates arrays for a given class (default=2) with the coordinates
            of the points classificated by that class flag
        """
        # class_flags = 2, 3, 4, 5 for ground and low/medium/high vegetation
        if self.terrain:
            class_2_points, class_2_bool = self.get_points_by_class(
                self.class_flag)
            size = class_2_points.shape[0]
            x_array = self.x_dimension[class_2_bool].reshape(size, 1)
            y_array = self.y_dimension[class_2_bool].reshape(size, 1)
            z_array = self.z_dimension[class_2_bool]

        elif self.surfaces:
            # Write the first-return points out so they can be read back.

            self.out_dir = os.path.join(self.out_path, 'intermediate_results',
                                        'las')

            filename = ('Surfaces_' +
                        self.templates_dict['las'].format(self.name))

            full_path = os.path.join(self.out_dir, filename)

            self.files_utils.create_dir(self.out_dir)

            out_file = File(full_path, mode='w', header=self.in_file.header)
            out_file.points = self.in_file.points[self.in_file.return_num == 1]
            out_file.close()

            # Read the file back.
            in_file = File(full_path, mode='r')
            scale = in_file.header.scale
            offset = in_file.header.offset

            x = in_file.X
            y = in_file.Y
            z = in_file.Z

            x_dimension = x * scale[0] + offset[0]
            y_dimension = y * scale[1] + offset[1]
            z_dimension = z * scale[-1] + offset[-1]

            size = x_dimension.shape[0]

            x_array = x_dimension.reshape(size, 1)
            y_array = y_dimension.reshape(size, 1)
            z_array = z_dimension

            # Close the file so it can be deleted.
            in_file.close()

            if not self.partials_create:
                self.files_utils.remove_temp_file(full_path)
                try:
                    self.files_utils.remove_temp_dir(self.out_dir)
                except OSError:
                    pass

        # NOTE(review): x_array/y_array/z_array are unbound when neither
        # terrain nor surfaces is set — confirm callers always set one.
        xy_array = np.concatenate((x_array, y_array), axis=1)
        self.lidar_arrays_list = [xy_array, z_array]

    def write_las_file(self):
        """ Create and write a new lidar file with the desirable points
        """
        if self.surfaces:
            self.out_full_path = os.path.join(
                self.out_dir,
                ('Surfaces_' + self.templates_dict['las'].format(self.name)))

        elif self.terrain:
            self.out_full_path = os.path.join(
                self.out_dir,
                ('Terrain_' + self.templates_dict['las'].format(self.name)))

        out_file = File(self.out_full_path,
                        mode='w',
                        header=self.in_file.header)
        if self.terrain:
            class_2_points, class_2_bool = self.get_points_by_class(
                self.class_flag)
            out_file.points = self.in_file.points[class_2_bool]

        elif self.surfaces:
            out_file.points = self.in_file.points[self.in_file.return_num == 1]

        out_file.close()
Example #11
0
class LiDAR(object):
    """Read a LAS file and build terrain (class 2) and surface
    (first-return) coordinate arrays plus the file extent.
    """
    def __init__(self, in_las_path, out_path, partials_create):
        """ Init variables
        """
        self.in_las_path = in_las_path
        self.out_path = out_path
        self.partials_create = partials_create

        _, self.filename = os.path.split(in_las_path)

        self.dirs = dir_fns.DirAndPaths(self.filename, out_path)

        # Eagerly read the file and pre-compute every derived product.
        self.read_las_file()
        self.get_all_points()
        self.get_scaled_points()
        self.get_file_extent()

        self.get_terrain_points_array()
        self.get_first_returns_array()
        self.in_file.close()

        self.lidar_results = [
            self.terrain_arrays_list, self.surfaces_arrays_list,
            self.las_file_extent
        ]

# TODO: if the input is .laz, decompress it first

    def read_las_file(self):
        """ Read the input LiDAR file in las format. Not laz format
        """
        try:
            self.in_file = File(self.in_las_path, mode='r')
        except OSError:
            raise OSError(u"LiDAR Forestry Height can't open the file.\n" +
                          u"Please try again or with other LiDAR file")

        # Scale/offset turn the raw integer coordinates into real units.
        self.scale = self.in_file.header.scale
        self.offset = self.in_file.header.offset

    def get_all_points(self):
        """ Get points for file (points information and coordinates)
        """
        self.points_array = self.in_file.get_points()
        self.points_number = len(self.in_file)

    def get_file_extent(self):
        """ Get extent of the lidar file
        """
        # Corner order: (max,max), (max,min), (min,max), (min,min)
        self.las_file_extent = [(max(self.x_dimension), max(self.y_dimension)),
                                (max(self.x_dimension), min(self.y_dimension)),
                                (min(self.x_dimension), max(self.y_dimension)),
                                (min(self.x_dimension), min(self.y_dimension))]

    def get_scaled_points(self):
        """ Get the coordinates scaled
        """
        x = self.in_file.X
        y = self.in_file.Y
        z = self.in_file.Z

        # real coordinate = raw integer * scale + offset (per axis)
        self.x_dimension = x * self.scale[0] + self.offset[0]
        self.y_dimension = y * self.scale[1] + self.offset[1]
        self.z_dimension = z * self.scale[-1] + self.offset[-1]

    def get_points_by_class(self, classif=2):
        """ Get points array with the given classification id (ASPRS classes)
        """
        class_points_bool = self.in_file.Classification == classif
        return self.points_array[class_points_bool], class_points_bool

    def get_terrain_points_array(self):
        """ Creates arrays for a given class (default=2) with the coordinates
            of the points classificated by that class flag
        """
        self.class_flag = 2
        class_2_points, class_2_bool = self.get_points_by_class(
            self.class_flag)
        size = class_2_points.shape[0]
        x_array = self.x_dimension[class_2_bool].reshape(size, 1)
        y_array = self.y_dimension[class_2_bool].reshape(size, 1)
        z_array = self.z_dimension[class_2_bool]

        xy_array = np.concatenate((x_array, y_array), axis=1)
        self.terrain_arrays_list = [xy_array, z_array]

        if self.partials_create:
            # NOTE(review): full_path is assigned but unused here, and it
            # points at 'las_surfaces' while write_las_file() targets
            # 'las_terrain' — confirm this is intended.
            full_path = self.dirs.out_paths['las_surfaces']
            self.dirs.create_dir(self.dirs.out_dirs['las'])
            self.write_las_file()

    def get_first_returns_array(self):
        """Build xy/z arrays of first-return points via a temporary file."""
        # Write the first returns to a (possibly temporary) LAS file so the
        # coordinates can be re-read using that file's own scale/offset.
        if self.partials_create:
            full_path = self.dirs.out_paths['las_surfaces']
            self.dirs.create_dir(self.dirs.out_dirs['las'])

        else:
            full_path = self.dirs.temp_full_paths['las_surfaces']
            self.dirs.create_dir(self.dirs.temp_dirs['temp_dir'])

        out_file = File(full_path, mode='w', header=self.in_file.header)
        out_file.points = self.in_file.points[self.in_file.return_num == 1]
        out_file.close()

        # Read the file back.
        in_file = File(full_path, mode='r')
        scale = in_file.header.scale
        offset = in_file.header.offset

        x = in_file.X
        y = in_file.Y
        z = in_file.Z

        x_dimension = x * scale[0] + offset[0]
        y_dimension = y * scale[1] + offset[1]
        z_dimension = z * scale[-1] + offset[-1]

        size = x_dimension.shape[0]

        x_array = x_dimension.reshape(size, 1)
        y_array = y_dimension.reshape(size, 1)
        z_array = z_dimension

        # Close the file so it can be deleted.
        in_file.close()
        if not self.partials_create:
            self.dirs.remove_temp_file(full_path)
            self.dirs.remove_temp_dir(self.dirs.temp_dirs['temp_dir'])

        xy_array = np.concatenate((x_array, y_array), axis=1)
        self.surfaces_arrays_list = [xy_array, z_array]

    def write_las_file(self):
        """ Create and write a new lidar file with the desirable points
        """

        self.dirs.set_output_dir()
        full_path = self.dirs.out_paths['las_terrain']
        self.dirs.create_dir(self.dirs.out_dirs['las'])

        out_file = File(full_path, mode='w', header=self.in_file.header)
        class_2_points, class_2_bool = self.get_points_by_class(
            self.class_flag)
        out_file.points = self.in_file.points[class_2_bool]
        out_file.close()