Example No. 1
def main(args):
    '''
    Main function
    '''
    try:
        pargs = parser.parse_args(args[1:])
    except TypeError as error_msg:
        print(str(error_msg))
        return 1

    kmname = constants.get_tilename(pargs.las_file)
    wkt = constants.tilename_to_extent(kmname, return_wkt=True)
    print("Running %s on block: %s, %s" % (PROGNAME, kmname, time.asctime()))

    reporter = report.ReportUniqueDates(pargs.use_local)

    las = laspy.read(pargs.las_file)

    datetimes = find_unique_days(las.gps_time)
    datestrings = [d.strftime('%Y%m%d') for d in datetimes]

    unique_dates = ';'.join(datestrings)
    min_date = np.min(las.gps_time)
    max_date = np.max(las.gps_time)

    reporter.report(kmname,
                    str(to_datetime(min_date)),
                    str(to_datetime(max_date)),
                    unique_dates,
                    len(datestrings),
                    wkt_geom=wkt)

    return 0
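
The helper functions find_unique_days and to_datetime are not shown above. A minimal sketch of what they might do, assuming the tile stores Adjusted Standard GPS Time (GPS seconds minus 1.0e9) and ignoring leap seconds; the real helpers in the source module may differ:

from datetime import datetime, timedelta, timezone

import numpy as np

GPS_EPOCH = datetime(1980, 1, 6, tzinfo=timezone.utc)

def to_datetime(gps_time):
    # Adjusted Standard GPS Time is GPS seconds minus 1.0e9 (leap seconds ignored).
    return GPS_EPOCH + timedelta(seconds=float(gps_time) + 1.0e9)

def find_unique_days(gps_times):
    # One representative datetime (midnight UTC) per distinct acquisition day.
    seconds = np.asarray(gps_times, dtype=float) + 1.0e9
    day_numbers = np.unique(np.floor(seconds / 86400.0).astype(int))
    return [GPS_EPOCH + timedelta(days=int(d)) for d in day_numbers]
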
Example No. 2
def main(args):
    '''
    Core functionality. Called by qc_wrap.py and __file__
    '''
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as error_msg:
        print(str(error_msg))
        return 1

    kmname = constants.get_tilename(pargs.las_file)
    print('Running %s on block: %s, %s' % (PROGNAME, kmname, time.asctime()))
    if not os.path.exists(pargs.outdir):
        os.mkdir(pargs.outdir)

    path = pargs.las_file
    filename = os.path.basename(path)
    out_path = os.path.join(pargs.outdir, filename)

    las_in = laspy.read(path)
    las_out = laspy.LasData(las_in.header)

    points = las_in.points
    las_out.points = points

    xy = np.column_stack((las_in.x, las_in.y))
    geoid = grid.fromGDAL(GEOID_GRID, upcast=True)
    geoid_offset = geoid.interpolate(xy)

    # Apply vertical offset from geoid grid
    las_out.z -= geoid_offset

    las_out.write(out_path)

    return 0
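
grid.fromGDAL and interpolate belong to the project's own grid module, not to GDAL or laspy. A rough stand-in that samples a single-band geoid raster directly through the GDAL bindings, using nearest-neighbour lookup instead of the module's interpolation (the helper name is made up):

import numpy as np
from osgeo import gdal

def sample_geoid(raster_path, xy):
    # Nearest-neighbour sample of band 1 at each (x, y) pair in the n x 2 array xy.
    ds = gdal.Open(raster_path)
    gt = ds.GetGeoTransform()
    values = ds.GetRasterBand(1).ReadAsArray()
    cols = np.clip(((xy[:, 0] - gt[0]) / gt[1]).astype(int), 0, values.shape[1] - 1)
    rows = np.clip(((xy[:, 1] - gt[3]) / gt[5]).astype(int), 0, values.shape[0] - 1)
    return values[rows, cols]
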
Example No. 3
def init(filename):

    if filename is None:
        raise ValueError("File name not provided")

    # laspy.read() loads the entire file into memory, so the handle can
    # safely be closed before the data is used below.
    with open(filename, 'rb') as f:
        in_file = laspy.read(f)

    point_count = in_file.header.point_count
    m_out_views = []

    return in_file, point_count, m_out_views
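
A hypothetical call, just to illustrate the returned triple (the file name is made up):

las, n_points, out_views = init("tile.las")
print("read %d points" % n_points)  # out_views starts out as an empty list
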
Example No. 4
def get_from_cache_or_download(lidar_tile_filename,
                               cache_dir,
                               bounds,
                               logger=None):
    local_tile_filepath = path.join(cache_dir, lidar_tile_filename)
    if not path.exists(local_tile_filepath):
        lidar_uri = BASE_URI + lidar_tile_filename
        if logger is not None:
            logger.info("Downloading LIDAR data from %s to %s", lidar_uri,
                        local_tile_filepath)
        request.urlretrieve(lidar_uri, local_tile_filepath)

    las = laspy.read(local_tile_filepath)
    c = np.array(las.classification)
    x = np.array(las.x)
    y = np.array(las.y)

    # Keep medium and high vegetation returns (ASPRS classes 4 and 5)
    # that fall inside the requested bounds.
    cond = ((c == 4) | (c == 5)) & ((x >= bounds.left)
                                    & (x <= bounds.right)
                                    & (y >= bounds.bottom)
                                    & (y <= bounds.top))
    return pd.DataFrame({"class_val": c[cond], "x": x[cond], "y": y[cond]})
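
The bounds argument only needs left, right, bottom and top attributes (a rasterio BoundingBox qualifies). A hypothetical invocation with a namedtuple stand-in; the tile name, cache directory and coordinates are invented for illustration:

from collections import namedtuple

Bounds = namedtuple("Bounds", ["left", "bottom", "right", "top"])

veg = get_from_cache_or_download("example_tile.laz",
                                 cache_dir="/tmp/lidar_cache",
                                 bounds=Bounds(720000, 6170000, 721000, 6171000))
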
Example No. 5
def main(args):
    '''
    Core function. Called either stand-alone or from qc_wrap.
    '''
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as error_str:
        print(str(error_str))
        return 1

    kmname = constants.get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" % (PROGNAME, kmname, time.asctime()))
    cell_size = pargs.cs
    ncols_f = TILE_SIZE / cell_size
    ncols = int(ncols_f)
    if ncols != ncols_f:
        print("TILE_SIZE: %d must be divisible by cell size..." % (TILE_SIZE))
        usage()
        return 1

    print("Using cell size: %.2f" % cell_size)
    use_local = pargs.use_local
    if pargs.schema is not None:
        report.set_schema(pargs.schema)

    reporter = report.ReportDensity(use_local)
    outdir = pargs.outdir

    if not os.path.exists(outdir):
        os.mkdir(outdir)

    lasname = pargs.las_file
    waterconnection = pargs.ref_data
    outname_base = "den_{0:.0f}_{1}.tif".format(
        cell_size, os.path.splitext(os.path.basename(lasname))[0])
    outname = os.path.join(outdir, outname_base)
    print("Reading %s, writing %s" % (lasname, outname))

    try:
        (x_min, y_min, x_max, y_max) = constants.tilename_to_extent(kmname)
    except Exception as error_str:
        print("Exception: %s" % str(error_str))
        print("Bad 1km formatting of las file: %s" % lasname)
        return 1

    las_file = laspy.read(lasname)

    nx = int((x_max - x_min) / cell_size)
    ny = int((y_max - y_min) / cell_size)
    ds_grid = gdal.GetDriverByName('GTiff').Create(outname, nx, ny, 1, gdal.GDT_Float32)
    georef = (x_min, cell_size, 0, y_max, 0, -cell_size)
    ds_grid.SetGeoTransform(georef)
    band = ds_grid.GetRasterBand(1)
    band.SetNoDataValue(ND_VAL)

    # make local copies so we don't have to call the x and y accessors
    # of las_file nx*ny times
    xs = las_file.x
    ys = las_file.y

    # determine densities; rows follow y (north up), columns follow x
    den_grid = np.zeros((ny, nx), dtype=float)
    for i in range(nx):
        for j in range(ny):
            I = np.ones(las_file.header.point_count, dtype=bool)

            if i < nx-1:
                I &= np.logical_and(xs >= x_min+i*cell_size, xs < x_min+(i+1)*cell_size)
            else:
                I &= np.logical_and(xs >= x_min+i*cell_size, xs <= x_min+(i+1)*cell_size)

            if j < ny-1:
                I &= np.logical_and(ys >= y_min+j*cell_size, ys < y_min+(j+1)*cell_size)
            else:
                I &= np.logical_and(ys >= y_min+j*cell_size, ys <= y_min+(j+1)*cell_size)

            den_grid[ny-j-1][i] = np.sum(I) / (cell_size*cell_size)

    band.WriteArray(den_grid)

    t1 = time.process_time()
    if pargs.lakesql is None and pargs.seasql is None:
        print('No layer selection specified!')
        print('Assuming that all water polys are in first layer of connection...')
        lake_mask = vector_io.burn_vector_layer(
            waterconnection,
            georef,
            den_grid.shape,
            None,
            None,
        )
    else:
        lake_mask = np.zeros(den_grid.shape, dtype=bool)
        if pargs.lakesql is not None:
            print("Burning lakes...")
            lake_mask |= vector_io.burn_vector_layer(
                waterconnection,
                georef,
                den_grid.shape,
                None,
                pargs.lakesql,
            )
        if pargs.seasql is not None:
            print("Burning sea...")
            lake_mask |= vector_io.burn_vector_layer(
                waterconnection,
                georef,
                den_grid.shape,
                None,
                pargs.seasql,
            )

    t2 = time.process_time()
    print("Burning 'water' took: %.3f s" % (t2 - t1))

    # Treat no-data cells as zero density before computing the statistics below.
    nd_mask = (den_grid == ND_VAL)
    den_grid[nd_mask] = 0
    n_lake = lake_mask.sum()
    print("Number of no-data densities: %d" % (nd_mask.sum()))
    print("Number of water cells       : %d" % (n_lake))
    if n_lake < den_grid.size:
        not_lake = den_grid[np.logical_not(lake_mask)]
        den = not_lake.min()
        mean_den = not_lake.mean()
    else:
        den = ALL_LAKE
        mean_den = ALL_LAKE
    print("Minumum density            : %.2f" % den)

    wkt = constants.tilename_to_extent(kmname, return_wkt=True)
    reporter.report(kmname, den, mean_den, cell_size, wkt_geom=wkt)

    return 0
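
The cell-by-cell double loop above tests every point once per cell. The same density grid can be built in a single pass with np.histogram2d; this sketch reuses the variable names from the example and is not part of the project's code:

counts, _, _ = np.histogram2d(ys, xs,
                              bins=[ny, nx],
                              range=[[y_min, y_max], [x_min, x_max]])
# histogram2d orders rows from y_min upwards; flip so row 0 is the northern edge.
den_grid = np.flipud(counts) / (cell_size * cell_size)
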
Example No. 6
    def __init__(self, bytes):
        self.las = laspy.read(bytes)
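
The class around this __init__ is truncated and its name is not shown. A self-contained sketch with a made-up class name, which also renames the parameter so it no longer shadows the built-in bytes and wraps raw bytes in a stream, since laspy.read() expects a path or a binary file object:

import io

import laspy

class LasBuffer:
    '''Hypothetical wrapper holding a LAS/LAZ point cloud read from memory.'''

    def __init__(self, data):
        # laspy.read() accepts a path or a binary stream, so wrap raw bytes first.
        if isinstance(data, (bytes, bytearray)):
            data = io.BytesIO(data)
        self.las = laspy.read(data)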