Example #1
    def repair(self, points):
        path = os.path.join(self.params['path'], self.kmname + '_floating.bin')

        if not os.path.exists(path) or os.path.getsize(path) <= 0:
            return None

        pc = pointcloud.fromBinary(path)
        georef = [self.extent[0], CS_BURN, 0, self.extent[3], 0, -CS_BURN]
        ncols = int((self.extent[2] - self.extent[0]) / CS_BURN)
        nrows = int((self.extent[3] - self.extent[1]) / CS_BURN)

        assert (ncols * CS_BURN + self.extent[0]) == self.extent[2]
        assert (nrows * CS_BURN + self.extent[1]) == self.extent[3]

        if self.params['exclude_all']:  # use sql_include as a whitelist
            mask = np.zeros((nrows, ncols), dtype=bool)
        else:
            mask = np.ones((nrows, ncols), dtype=bool)
            for sql in self.params['sql_exclude']:
                mask_ = vector_io.burn_vector_layer(self.params['cstr'],
                                                    georef, (nrows, ncols),
                                                    layersql=sql)
                mask[mask_] = 0

        class_maps = []
        # Explicitly included with desired class.
        for c in self.params['sql_include']:
            sql = self.params['sql_include'][c]
            mask_ = vector_io.burn_vector_layer(self.params['cstr'],
                                                georef, (nrows, ncols),
                                                layersql=sql)
            mask[mask_] = 1
            class_maps.append((mask_, c))

        pc = pc.cut_to_grid_mask(mask, georef)
        rc = np.ones((pc.size, ), dtype=np.float64) * RECLASS_DEFAULT

        for M, c in class_maps:
            MM = pc.get_grid_mask(M, georef)
            rc[MM] = c

        xyc = np.column_stack((pc.xy, rc))

        return (points, xyc)
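The geotransform list above follows GDAL's six-parameter convention (origin x, cell width, row rotation, origin y, column rotation, negative cell height), and the asserts require the tile extent to divide evenly into CS_BURN cells. A minimal standalone sketch of that relationship, using made-up extent and cell-size values rather than anything from the original module:

import numpy as np

CS_BURN = 1.0                                         # hypothetical cell size
extent = (512000.0, 6223000.0, 513000.0, 6224000.0)   # hypothetical xmin, ymin, xmax, ymax

# GDAL-style geotransform: (x_origin, cell_width, 0, y_origin, 0, -cell_height).
# The origin is the upper-left corner, so row 0 of a burned mask lies along the
# northern edge of the extent.
georef = [extent[0], CS_BURN, 0, extent[3], 0, -CS_BURN]
ncols = int((extent[2] - extent[0]) / CS_BURN)
nrows = int((extent[3] - extent[1]) / CS_BURN)

# Same consistency check as in repair(): the extent must be a whole number of cells.
assert ncols * CS_BURN + extent[0] == extent[2]
assert nrows * CS_BURN + extent[1] == extent[3]

mask = np.ones((nrows, ncols), dtype=bool)            # (1000, 1000) for this extent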
Example #2
    def repair(self, points):
        georef = [
            self.extent[0], CS_BURN_BUILD, 0, self.extent[3], 0, -CS_BURN_BUILD
        ]
        ncols = int((self.extent[2] - self.extent[0]) / CS_BURN_BUILD)
        nrows = int((self.extent[3] - self.extent[1]) / CS_BURN_BUILD)

        assert (ncols * CS_BURN_BUILD + self.extent[0]) == self.extent[2]
        assert (nrows * CS_BURN_BUILD + self.extent[1]) == self.extent[3]

        build_mask = vector_io.burn_vector_layer(
            self.params['cstr'],
            georef,
            (nrows, ncols),
            layersql=self.params['sql'],
            all_touched=False,
        )

        if not build_mask.any():
            return

        pc = pointcloud.fromLaspy(self.las)
        pc = pc.cut_to_class(list(BUILDING_RECLASS.keys()))
        pc = pc.cut_to_grid_mask(build_mask, georef)

        if pc.size <= 0:
            return

        xyc = np.empty((0, 3), dtype=np.float64)
        for c in BUILDING_RECLASS:
            pc_ = pc.cut_to_class(c)
            rc = np.ones((pc_.size, ), dtype=np.float64) * BUILDING_RECLASS[c]
            xyc_ = np.column_stack((pc_.xy, rc))
            xyc = np.vstack((xyc, xyc_))

        return (points, xyc)
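The per-class loop above simply glues the point coordinates to a constant class column and stacks the blocks. A small numpy-only sketch of that pattern, with made-up coordinates and a made-up BUILDING_RECLASS mapping standing in for the point cloud objects:

import numpy as np

# Hypothetical reclass table and per-class coordinates standing in for pc_.xy.
BUILDING_RECLASS = {6: 6, 20: 6}
xy_by_class = {
    6: np.array([[512010.5, 6223500.2]]),
    20: np.array([[512020.0, 6223510.0], [512021.0, 6223511.0]]),
}

xyc = np.empty((0, 3), dtype=np.float64)
for c, new_class in BUILDING_RECLASS.items():
    xy = xy_by_class[c]
    rc = np.full((xy.shape[0],), new_class, dtype=np.float64)  # constant class column
    xyc = np.vstack((xyc, np.column_stack((xy, rc))))

# xyc now holds one (x, y, class) row per point, ready to return alongside `points`.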
Example #3
def main(args):
    '''
    Core function. Called either stand-alone or from qc_wrap.
    '''
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as error_str:
        print(str(error_str))
        return 1

    kmname = constants.get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" % (PROGNAME, kmname, time.asctime()))
    cell_size = pargs.cs
    ncols_f = TILE_SIZE / cell_size
    ncols = int(ncols_f)
    if ncols != ncols_f:
        print("TILE_SIZE: %d must be divisible by cell size..." % (TILE_SIZE))
        usage()
        return 1

    print("Using cell size: %.2f" % cell_size)
    use_local = pargs.use_local
    if pargs.schema is not None:
        report.set_schema(pargs.schema)

    reporter = report.ReportDensity(use_local)
    outdir = pargs.outdir

    if not os.path.exists(outdir):
        os.mkdir(outdir)

    lasname = pargs.las_file
    waterconnection = pargs.ref_data
    outname_base = "den_{0:.0f}_{1}.tif".format(
        cell_size, os.path.splitext(os.path.basename(lasname))[0])
    outname = os.path.join(outdir, outname_base)
    print("Reading %s, writing %s" % (lasname, outname))

    try:
        (x_min, y_min, x_max, y_max) = constants.tilename_to_extent(kmname)
    except Exception as error_str:
        print("Exception: %s" % str(error_str))
        print("Bad 1km formatting of las file: %s" % lasname)
        return 1

    las_file = laspy.file.File(lasname, mode='r')

    nx = int((x_max - x_min) / cell_size)
    ny = int((y_max - y_min) / cell_size)
    ds_grid = gdal.GetDriverByName('GTiff').Create(outname, nx, ny, 1,
                                                   gdal.GDT_Float32)
    georef = (x_min, cell_size, 0, y_max, 0, -cell_size)
    ds_grid.SetGeoTransform(georef)
    band = ds_grid.GetRasterBand(1)
    band.SetNoDataValue(ND_VAL)

    # make local copies so we don't have to call the x and y getter functions
    # of las_file nx*ny times
    xs = las_file.x
    ys = las_file.y

    # determine densities
    den_grid = np.zeros((ny, nx), dtype=float)  # rows follow y, columns follow x
    for i in range(nx):
        for j in range(ny):
            I = np.ones(las_file.header.count, dtype=bool)

            if i < nx - 1:
                I &= np.logical_and(xs >= x_min + i * cell_size,
                                    xs < x_min + (i + 1) * cell_size)
            else:
                I &= np.logical_and(xs >= x_min + i * cell_size,
                                    xs <= x_min + (i + 1) * cell_size)

            if j < ny - 1:
                I &= np.logical_and(ys >= y_min + j * cell_size,
                                    ys < y_min + (j + 1) * cell_size)
            else:
                I &= np.logical_and(ys >= y_min + j * cell_size,
                                    ys <= y_min + (j + 1) * cell_size)

            den_grid[ny - j - 1][i] = np.sum(I) / (cell_size * cell_size)

    band.WriteArray(den_grid)
    las_file.close()

    t1 = time.perf_counter()
    if pargs.lakesql is None and pargs.seasql is None:
        print('No layer selection specified!')
        print(
            'Assuming that all water polys are in first layer of connection...'
        )
        lake_mask = vector_io.burn_vector_layer(
            waterconnection,
            georef,
            den_grid.shape,
            None,
            None,
        )
    else:
        lake_mask = np.zeros(den_grid.shape, dtype=bool)
        if pargs.lakesql is not None:
            print("Burning lakes...")
            lake_mask |= vector_io.burn_vector_layer(
                waterconnection,
                georef,
                den_grid.shape,
                None,
                pargs.lakesql,
            )
        if pargs.seasql is not None:
            print("Burning sea...")
            lake_mask |= vector_io.burn_vector_layer(
                waterconnection,
                georef,
                den_grid.shape,
                None,
                pargs.seasql,
            )

    t2 = time.perf_counter()
    print("Burning 'water' took: %.3f s" % (t2 - t1))

    # what to do with nodata??
    nd_mask = (den_grid == ND_VAL)
    den_grid[nd_mask] = 0
    n_lake = lake_mask.sum()
    print("Number of no-data densities: %d" % (nd_mask.sum()))
    print("Number of water cells       : %d" % (n_lake))
    if n_lake < den_grid.size:
        not_lake = den_grid[np.logical_not(lake_mask)]
        den = not_lake.min()
        mean_den = not_lake.mean()
    else:
        den = ALL_LAKE
        mean_den = ALL_LAKE
    print("Minumum density            : %.2f" % den)

    wkt = constants.tilename_to_extent(kmname, return_wkt=True)
    reporter.report(kmname, den, mean_den, cell_size, wkt_geom=wkt)

    return 0
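The nested loop in main() tests every point against every cell, which is O(nx * ny * n_points). The same per-cell counts can be computed in a single pass with np.histogram2d; the sketch below is an alternative, not part of the original script, and assumes the same tile extent, cell size and north-up orientation as the GTiff written above:

import numpy as np

def density_grid(xs, ys, x_min, y_max, cell_size, nx, ny):
    # histogram2d bins xs along the first axis and ys along the second, so the
    # counts are transposed and flipped to get rows running north to south,
    # matching the geotransform (x_min, cell_size, 0, y_max, 0, -cell_size).
    counts, _, _ = np.histogram2d(
        xs, ys,
        bins=[nx, ny],
        range=[[x_min, x_min + nx * cell_size],
               [y_max - ny * cell_size, y_max]],
    )
    return np.flipud(counts.T) / (cell_size * cell_size)

# Usage in place of the double loop:
# den_grid = density_grid(xs, ys, x_min, y_max, cell_size, nx, ny)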
Example #4
def setup_masks(fargs, nrows, ncols, georef):
    '''
    Set up masks for water and buildings

    Arguments:
        fargs:          Arguments from layer definitions.
        nrows:          Number of rows in masks.
        ncols:          Number of columns in masks.
        georef:         Georeference for masks.

    Returns:
        water_mask, lake_raster, sea_mask and build_mask
    '''
    water_mask = np.zeros((nrows, ncols), dtype=bool)
    lake_raster = None
    sea_mask = None
    build_mask = None

    if fargs["LAKE_LAYER"] is not None:
        map_cstr, sql = fargs["LAKE_LAYER"]
        water_mask |= vector_io.burn_vector_layer(
            map_cstr,
            georef,
            (nrows, ncols),
            layersql=sql)

    if fargs["LAKE_Z_LAYER"] is not None:
        map_cstr, sql = fargs["LAKE_Z_LAYER"]
        lake_raster = vector_io.burn_vector_layer(
            map_cstr,
            georef,
            (nrows, ncols),
            layersql=sql,
            nd_val=ND_VAL,
            attr=fargs["LAKE_Z_ATTR"],
            dtype=np.float32)

    if fargs["RIVER_LAYER"] is not None:
        map_cstr, sql = fargs["RIVER_LAYER"]
        water_mask |= vector_io.burn_vector_layer(
            map_cstr,
            georef,
            (nrows, ncols),
            layersql=sql)

    if fargs["SEA_LAYER"] is not None:
        map_cstr, sql = fargs["SEA_LAYER"]
        sea_mask = vector_io.burn_vector_layer(
            map_cstr,
            georef,
            (nrows, ncols),
            layersql=sql)
        water_mask |= sea_mask

    if fargs["BUILD_LAYER"] is not None:
        map_cstr, sql = fargs["BUILD_LAYER"]
        build_mask = vector_io.burn_vector_layer(
            map_cstr,
            georef,
            (nrows, ncols),
            layersql=sql)

    return water_mask, lake_raster, sea_mask, build_mask
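setup_masks looks up each layer key unconditionally, so callers must supply all of them even when a layer is unused. A hypothetical call with every layer switched off (a configured layer would instead be a (connection_string, layersql) tuple, as in the if-blocks above):

nrows, ncols = 100, 100
georef = (512000.0, 1.0, 0, 6224000.0, 0, -1.0)  # hypothetical 1 m geotransform

fargs = {
    "LAKE_LAYER": None,
    "LAKE_Z_LAYER": None,
    "LAKE_Z_ATTR": None,   # only read when LAKE_Z_LAYER is set
    "RIVER_LAYER": None,
    "SEA_LAYER": None,
    "BUILD_LAYER": None,
}

water_mask, lake_raster, sea_mask, build_mask = setup_masks(fargs, nrows, ncols, georef)
# water_mask is an all-False boolean array; lake_raster, sea_mask and build_mask
# are None because no corresponding layer was configured.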