Example 1
 kmname = constants.get_tilename(pargs.las_file)
 print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
 lasname = pargs.las_file
 polyname = pargs.poly_data
 use_local = pargs.use_local
 if pargs.schema is not None:
     report.set_schema(pargs.schema)
 reporter = report.ReportBuildingAbsposCheck(use_local)
 ##################################
 pc = pointcloud.fromAny(lasname).cut_to_class(cut_to_classes)
 try:
     extent = np.asarray(constants.tilename_to_extent(kmname))
 except Exception:
     print("Could not get extent from tilename.")
     extent = None
 polys = vector_io.get_geometries(polyname, pargs.layername, pargs.layersql,
                                  extent)
 fn = 0
 sl = "-" * 65
 for poly in polys:
     n_corners_found = 0
     fn += 1
     print("%s\nChecking feature %d\n%s\n" % (sl, fn, sl))
     a_poly = array_geometry.ogrgeom2array(poly)
     pcp = pc.cut_to_polygon(a_poly)
     if pcp.get_size() < 500:
         print("Few points in polygon...")
         continue
     a_poly = a_poly[0]
     all_post = np.zeros_like(a_poly)  # array of vertices found
     all_pre = np.zeros_like(a_poly)  # array of vertices in polygon, corresponding to found...
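
All of these examples derive a processing extent from the tile name via constants.tilename_to_extent. A minimal sketch of what such a helper could look like for a 1 km tile grid follows; the exact naming convention used by constants is an assumption here, not taken from the source.

import re
import numpy as np


def tilename_to_extent_sketch(kmname):
    # Assumed naming scheme: "1km_NNNN_EEE" with northing/easting given in km.
    m = re.match(r"1km_(\d+)_(\d+)$", kmname)
    if m is None:
        raise ValueError("Not a recognized 1 km tile name: %s" % kmname)
    n_km, e_km = int(m.group(1)), int(m.group(2))
    xmin, ymin = e_km * 1000.0, n_km * 1000.0
    return np.array([xmin, ymin, xmin + 1000.0, ymin + 1000.0])

# e.g. tilename_to_extent_sketch("1km_6173_632") -> [632000. 6173000. 633000. 6174000.]
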
Example 2
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.dem_tile)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    extent = np.asarray(constants.tilename_to_extent(kmname))

    shoes = vector_io.get_geometries(pargs.horse_ds, pargs.layername,
                                     pargs.layersql, extent)
    outname = os.path.join(pargs.outdir, "dhym_" + kmname + ".tif")
    if len(shoes) == 0:
        print("No shoes, man!")
        shutil.copy(pargs.dem_tile, outname)
        return 0
    # We always interpolate values from the large dataset (vrt) which is not changed in the loop below.
    dtm = grid.fromGDAL(pargs.dem_tile)
    mesh_xy = pointcloud.mesh_as_points(dtm.shape, dtm.geo_ref)
    dem_ds = gdal.Open(pargs.dem_all)
    dem_band = dem_ds.GetRasterBand(1)
    ndval = dem_band.GetNoDataValue()
    georef = np.asarray(dem_ds.GetGeoTransform())

    #if True:
    #  import matplotlib
    #  matplotlib.use("Qt4Agg")
    #  import matplotlib.pyplot as plt

    for shoe in shoes:
        arr = array_geometry.ogrline2array(shoe, flatten=True)
        assert (arr.shape[0] == 4)

        # okie dokie - now load a small raster around the horseshoe
        # the shoes can have quite long 'sides' (extruders),
        # however the two 'ends' should be small enough to keep in
        # memory - so load two grids along the two 'ends'
        cm, scale, nsteps, H, Hinv = get_transformation_params(arr)
        small_grids = []
        for e in ((0, 3), (1, 2)):
            xy = arr[e, :]  # take the corresponding edge
            ll = xy.min(axis=0)
            ur = xy.max(axis=0)

            # map to pixel-space
            ll_pix = grid.user2array(georef, ll)
            ur_pix = grid.user2array(georef, ur)
            xwin, mywin = (ur_pix - ll_pix)  #negative ywin

            # Buffer grid slightly - can do with less I suppose...
            xoff = max(0, int(ll_pix[0]) - 2)
            yoff = max(0, int(ur_pix[1]) - 2)
            xwin = min(int(xwin + 1), dem_ds.RasterXSize - xoff - 4) + 4
            ywin = min(int(1 - mywin), dem_ds.RasterYSize - yoff - 4) + 4
            # If not completely contained in large raster - continue??
            assert (xoff >= 0 and yoff >= 0 and xwin >= 1 and ywin >= 1)  #hmmm
            piece = dem_band.ReadAsArray(xoff, yoff, xwin,
                                         ywin).astype(np.float64)

            # What to do with nodata-values??
            N = (piece == ndval)
            if N.any():
                print("WARNING: setting nodata values to 0!!!")
                piece[N] = 0

            piece_georef = georef.copy()
            piece_georef[0] += xoff * georef[1]
            piece_georef[3] += yoff * georef[5]
            small_grids.append(grid.Grid(piece, piece_georef, ndval))

        # Make sure that the grid is 'fine' enough - since the projective transformation
        # will distort distances across the lines we want to subdivide
        cs = 1 / float(nsteps)

        # check numerical diff
        moved = np.array(((0, cs), (1, cs), (1, 1 - cs), (0, 1 - cs)))
        tmoved = inverse_transform(moved, cm, scale, Hinv)
        delta = arr - tmoved
        ndelta = np.sqrt(np.sum(delta**2, axis=1))
        nrows = int(nsteps * ndelta.max()) + 1

        # construct the two vertical lines, along the two 'ends', in projective space
        hspace, cs = np.linspace(1, 0, nrows, endpoint=True, retstep=True)
        cs = -cs
        l1 = np.zeros((nrows, 2), dtype=np.float64)
        l1[:, 1] = hspace
        l2 = np.ones((nrows, 2), dtype=np.float64)
        l2[:, 1] = hspace
        tl1 = inverse_transform(l1, cm, scale, Hinv)
        tl2 = inverse_transform(l2, cm, scale, Hinv)
        z1 = small_grids[0].interpolate(tl1)
        z2 = small_grids[1].interpolate(tl2)
        assert ((z1 != ndval).all())
        assert ((z2 != ndval).all())

        # now construct a pseudo-grid in 'projective space'
        Z = np.column_stack((z1, z2))
        pseudo_georef = [-0.5, 1.0, 0, 1 + 0.5 * cs, 0, -cs]
        pseudo_grid = grid.Grid(Z, pseudo_georef, ndval)

        # Transform input points!
        # first cut to bounding box of shoe
        M = np.logical_and(mesh_xy >= arr.min(axis=0),
                           mesh_xy <= arr.max(axis=0)).all(axis=1)
        print("Number of points in bb: %d" % M.sum())

        xy_small = mesh_xy[M]
        txy = transform(xy_small, cm, scale, H)
        N = np.logical_and(txy >= 0, txy <= 1).all(axis=1)
        xy_in_grid = txy[N]
        print("Number of points in shoe: %d" % xy_in_grid.shape[0])
        new_z = pseudo_grid.interpolate(xy_in_grid)

        # Construct new mask as N is 'relative' to M
        MM = np.zeros(mesh_xy.shape[0], dtype=bool)
        MM[M] = N
        MM = MM.reshape(dtm.shape)
        dtm.grid[MM] = new_z

        # OLD STUFF FOR TRIANGULATION APPROACH
        # N1 = np.arange(0,nsteps-1)
        # N2 = N1 + 1
        # N3 = N1+nsteps
        # N4 = N3+1
        # T1 = np.column_stack((N1,N3,N4))
        # T2 = np.column_stack((N4,N2,N1))
        # T  = np.vstack((T1,T2))
        #
        # plt.figure()
        # plt.triplot(xy[:,0], xy[:,1], T)
        # plt.plot(arr[:,0], arr[:,1],    color = "green")
        # plt.plot(l1[:,0], l1[:,1], ".", color = "blue", ms = 10)
        # plt.plot(l2[:,0], l2[:,1], ".", color = "red",  ms = 10)
        # plt.show()
    dtm.save(outname,
             dco=["TILED=YES", "COMPRESS=DEFLATE", "PREDICTOR=3", "ZLEVEL=9"])
Example 3
# a usage function will be imported by the wrapper to print usage for tests - otherwise ArgumentParser will handle that...
def usage():
    parser.print_help()


def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1
    kmname = constants.get_tilename(pargs.dem_tile)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    extent = np.asarray(constants.tilename_to_extent(kmname))
    shoes = vector_io.get_geometries(pargs.horse_ds, pargs.layername,
                                     pargs.layersql, extent)
    outname = os.path.join(pargs.outdir, "dhym_lines_" + kmname + ".tif")
    if len(shoes) == 0:
        print("No shoes, man!")
        shutil.copy(pargs.dem_tile, outname)
        return 0
    # We always interpolate values from the large dataset (vrt), which is not changed in the loop below.
    dtm = grid.fromGDAL(pargs.dem_tile)
    cs = dtm.geo_ref[1]
    mesh_xy = pointcloud.mesh_as_points(dtm.shape, dtm.geo_ref)
    dem_ds = gdal.Open(pargs.dem_all)
    dem_band = dem_ds.GetRasterBand(1)
    ndval = dem_band.GetNoDataValue()
    georef = np.asarray(dem_ds.GetGeoTransform())
    m_drv = ogr.GetDriverByName("Memory")
    line_ds = m_drv.CreateDataSource("dummy")
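
The snippet above is cut off just after creating the in-memory OGR datasource. A hedged sketch of how a 3D line layer is typically populated with that driver follows; this is generic GDAL/OGR usage with made-up coordinates, not necessarily what the omitted code does:

from osgeo import ogr

m_drv = ogr.GetDriverByName("Memory")
line_ds = m_drv.CreateDataSource("dummy")
layer = line_ds.CreateLayer("lines", None, ogr.wkbLineString25D)  # srs omitted here
feat = ogr.Feature(layer.GetLayerDefn())
geom = ogr.Geometry(ogr.wkbLineString25D)
geom.AddPoint(512000.0, 6225000.0, 42.0)  # hypothetical coordinates
geom.AddPoint(512010.0, 6225005.0, 42.5)
feat.SetGeometry(geom)
layer.CreateFeature(feat)
print(layer.GetFeatureCount())  # 1
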
Example 4
def main(args):
    pargs = parser.parse_args(args[1:])
    lasname = pargs.las_file
    polyname = pargs.build_polys
    kmname = constants.get_tilename(lasname)
    print("Running %s on block: %s, %s" %
          (os.path.basename(args[0]), kmname, time.asctime()))
    use_local = pargs.use_local
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportRoofridgeCheck(use_local)
    cut_class = pargs.cut_class
    print("Using class(es): %s" % (cut_class))
    # default step values for search...
    steps1 = 32
    steps2 = 14
    search_factor = pargs.search_factor
    if search_factor != 1:
        # can turn search steps up or down
        steps1 = int(search_factor * steps1)
        steps2 = int(search_factor * steps2)
        print("Incresing search factor by: %.2f" % search_factor)
        print(
            "Running time will increase exponentionally with search factor...")
    pc = pointcloud.fromAny(lasname).cut_to_class(cut_class).cut_to_z_interval(
        Z_MIN, Z_MAX)
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None
    polys = vector_io.get_geometries(polyname, pargs.layername, pargs.layersql,
                                     extent)
    fn = 0
    sl = "+" * 60
    is_sloppy = pargs.sloppy
    use_all = pargs.use_all
    for poly in polys:
        print(sl)
        fn += 1
        print("Checking feature number %d" % fn)
        a_poly = array_geometry.ogrgeom2array(poly)
        # secret argument to use all buildings...
        if (len(a_poly) > 1 or
                a_poly[0].shape[0] != 5) and (not use_all) and (not is_sloppy):
            print("Only houses with 4 corners accepted... continuing...")
            continue
        pcp = pc.cut_to_polygon(a_poly)
        # hmmm, these consts should perhaps be made more visible...
        if (pcp.get_size() < 500 and (not is_sloppy)) or (pcp.get_size() < 10):
            print("Few points in polygon...")
            continue
        # Go to a more numerically stable coord system - from now on only consider outer ring...
        a_poly = a_poly[0]
        xy_t = a_poly.mean(axis=0)
        a_poly -= xy_t
        pcp.xy -= xy_t
        pcp.triangulate()
        geom = pcp.get_triangle_geometry()
        m = geom[:, 1].mean()
        sd = geom[:, 1].std()
        if (m > 1.5 or 0.5 * sd > m) and (not is_sloppy):
            print("Feature %d, bad geometry...." % fn)
            print(m, sd)
            continue
        planes = cluster(pcp, steps1, steps2)
        if len(planes) < 2:
            print("Feature %d, didn't find enough planes..." % fn)
        pair, equation = find_planar_pairs(planes)
        if pair is not None:
            p1 = planes[pair[0]]
            p2 = planes[pair[1]]
            z1 = p1[0] * pcp.xy[:, 0] + p1[1] * pcp.xy[:, 1] + p1[2]
            z2 = p2[0] * pcp.xy[:, 0] + p2[1] * pcp.xy[:, 1] + p2[2]
            print("%s" % ("*" * 60))
            print("Statistics for feature %d" % fn)
            if DEBUG:
                plot3d(pcp.xy, pcp.z, z1, z2)
            intersections, distances, rotations = get_intersections(
                a_poly, equation)
            if intersections.shape[0] == 2:
                line_x = intersections[:, 0]
                line_y = intersections[:, 1]
                z_vals = (p1[0] * intersections[:, 0] +
                          p1[1] * intersections[:, 1] + p1[2])
                if abs(z_vals[0] - z_vals[1]) > 0.01:
                    print("Numeric instability for z-calculation...")
                z_val = float(np.mean(z_vals))
                print("Z for intersection is %.2f m" % z_val)
                if abs(equation[1]) > 1e-3:
                    a = -equation[0] / equation[1]
                    b = equation[2] / equation[1]
                    line_y = a * line_x + b
                elif abs(equation[0]) > 1e-3:
                    a = -equation[1] / equation[0]
                    b = equation[2] / equation[0]
                    line_x = a * line_y + b
                if DEBUG:
                    plot_intersections(a_poly, intersections, line_x, line_y)
                # transform back to real coords
                line_x += xy_t[0]
                line_y += xy_t[1]
                wkt = "LINESTRING(%.3f %.3f %.3f, %.3f %.3f %.3f)" % (
                    line_x[0], line_y[0], z_val, line_x[1], line_y[1], z_val)
                print("WKT: %s" % wkt)
                reporter.report(kmname,
                                rotations[0],
                                distances[0],
                                distances[1],
                                wkt_geom=wkt)
            else:
                print(
                    "Hmmm - something wrong, didn't get exactly two intersections..."
                )
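
The branch on equation[1] versus equation[0] simply rewrites the ridge line A*x + B*y = C in whichever explicit form is numerically safe. A small standalone version of that logic, assuming the [A, B, C] convention the code implies:

import numpy as np


def eval_ridge_line(equation, line_x, line_y, eps=1e-3):
    # equation = [A, B, C] for the line A*x + B*y = C (convention inferred above)
    A, B, C = equation
    if abs(B) > eps:
        line_y = (-A / B) * line_x + C / B  # y as a function of x
    elif abs(A) > eps:
        line_x = (-B / A) * line_y + C / A  # x as a function of y
    return line_x, line_y

x, y = eval_ridge_line([1.0, 2.0, 4.0], np.array([0.0, 2.0]), np.array([0.0, 0.0]))
print(x, y)  # x unchanged, y = [2. 1.] since x + 2*y = 4
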
Example 5
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.dem_tile)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    extent = np.asarray(constants.tilename_to_extent(kmname))

    outname = os.path.join(pargs.outdir, "dhym_" + kmname + ".tif")

    if os.path.exists(outname) and not pargs.overwrite:
        print("File already exists - skipping...")
        return 0

    # We always interpolate values from the large dataset (vrt) which is not changed in the loop below.
    dtm = grid.fromGDAL(pargs.dem_tile)
    dem_ds = gdal.Open(pargs.dem_all)
    dem_band = dem_ds.GetRasterBand(1)
    ndval = dem_band.GetNoDataValue()
    georef = np.asarray(dem_ds.GetGeoTransform())
    cell_res = min(georef[1], -georef[5])  # the minimal cell size

    # get the geometries!
    # a list of (geometry_as_np_array, 'grid1', 'grid2') - the grids should be objects with an
    # interpolate method. The first grid represents the line 0-3, the second the line 1-2.
    # And yes, keep all that in memory. Buy some more memory if you need it, man!
    stuff_to_handle = []
    # get relevant geometries and process 'em
    if pargs.horsesql is not None:
        # fetch the horseshoes
        shoes = vector_io.get_geometries(pargs.vector_ds, layersql=pargs.horsesql,
                                         extent=extent)
        # for the horseshoes we want to read z from the dtm!
        for shoe in shoes:
            arr = array_geometry.ogrline2array(shoe, flatten=True)
            assert arr.shape[0] == 4
            # the 'small' dtm piece along the line from p0 to p3
            g1 = get_dtm_piece(arr[(0, 3), :], dem_band, georef, ndval)
            # the 'small' dtm piece along the line from p1 to p2
            g2 = get_dtm_piece(arr[(1, 2), :], dem_band, georef, ndval)
            stuff_to_handle.append((arr, g1, g2))
        del shoes
    for sql, own_z in ((pargs.linesql_own_z, True), (pargs.linesql_dtm_z, False)):
        # handle the two line types in one go...
        if sql is not None:
            # fetch the 3d lines
            lines = vector_io.get_geometries(pargs.vector_ds, layersql=sql,
                                             extent=extent)
            print("%d features in %s" % (len(lines), sql))
            for line in lines:
                arr = array_geometry.ogrline2array(line, flatten=not own_z)
                if own_z:
                    assert arr.shape[1] == 3  # should be a 3d geometry!!!
                    z = arr[:, 2]
                # construct a horseshoe per line segment!
                # Should we assert that there are exactly two points?
                # We can handle the general case, but it gets tricky to determine what the
                # interpolation should mean (z1 and z2 from the endpoints of the linestring,
                # interpolating via relative length?)
                xy = arr[:, :2]
                n = xy.shape[0] - 1  # number of segments
                # SO: for now assert that n == 1, only one segment. Else modify what the
                # grids should be for the 'inner' vertices...
                assert n == 1
                # vectorize - even though there should be only one segment. Will handle
                # the general case...
                # displacement vectors - probably too broad with all_touched!!!
                N = array_geometry.linestring_displacements(xy) * cell_res
                buf_left = xy + N
                buf_right = xy - N
                for i in range(n):  # be prepared to handle the general case!!!
                    # a horseshoe which is open in the 'first end'
                    shoe = np.vstack((buf_left[i], buf_left[i + 1],
                                      buf_right[i + 1], buf_right[i]))
                    if own_z:
                        g1 = ConstantGrid(z[i])
                        g2 = ConstantGrid(z[i + 1])
                    else:
                        # the 'small' dtm piece along the line from p0 to p3
                        g1 = get_dtm_piece(shoe[(0, 3), :], dem_band, georef, ndval)
                        # the 'small' dtm piece along the line from p1 to p2
                        g2 = get_dtm_piece(shoe[(1, 2), :], dem_band, georef, ndval)
                    stuff_to_handle.append((shoe, g1, g2))
            del lines
    if len(stuff_to_handle) == 0:
        print("No features to burn, copying dtm...")
        shutil.copy(pargs.dem_tile, outname)
        dem_band = None
        dem_ds = None
        return 0
    t1 = time.time()
    if pargs.burn_as_lines:
        print("Burning as lines...")
        m_drv = ogr.GetDriverByName("Memory")
        line_ds = m_drv.CreateDataSource("dummy")
        layer = line_ds.CreateLayer("lines", osr.SpatialReference(dtm.srs),
                                    ogr.wkbLineString25D)
        # will add 3d lines to layer (in place) - increase resolution to
        # cell_res*0.8 for fewer lines
        create_3d_lines(stuff_to_handle, layer, cell_res * 0.6, ndval)
        print("Number of lines: %d" % layer.GetFeatureCount())
        # ok - layer created, burn it!!
        layer.ResetReading()
        arr = vector_io.just_burn_layer(layer, dtm.geo_ref, dtm.shape, nd_val=ndval,
                                        dtype=np.float32, all_touched=True,
                                        burn3d=True)
        M = (arr != ndval)
        assert M.any()
        if pargs.debug:
            drv = ogr.GetDriverByName("SQLITE")
            drv.CopyDataSource(line_ds,
                               os.path.join(pargs.outdir, "dalines_" + kmname + ".sqlite"))
        layer = None
        line_ds = None
        dtm.grid[M] = arr[M]
    else:
        mesh_xy = pointcloud.mesh_as_points(dtm.shape, dtm.geo_ref)
        print("Burning using projective transformation...")
        burn_projective(stuff_to_handle, dtm, cell_res, ndval, mesh_xy)
    t2 = time.time()
    print("Burning took: %.3fs" % (t2 - t1))
    dtm.save(outname,
             dco=["TILED=YES", "COMPRESS=DEFLATE", "PREDICTOR=3", "ZLEVEL=9"])
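
ConstantGrid is used above but not shown; the comments only require the grid objects to expose an interpolate method returning a z value per query point. A minimal sketch of such a class, an assumption rather than the project's actual implementation:

import numpy as np


class ConstantGrid(object):
    """Grid-like object returning one fixed z for any query point."""

    def __init__(self, z):
        self.z = float(z)

    def interpolate(self, xy):
        # xy is an (n, 2) array of points - return the constant z for each of them
        return np.full(np.asarray(xy).shape[0], self.z)

print(ConstantGrid(42.0).interpolate(np.zeros((3, 2))))  # [42. 42. 42.]
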
Example 6
def main(args):
    pargs = parser.parse_args(args[1:])
    lasname = pargs.las_file
    polyname = pargs.build_polys
    kmname = constants.get_tilename(lasname)
    print("Running %s on block: %s, %s" % (os.path.basename(args[0]), kmname, time.asctime()))
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportRoofridgeStripCheck(pargs.use_local)
    cut_class = pargs.cut_class
    # default step values for search...
    steps1 = 30
    steps2 = 13
    search_factor = pargs.search_factor
    if search_factor != 1:
        # can turn search steps up or down
        steps1 = int(search_factor * steps1)
        steps2 = int(search_factor * steps2)
        print("Incresing search factor by: %.2f" % search_factor)
        print("Running time will increase exponentionally with search factor...")
    pc = pointcloud.fromAny(lasname).cut_to_class(cut_class).cut_to_z_interval(Z_MIN, Z_MAX)
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None
    polys = vector_io.get_geometries(polyname, pargs.layername, pargs.layersql, extent)
    fn = 0
    sl = "+" * 60
    is_sloppy = pargs.sloppy
    use_all = pargs.use_all
    for poly in polys:
        print(sl)
        fn += 1
        print("Checking feature number %d" % fn)
        a_poly = array_geometry.ogrgeom2array(poly)
        # secret argument to use all buildings...
        if (len(a_poly) > 1 or a_poly[0].shape[0] != 5) and (not use_all) and (not is_sloppy):
            print("Only houses with 4 corners accepted... continuing...")
            continue
        pcp = pc.cut_to_polygon(a_poly)
        strips = pcp.get_pids()
        if len(strips) != 2:
            print("Not exactly two overlapping strips... continuing...")
            continue
        # Go to a more numerically stable coord system - from now on only consider outer ring...
        a_poly = a_poly[0]
        xy_t = a_poly.mean(axis=0)  # center of mass system
        a_poly -= xy_t
        lines = []  # for storing the two found lines...
        for sid in strips:
            print("-*-" * 15)
            print("Looking at strip %d" % sid)
            pcp_ = pcp.cut_to_strip(sid)
            # hmmm, these consts should perhaps be made more visible...
            if (pcp_.get_size() < 500 and (not is_sloppy)) or (pcp_.get_size() < 10):
                print("Few points in polygon... %d" % pcp_.get_size())
                continue
            pcp_.xy -= xy_t
            pcp_.triangulate()
            geom = pcp_.get_triangle_geometry()
            m = geom[:, 1].mean()
            sd = geom[:, 1].std()
            if (m > 1.5 or 0.5 * sd > m) and (not is_sloppy):
                print("Feature %d, strip %d, bad geometry...." % (fn, sid))
                break
            planes = cluster(pcp_, steps1, steps2)
            if len(planes) < 2:
                print("Feature %d, strip %d, didn't find enough planes..." % (fn, sid))
            pair, equation = find_planar_pairs(planes)
            if pair is not None:
                p1 = planes[pair[0]]
                print("%s" % ("*" * 60))
                print("Statistics for feature %d" % fn)

                # Now we need to find some points on the line near the house... (0,0) is
                # the center of mass
                norm_normal = equation[0]**2 + equation[1]**2
                if norm_normal < 1e-10:
                    print("Numeric instablity, small normal")
                    break
                # this should be on the line
                cm_line = np.asarray(equation[:2]) * (equation[2] / norm_normal)
                line_dir = np.asarray((-equation[1], equation[0])) / (sqrt(norm_normal))
                end1 = cm_line + line_dir * LINE_RAD
                end2 = cm_line - line_dir * LINE_RAD
                intersections = np.vstack((end1, end2))
                line_x = intersections[:, 0]
                line_y = intersections[:, 1]
                z_vals = p1[0] * intersections[:, 0] + p1[1] * intersections[:, 1] + p1[2]
                if abs(z_vals[0] - z_vals[1]) > 0.01:
                    print("Numeric instabilty for z-calculation...")
                z_val = float(np.mean(z_vals))
                print("Z for intersection is %.2f m" % z_val)
                # transform back to real coords
                line_x += xy_t[0]
                line_y += xy_t[1]
                wkt = "LINESTRING(%.3f %.3f %.3f, %.3f %.3f %.3f)" % (
                    line_x[0], line_y[0], z_val, line_x[1], line_y[1], z_val)
                print("WKT: %s" % wkt)
                lines.append([sid, wkt, z_val, cm_line, line_dir])

        if len(lines) == 2:
            # check for parallelism
            id1 = lines[0][0]
            id2 = lines[1][0]
            z1 = lines[0][2]
            z2 = lines[1][2]
            if abs(z1 - z2) > 0.5:
                print("Large difference in z-values for the two lines!")
            else:
                ids = "{0:d}_{1:d}".format(id1, id2)
                inner_prod = (lines[0][4] * lines[1][4]).sum()
                inner_prod = max(-1, inner_prod)
                inner_prod = min(1, inner_prod)

                if DEBUG:
                    print("Inner product: %.4f" % inner_prod)

                ang = abs(degrees(acos(inner_prod)))
                if ang > 175:
                    ang = abs(180 - ang)
                if ang < 15:
                    v = (lines[0][3] - lines[1][3])
                    d = np.sqrt((v**2).sum())
                    if d < 5:
                        for line in lines:
                            reporter.report(kmname, id1, id2, ids, d, ang,
                                            line[2], wkt_geom=line[1])
                    else:
                        print("Large distance between centers %s, %s, %.2f" %
                              (lines[0][3], lines[1][3], d))
                else:
                    print("Pair found - but not very well aligned - angle: %.2f" % ang)
        else:
            print("Pair not found...")
Example 7
    kmname = constants.get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    lasname = pargs.las_file
    polyname = pargs.poly_data
    use_local = pargs.use_local
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportBuildingRelposCheck(use_local)
    ##################################
    pc = pointcloud.fromAny(lasname).cut_to_z_interval(-10, 200).cut_to_class(
        cut_to_classes)
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None
    polys = vector_io.get_geometries(polyname)
    fn = 0
    sl = "-" * 65
    pcs = dict()
    for id in pc.get_pids():
        print("%s\n" % ("+" * 70))
        print("Strip id: %d" % id)
        pc_ = pc.cut_to_strip(id)
        if pc_.get_size() > 500:
            pcs[id] = pc_
        else:
            print("Not enough points....")
    del pc
    done = []
    for id1 in pcs:
        pc1 = pcs[id1]
Example 8
 try:
     extent = np.asarray(constants.tilename_to_extent(kmname))
 except Exception:
     print("Could not get extent from tilename.")
     extent = None
 pc_ref = None  #base reference pointcloud
 pc_refs = []  #list of possibly 'cropped' pointclouds...
 if pargs.multipoints:
     ftype = "multipoints"
     explode = False
 elif pargs.lines:
     ftype = "lines"
     explode = True
 geoms = vector_io.get_geometries(pointname,
                                  pargs.layername,
                                  pargs.layersql,
                                  extent,
                                  explode=explode)
 for geom in geoms:
     xyz = array_geometry.ogrgeom2array(geom, flatten=False)
     if xyz.shape[0] > 0:
         pc_refs.append(pointcloud.Pointcloud(xyz[:, :2], xyz[:, 2]))
 print("Found %d non-empty geometries" % len(pc_refs))
 if len(pc_refs) == 0:
     print("No input geometries in intersection...")
 if pargs.ftype is not None:
     ftype = pargs.ftype
 cut_input_to = pargs.cut_to
 print("Cutting input pointcloud to class %d" % cut_input_to)
 pc = pointcloud.fromAny(lasname).cut_to_class(
     cut_input_to)  #what to cut to here...??
Example 9
                layername=None,
                layersql=None):
    is_roads = buffer_dist is not None  # 'hacky' signal that it's roads we're checking
    print("Starting zcheck_base run at %s" % time.asctime())
    tstart = time.clock()
    kmname = constants.get_tilename(lasname)
    pc = pointcloud.fromAny(lasname)
    t2 = time.clock()
    tread = t2 - tstart
    print("Reading data took %.3f ms" % (tread * 1e3))
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None
    geometries = vector_io.get_geometries(vectorname, layername, layersql,
                                          extent)
    pcs = dict()
    for id in pc.get_pids():
        print("%s\n" % ("+" * 70))
        print("Strip id: %d" % id)
        pc_ = pc.cut_to_strip(id).cut_to_class(cut_class)
        if pc_.get_size() > 50:
            pcs[id] = pc_
            pcs[id].triangulate()
            pcs[id].calculate_validity_mask(angle_tolerance, xy_tolerance,
                                            z_tolerance)
        else:
            print("Not enough points....")

    del pc
    done = []
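
The fragment stops right after building the per-strip pointcloud dictionary and the done list; the usual continuation is a pairwise comparison of overlapping strips. A hedged sketch of such a pair loop using itertools - the actual continuation of zcheck_base is not shown here:

import itertools

pcs = {1: "pc_strip_1", 2: "pc_strip_2", 3: "pc_strip_3"}  # stand-ins for per-strip pointclouds

# visit each unordered strip pair exactly once
for id1, id2 in itertools.combinations(sorted(pcs), 2):
    pc1, pc2 = pcs[id1], pcs[id2]
    print("Comparing strip %d against strip %d" % (id1, id2))
    # ... compare overlapping z-values between the two strips and report statistics ...
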
Example 10
def main(args):
    '''
    Run road delta check. Invoked from either command line or qc_wrap.py
    '''
    pargs = parser.parse_args(args[1:])
    lasname = pargs.las_file
    linename = pargs.lines
    kmname = constants.get_tilename(lasname)
    print("Running %s on block: %s, %s" % (os.path.basename(args[0]), kmname, time.asctime()))
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportDeltaRoads(pargs.use_local)
    cut_class = pargs.cut_class
    pc = pointcloud.fromAny(lasname).cut_to_class(cut_class)
    if pc.get_size() < 5:
        print("Too few points to bother..")
        return 1

    pc.triangulate()
    geom = pc.get_triangle_geometry()
    print("Using z-steepnes limit {0:.2f} m".format(pargs.zlim))
    mask = np.logical_and(geom[:, 1] < XY_MAX, geom[:, 2] > pargs.zlim)
    geom = geom[mask]  # save for reporting
    if not mask.any():
        print("No steep triangles found...")
        return 0

    # only the centers of the interesting triangles
    centers = pc.triangulation.get_triangle_centers()[mask]
    print("{0:d} steep triangles in tile.".format(centers.shape[0]))
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None

    lines = vector_io.get_geometries(linename, pargs.layername, pargs.layersql, extent)
    feature_count = 0
    for line in lines:
        xy = array_geometry.ogrline2array(line, flatten=True)
        if xy.shape[0] == 0:
            print("Seemingly an unsupported geometry...")
            continue

        # select the triangle centers which lie within line_buffer of the road segment
        mask = array_geometry.points_in_buffer(centers, xy, LINE_BUFFER)
        critical = centers[mask]

        print("*" * 50)
        print("{0:d} steep centers along line {1:d}".format(critical.shape[0], feature_count))
        feature_count += 1

        if critical.shape[0] > 0:
            z_box = geom[mask][:, 2]
            z1 = z_box.max()
            z2 = z_box.min()
            wkt = "MULTIPOINT("
            for point in critical:
                wkt += "{0:.2f} {1:.2f},".format(point[0], point[1])
            wkt = wkt[:-1] + ")"
            reporter.report(kmname, z1, z2, wkt_geom=wkt)
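
The MULTIPOINT WKT above is built by string concatenation followed by stripping the trailing comma; an equivalent, slightly tidier formulation with str.join is sketched below (same output format, purely illustrative, with hypothetical coordinates):

critical = [(512000.25, 6225000.75), (512010.10, 6225005.50)]  # hypothetical steep triangle centers
wkt = "MULTIPOINT(%s)" % ",".join("{0:.2f} {1:.2f}".format(x, y) for x, y in critical)
print(wkt)  # MULTIPOINT(512000.25 6225000.75,512010.10 6225005.50)
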
Example 11
    if pargs.type is not None:
        ptype = pargs.type
    else:
        ptype = "undefined"
    if below_poly:
        print("Only using points which lie below polygon mean z!")
    pc = pointcloud.fromAny(pargs.las_file)
    print("Classes in pointcloud: %s" % pc.get_classes())

    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None

    polygons = vector_io.get_geometries(pargs.ref_data, pargs.layername, pargs.layersql, extent)
    feature_count = 0
    use_local = pargs.use_local

    if pargs.schema is not None:
        report.set_schema(pargs.schema)

    reporter = report.ReportClassCheck(use_local)
    for polygon in polygons:
        if below_poly:
            if polygon.GetCoordinateDimension() < 3:
                print("Error: polygon not 3D - below_poly does not make sense!")
                continue
            a_polygon3d = array_geometry.ogrpoly2array(polygon, flatten=False)[0]
            #warping loop here....
            if pargs.toE: