Example #1
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" % (progname,kmname,time.asctime()))

    out_file = os.path.join(pargs.out_dir, os.path.basename(pargs.las_file))

    if not os.path.exists(pargs.out_dir):
        print(pargs.out_dir)
        os.makedirs(os.path.abspath(pargs.out_dir))

    # create pipeline
    pipeline = 'temp_{tile}.json'.format(tile=kmname)
    create_pdal_pipeline(pargs, pipeline, out_file)

    # apply pipeline
    call = '{pdal_bin} pipeline {json_pipeline}'.format(pdal_bin=PDAL, json_pipeline=pipeline)
    subprocess.call(call)

    # clean up...
    os.remove(pipeline)
    return 0
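
The create_pdal_pipeline helper is not included in this snippet. As a rough sketch of what such a helper might write - assuming nothing more than a plain read-then-write PDAL pipeline - the JSON could be generated like this (the function name and arguments below are hypothetical):

# Hypothetical sketch, not the project's actual create_pdal_pipeline:
# write a minimal PDAL pipeline JSON with a LAS reader and a LAS writer.
import json

def create_pdal_pipeline_sketch(json_path, las_file, out_file):
    pipeline = {
        "pipeline": [
            las_file,  # readers.las is inferred from the file extension
            {"type": "writers.las", "filename": out_file},
        ]
    }
    with open(json_path, "w") as f:
        json.dump(pipeline, f, indent=2)
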
Example #2
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))

    tif_image = kmname + '.tif'
    out_file = os.path.join(pargs.out_dir, os.path.basename(pargs.las_file))

    if not os.path.exists(pargs.out_dir):
        print(pargs.out_dir)
        os.makedirs(os.path.abspath(pargs.out_dir))

    # Get image from WMS
    get_georef_image_wms(kmname, pargs.wms_url, pargs.wms_layer, tif_image,
                         pargs.px_size)

    # create pipeline
    pipeline = 'temp_{tile}.json'.format(tile=kmname)
    create_pdal_pipeline(pipeline, pargs.las_file, out_file, tif_image)

    # apply colorization filter with pdal translate
    call = '{pdal_bin} pipeline {json_pipeline}'.format(pdal_bin=PDAL,
                                                        json_pipeline=pipeline)
    subprocess.call(call)

    # clean up...
    os.remove(tif_image)
    os.remove(pipeline)
    return 0
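
The get_georef_image_wms helper is not shown either. A minimal sketch of the same idea, assuming a standard WMS 1.1.1 GetMap request, an EPSG:25832 tile extent and a server that can return image/tiff (names, signature and parameters below are illustrative, not the project's actual helper):

# Hypothetical sketch, not the project's actual get_georef_image_wms:
# fetch a georeferenced image for a tile extent with a WMS GetMap request.
import requests

def get_georef_image_wms_sketch(extent, wms_url, wms_layer, out_file, px_size):
    xmin, ymin, xmax, ymax = extent  # e.g. from constants.tilename_to_extent
    params = {
        "SERVICE": "WMS",
        "VERSION": "1.1.1",
        "REQUEST": "GetMap",
        "LAYERS": wms_layer,
        "SRS": "EPSG:25832",  # assumption: Danish UTM zone 32N
        "BBOX": "%.2f,%.2f,%.2f,%.2f" % (xmin, ymin, xmax, ymax),
        "WIDTH": int(round((xmax - xmin) / px_size)),
        "HEIGHT": int(round((ymax - ymin) / px_size)),
        "FORMAT": "image/tiff",  # assumes the server offers a tiff format
    }
    response = requests.get(wms_url, params=params, timeout=60)
    response.raise_for_status()
    with open(out_file, "wb") as f:
        f.write(response.content)
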
Example #3
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" %(progname, kmname, time.asctime()))

    if not os.path.exists(pargs.out_dir):
        os.mkdir(pargs.out_dir)

    outpath = os.path.join(pargs.out_dir, kmname + '.laz')

    cmd = ['laszip', '-i', pargs.las_file, '-o', outpath]

    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()

    if err:
        raise Exception(err)

    return 0
Example #4
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" %(progname,kmname,time.asctime()))
Example #5
def main(args):
    '''
    Main function, invoked from either command line or qc_wrap
    '''
    pargs = parser.parse_args(args[1:])
    lasname = pargs.las_file
    kmname = constants.get_tilename(lasname)
    msg = "Running %s on block: %s, %s"
    print(msg % (os.path.basename(args[0]), kmname, time.asctime()))

    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportSpikes(pargs.use_local)

    if pargs.zlim < 0:
        print("zlim must be positive!")
        usage()

    if pargs.slope < 0 or pargs.slope >= 90:
        print("Specify a slope angle in the range 0->90 degrees.")
        usage()

    cut_class = pargs.cut_class
    print("Cutting to class (terrain) {0:d}".format(cut_class))
    pc = pointcloud.fromAny(lasname).cut_to_class(cut_class)
    if pc.get_size() < 10:
        print("Too few points in pointcloud.")
        return

    print("Sorting spatially...")
    pc.sort_spatially(FILTER_RAD)
    slope_arg = np.tan(np.radians(pargs.slope))**2
    msg = "Using steepnes parameters: angle: {0:.2f} degrees, delta-z: {1:.2f}"
    print(msg.format(pargs.slope, pargs.zlim))
    print("Filtering, radius: {0:.2f}".format(FILTER_RAD))
    dz = pc.spike_filter(FILTER_RAD, slope_arg, pargs.zlim)
    mask = (dz != 0)
    dz = dz[mask]
    pc = pc.cut(mask)
    print("Spikes: {0:d}".format(mask.sum()))
    for i in range(pc.size):
        x, y = pc.xy[i]
        z = pc.z[i]
        mdz = dz[i]
        c = pc.c[i]
        pid = pc.pid[i]
        print("spike: x: {0:.2f} y: {1:.2f} mean-dz: {2:.2f}".format(x, y, mdz))
        wkt_geom = "POINT({0:.2f} {1:.2f})".format(x, y)
        reporter.report(kmname, FILTER_RAD, mdz, x, y, z, c, pid, wkt_geom=wkt_geom)
Example #6
def generate_task_list(pargs, las):
    '''
    Create list of tasks to process in main.

    Input:
    ------

    pargs:          Arguments from parser
    las:            Laspy object of input file. Assumed read-only.

    Returns:
    --------

    List of BaseRepairMan tasks to be executed.
    '''

    fargs = {}  # dict for holding reference names
    tasks = []  # a list of the tasks that we wanna do...

    # basic setup stuff
    kmname = constants.get_tilename(pargs.las_file)
    extent = constants.tilename_to_extent(kmname)

    # should probably not be printed from here...
    print('Running %s on block: %s, %s' % (PROGNAME, kmname, time.asctime()))

    if pargs.json_tasks is not None:
        task_def = pargs.json_tasks
        if task_def.endswith('.json'):
            with open(task_def) as task_def_file:
                fargs = json.load(task_def_file)
        else:
            fargs = json.loads(task_def)
        # test the defined json tasks and see if it's one of the valid tasks
        for task in fargs:
            task_name = task[0]
            task_def = task[1]

            if task_name not in TASKS:
                raise ValueError('Name "' + task_name +
                                 '" not mapped to any task')

            task_class = TASKS[task_name]
            tasks.append(task_class(las, kmname, extent,
                                    task_def))  # append the task

    return tasks
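
The loop above implies that pargs.json_tasks decodes to a sequence of [task_name, task_definition] pairs, with each task_name being a key in the TASKS mapping. A purely illustrative definition might look like this (the task names and parameters are invented):

# Purely illustrative: the task names and parameters are invented, but the
# shape matches what generate_task_list expects - a list of
# [task_name, task_definition] pairs, serialized as JSON.
import json

json_tasks = json.dumps([
    ["reclassify", {"from_class": 1, "to_class": 2}],
    ["delete_points", {"classes": [7]}],
])
# This string (or a path to a .json file with the same content) would then be
# passed as pargs.json_tasks.
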
Example #7
def zcheck_base(lasname,
                vectorname,
                angle_tolerance,
                xy_tolerance,
                z_tolerance,
                cut_class,
                reporter,
                buffer_dist=None,
                layername=None,
                layersql=None):
    is_roads = buffer_dist is not None  # 'hacky' signal that it's roads we're checking
    print("Starting zcheck_base run at %s" % time.asctime())
    tstart = time.perf_counter()
    kmname = constants.get_tilename(lasname)
    pc = pointcloud.fromAny(lasname)
    t2 = time.perf_counter()
    tread = t2 - tstart
    print("Reading data took %.3f ms" % (tread * 1e3))
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception as e:
        print("Could not get extent from tilename.")
        extent = None
Example #8
def main(args):
    pargs = parser.parse_args(args[1:])
    lasname = pargs.las_file
    use_local = pargs.use_local
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    if pargs.height is not None:
        reporter = report.ReportClouds(use_local)
        CS = 4
        CELL_COUNT_LIM = 4
    else:
        reporter = report.ReportAutoBuilding(use_local)
        CS = 1
        CELL_COUNT_LIM = 2
    kmname = constants.get_tilename(lasname)
    print("Running %s on block: %s, %s" %
          (os.path.basename(args[0]), kmname, time.asctime()))
    try:
        xul, yll, xlr, yul = constants.tilename_to_extent(kmname)
    except Exception as e:
        print("Exception: %s" % str(e))
        print("Bad 1km formatting of las file name: %s" % lasname)
        return 1
Example #9
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1
    # standard dhmqc idioms...
    lasname = pargs.las_file
    pointname = pargs.las_ref_file
    kmname = constants.get_tilename(lasname)
    print("Running %s on block: %s, %s" % (os.path.basename(args[0]), kmname, time.asctime()))
    try:
        xul, yll, xur, yul = constants.tilename_to_extent(kmname)
    except Exception as e:
        print("Exception: %s" % str(e))
        print("Bad 1km formatting of las file: %s" % lasname)
        return 1
    outdir = pargs.outdir
    if not os.path.exists(outdir):
        os.mkdir(outdir)
    cut_to = pargs.cut_to
    cs = pargs.cs
    ncols_f = TILE_SIZE / cs
    ncols = int(ncols_f)
    nrows = ncols  # tiles are square (for now)
Example #10
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.dem_tile)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    extent = np.asarray(constants.tilename_to_extent(kmname))

    shoes = vector_io.get_geometries(pargs.horse_ds, pargs.layername,
                                     pargs.layersql, extent)
    outname = os.path.join(pargs.outdir, "dhym_" + kmname + ".tif")
    if len(shoes) == 0:
        print("No shoes, man!")
        shutil.copy(pargs.dem_tile, outname)
        return 0
    # We always interpolate values from the large dataset (vrt) which is not changed in the loop below.
    dtm = grid.fromGDAL(pargs.dem_tile)
    mesh_xy = pointcloud.mesh_as_points(dtm.shape, dtm.geo_ref)
    dem_ds = gdal.Open(pargs.dem_all)
    dem_band = dem_ds.GetRasterBand(1)
    ndval = dem_band.GetNoDataValue()
    georef = np.asarray(dem_ds.GetGeoTransform())

    #if True:
    #  import matplotlib
    #  matplotlib.use("Qt4Agg")
    #  import matplotlib.pyplot as plt

    for shoe in shoes:
        arr = array_geometry.ogrline2array(shoe, flatten=True)
        assert (arr.shape[0] == 4)

        # okie dokie - now load a small raster around the horseshoe
        # the shoes can have quite long 'sides' (extruders),
        # however the two 'ends' should be small enough to keep in
        # memory - so load two grids along the two 'ends'
        cm, scale, nsteps, H, Hinv = get_transformation_params(arr)
        small_grids = []
        for e in ((0, 3), (1, 2)):
            xy = arr[e, :]  # take the corresponding edge
            ll = xy.min(axis=0)
            ur = xy.max(axis=0)

            # map to pixel-space
            ll_pix = grid.user2array(georef, ll)
            ur_pix = grid.user2array(georef, ur)
            xwin, mywin = (ur_pix - ll_pix)  #negative ywin

            # Buffer grid slightly - can do with less I suppose...
            xoff = max(0, int(ll_pix[0]) - 2)
            yoff = max(0, int(ur_pix[1]) - 2)
            xwin = min(int(xwin + 1), dem_ds.RasterXSize - xoff - 4) + 4
            ywin = min(int(1 - mywin), dem_ds.RasterYSize - yoff - 4) + 4
            # If not completely contained in large raster - continue??
            assert (xoff >= 0 and yoff >= 0 and xwin >= 1 and ywin >= 1)  #hmmm
            piece = dem_band.ReadAsArray(xoff, yoff, xwin,
                                         ywin).astype(np.float64)

            # What to do with nodata-values??
            N = (piece == ndval)
            if N.any():
                print("WARNING: setting nodata values to 0!!!")
                piece[N] = 0

            piece_georef = georef.copy()
            piece_georef[0] += xoff * georef[1]
            piece_georef[3] += yoff * georef[5]
            small_grids.append(grid.Grid(piece, piece_georef, ndval))

        # Make sure that the grid is 'fine' enough - since the projective transformation
        # will distort distances across the lines we want to subdivide
        cs = 1 / float(nsteps)

        # check numerical diff
        moved = np.array(((0, cs), (1, cs), (1, 1 - cs), (0, 1 - cs)))
        tmoved = inverse_transform(moved, cm, scale, Hinv)
        delta = arr - tmoved
        ndelta = np.sqrt(np.sum(delta**2, axis=1))
        nrows = int(nsteps * ndelta.max()) + 1

        # construct the  vertical two lines, along the two 'ends', in projective space
        hspace, cs = np.linspace(1, 0, nrows, endpoint=True, retstep=True)
        cs = -cs
        l1 = np.zeros((nrows, 2), dtype=np.float64)
        l1[:, 1] = hspace
        l2 = np.ones((nrows, 2), dtype=np.float64)
        l2[:, 1] = hspace
        tl1 = inverse_transform(l1, cm, scale, Hinv)
        tl2 = inverse_transform(l2, cm, scale, Hinv)
        z1 = small_grids[0].interpolate(tl1)
        z2 = small_grids[1].interpolate(tl2)
        assert ((z1 != ndval).all())
        assert ((z2 != ndval).all())

        # now construct a pseudo-grid in 'projective space'
        Z = np.column_stack((z1, z2))
        pseudo_georef = [-0.5, 1.0, 0, 1 + 0.5 * cs, 0, -cs]
        pseudo_grid = grid.Grid(Z, pseudo_georef, ndval)

        # Transform input points!
        # first cut to bounding box of shoe
        M = np.logical_and(mesh_xy >= arr.min(axis=0),
                           mesh_xy <= arr.max(axis=0)).all(axis=1)
        print("Number of points in bb: %d" % M.sum())

        xy_small = mesh_xy[M]
        txy = transform(xy_small, cm, scale, H)
        N = np.logical_and(txy >= 0, txy <= 1).all(axis=1)
        xy_in_grid = txy[N]
        print("Number of points in shoe: %d" % xy_in_grid.shape[0])
        new_z = pseudo_grid.interpolate(xy_in_grid)

        # Construct new mask as N is 'relative' to M
        MM = np.zeros(mesh_xy.shape[0], dtype=bool)
        MM[M] = N
        MM = MM.reshape(dtm.shape)
        dtm.grid[MM] = new_z

        # OLD STUFF FOR TRIANGULATION APPROACH
        # N1 = np.arange(0,nsteps-1)
        # N2 = N1 + 1
        # N3 = N1+nsteps
        # N4 = N3+1
        # T1 = np.column_stack((N1,N3,N4))
        # T2 = np.column_stack((N4,N2,N1))
        # T  = np.vstack((T1,T2))
        #
        # plt.figure()
        # plt.triplot(xy[:,0], xy[:,1], T)
        # plt.plot(arr[:,0], arr[:,1],    color = "green")
        # plt.plot(l1[:,0], l1[:,1], ".", color = "blue", ms = 10)
        # plt.plot(l2[:,0], l2[:,1], ".", color = "red",  ms = 10)
        # plt.show()
    dtm.save(outname,
             dco=["TILED=YES", "COMPRESS=DEFLATE", "PREDICTOR=3", "ZLEVEL=9"])
Example #11
def main(args):
    pargs = parser.parse_args(args[1:])
    lasname = pargs.las_file
    polyname = pargs.build_polys
    kmname = constants.get_tilename(lasname)
    print("Running %s on block: %s, %s" %
          (os.path.basename(args[0]), kmname, time.asctime()))
    use_local = pargs.use_local
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportRoofridgeCheck(use_local)
    cut_class = pargs.cut_class
    print("Using class(es): %s" % (cut_class))
    # default step values for search...
    steps1 = 32
    steps2 = 14
    search_factor = pargs.search_factor
    if search_factor != 1:
        # can turn search steps up or down
        steps1 = int(search_factor * steps1)
        steps2 = int(search_factor * steps2)
        print("Incresing search factor by: %.2f" % search_factor)
        print(
            "Running time will increase exponentionally with search factor...")
    pc = pointcloud.fromAny(lasname).cut_to_class(cut_class).cut_to_z_interval(
        Z_MIN, Z_MAX)
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None
    polys = vector_io.get_geometries(polyname, pargs.layername, pargs.layersql,
                                     extent)
    fn = 0
    sl = "+" * 60
    is_sloppy = pargs.sloppy
    use_all = pargs.use_all
    for poly in polys:
        print(sl)
        fn += 1
        print("Checking feature number %d" % fn)
        a_poly = array_geometry.ogrgeom2array(poly)
        # secret argument to use all buildings...
        if (len(a_poly) > 1 or
                a_poly[0].shape[0] != 5) and (not use_all) and (not is_sloppy):
            print("Only houses with 4 corners accepted... continuing...")
            continue
        pcp = pc.cut_to_polygon(a_poly)
        # hmmm, these consts should perhaps be made more visible...
        if (pcp.get_size() < 500 and (not is_sloppy)) or (pcp.get_size() < 10):
            print("Few points in polygon...")
            continue
        # Go to a more numerically stable coord system - from now on only consider outer ring...
        a_poly = a_poly[0]
        xy_t = a_poly.mean(axis=0)
        a_poly -= xy_t
        pcp.xy -= xy_t
        pcp.triangulate()
        geom = pcp.get_triangle_geometry()
        m = geom[:, 1].mean()
        sd = geom[:, 1].std()
        if (m > 1.5 or 0.5 * sd > m) and (not is_sloppy):
            print("Feature %d, bad geometry...." % fn)
            print(m, sd)
            continue
        planes = cluster(pcp, steps1, steps2)
        if len(planes) < 2:
            print("Feature %d, didn't find enough planes..." % fn)
        pair, equation = find_planar_pairs(planes)
        if pair is not None:
            p1 = planes[pair[0]]
            p2 = planes[pair[1]]
            z1 = p1[0] * pcp.xy[:, 0] + p1[1] * pcp.xy[:, 1] + p1[2]
            z2 = p2[0] * pcp.xy[:, 0] + p2[1] * pcp.xy[:, 1] + p2[2]
            print("%s" % ("*" * 60))
            print("Statistics for feature %d" % fn)
            if DEBUG:
                plot3d(pcp.xy, pcp.z, z1, z2)
            intersections, distances, rotations = get_intersections(
                a_poly, equation)
            if intersections.shape[0] == 2:
                line_x = intersections[:, 0]
                line_y = intersections[:, 1]
                z_vals = p1[0] * intersections[:, 0] + p1[
                    1] * intersections[:, 1] + p1[2]
                if abs(z_vals[0] - z_vals[1]) > 0.01:
                    print("Numeric instabilty for z-calculation...")
                z_val = float(np.mean(z_vals))
                print("Z for intersection is %.2f m" % z_val)
                if abs(equation[1]) > 1e-3:
                    a = -equation[0] / equation[1]
                    b = equation[2] / equation[1]
                    line_y = a * line_x + b
                elif abs(equation[0]) > 1e-3:
                    a = -equation[1] / equation[0]
                    b = equation[2] / equation[0]
                    line_x = a * line_y + b
                if DEBUG:
                    plot_intersections(a_poly, intersections, line_x, line_y)
                # transform back to real coords
                line_x += xy_t[0]
                line_y += xy_t[1]
                wkt = "LINESTRING(%.3f %.3f %.3f, %.3f %.3f %.3f)" % (
                    line_x[0], line_y[0], z_val, line_x[1], line_y[1], z_val)
                print("WKT: %s" % wkt)
                reporter.report(kmname,
                                rotations[0],
                                distances[0],
                                distances[1],
                                wkt_geom=wkt)
            else:
                print(
                    "Hmmm - something wrong, didn't get exactly two intersections..."
                )
Example #12
parser.add_argument("outdir", help="Output directory for resulting DEM files")
parser.add_argument("-debug", help="TODO")


# A usage function will be imported by the wrapper to print usage for tests - otherwise ArgumentParser will handle that...
def usage():
    parser.print_help()


def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1
    kmname = constants.get_tilename(pargs.dem_tile)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    extent = np.asarray(constants.tilename_to_extent(kmname))
    shoes = vector_io.get_geometries(pargs.horse_ds, pargs.layername,
                                     pargs.layersql, extent)
    outname = os.path.join(pargs.outdir, "dhym_lines_" + kmname + ".tif")
    if len(shoes) == 0:
        print("No shoes, man!")
        shutil.copy(pargs.dem_tile, outname)
        return 0
    # We always interpolate values from the large dataset (vrt) which is not changed in the loop below.
    dtm = grid.fromGDAL(pargs.dem_tile)
    cs = dtm.geo_ref[1]
    mesh_xy = pointcloud.mesh_as_points(dtm.shape, dtm.geo_ref)
    dem_ds = gdal.Open(pargs.dem_all)
    dem_band = dem_ds.GetRasterBand(1)
Example #13
def main(args):
    pargs = parser.parse_args(args[1:])
    lasname = pargs.las_file
    polyname = pargs.build_polys
    kmname = constants.get_tilename(lasname)
    print("Running %s on block: %s, %s" % (os.path.basename(args[0]), kmname, time.asctime()))
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportRoofridgeStripCheck(pargs.use_local)
    cut_class = pargs.cut_class
    # default step values for search...
    steps1 = 30
    steps2 = 13
    search_factor = pargs.search_factor
    if search_factor != 1:
        # can turn search steps up or down
        steps1 = int(search_factor * steps1)
        steps2 = int(search_factor * steps2)
        print("Incresing search factor by: %.2f" % search_factor)
        print("Running time will increase exponentionally with search factor...")
    pc = pointcloud.fromAny(lasname).cut_to_class(cut_class).cut_to_z_interval(Z_MIN, Z_MAX)
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None
    polys = vector_io.get_geometries(polyname, pargs.layername, pargs.layersql, extent)
    fn = 0
    sl = "+" * 60
    is_sloppy = pargs.sloppy
    use_all = pargs.use_all
    for poly in polys:
        print(sl)
        fn += 1
        print("Checking feature number %d" % fn)
        a_poly = array_geometry.ogrgeom2array(poly)
        # secret argument to use all buildings...
        if (len(a_poly) > 1 or a_poly[0].shape[0] != 5) and (not use_all) and (not is_sloppy):
            print("Only houses with 4 corners accepted... continuing...")
            continue
        pcp = pc.cut_to_polygon(a_poly)
        strips = pcp.get_pids()
        if len(strips) != 2:
            print("Not exactly two overlapping strips... continuing...")
            continue
        # Go to a more numerically stable coord system - from now on only consider outer ring...
        a_poly = a_poly[0]
        xy_t = a_poly.mean(axis=0)  # center of mass system
        a_poly -= xy_t
        lines = []  # for storing the two found lines...
        for sid in strips:
            print("-*-" * 15)
            print("Looking at strip %d" % sid)
            pcp_ = pcp.cut_to_strip(sid)
            # hmmm, these consts should perhaps be made more visible...
            if (pcp_.get_size() < 500 and (not is_sloppy)) or (pcp_.get_size() < 10):
                print("Few points in polygon... %d" % pcp_.get_size())
                continue
            pcp_.xy -= xy_t
            pcp_.triangulate()
            geom = pcp_.get_triangle_geometry()
            m = geom[:, 1].mean()
            sd = geom[:, 1].std()
            if (m > 1.5 or 0.5 * sd > m) and (not is_sloppy):
                print("Feature %d, strip %d, bad geometry...." % (fn, sid))
                break
            planes = cluster(pcp_, steps1, steps2)
            if len(planes) < 2:
                print("Feature %d, strip %d, didn't find enough planes..." % (fn, sid))
            pair, equation = find_planar_pairs(planes)
            if pair is not None:
                p1 = planes[pair[0]]
                print("%s" % ("*" * 60))
                print("Statistics for feature %d" % fn)

                # Now we need to find some points on the line near the house... (0,0) is
                # the center of mass
                norm_normal = equation[0]**2 + equation[1]**2
                if norm_normal < 1e-10:
                    print("Numeric instablity, small normal")
                    break
                # this should be on the line
                cm_line = np.asarray(equation[:2]) * (equation[2] / norm_normal)
                line_dir = np.asarray((-equation[1], equation[0])) / (sqrt(norm_normal))
                end1 = cm_line + line_dir * LINE_RAD
                end2 = cm_line - line_dir * LINE_RAD
                intersections = np.vstack((end1, end2))
                line_x = intersections[:, 0]
                line_y = intersections[:, 1]
                z_vals = p1[0] * intersections[:, 0] + p1[1] * intersections[:, 1] + p1[2]
                if abs(z_vals[0] - z_vals[1]) > 0.01:
                    print("Numeric instabilty for z-calculation...")
                z_val = float(np.mean(z_vals))
                print("Z for intersection is %.2f m" % z_val)
                # transform back to real coords
                line_x += xy_t[0]
                line_y += xy_t[1]
                wkt = "LINESTRING(%.3f %.3f %.3f, %.3f %.3f %.3f)" % (
                    line_x[0], line_y[0], z_val, line_x[1], line_y[1], z_val)
                print("WKT: %s" % wkt)
                lines.append([sid, wkt, z_val, cm_line, line_dir])

        if len(lines) == 2:
            # check for parallelism
            id1 = lines[0][0]
            id2 = lines[1][0]
            z1 = lines[0][2]
            z2 = lines[1][2]
            if abs(z1 - z2) > 0.5:
                print("Large difference in z-values for the two lines!")
            else:
                ids = "{0:d}_{1:d}".format(id1, id2)
                inner_prod = (lines[0][4] * lines[1][4]).sum()
                inner_prod = max(-1, inner_prod)
                inner_prod = min(1, inner_prod)

                if DEBUG:
                    print("Inner product: %.4f" % inner_prod)

                ang = abs(degrees(acos(inner_prod)))
                if ang > 175:
                    ang = abs(180 - ang)
                if ang < 15:
                    v = (lines[0][3] - lines[1][3])
                    d = np.sqrt((v**2).sum())
                    if d < 5:
                        for line in lines:
                            reporter.report(kmname, id1, id2, ids, d, ang,
                                            line[2], wkt_geom=line[1])
                    else:
                        print("Large distance between centers %s, %s, %.2f" %
                              (lines[0][3], lines[1][3], d))
                else:
                    print("Pair found - but not very well aligned - angle: %.2f" % ang)
        else:
            print("Pair not found...")
Example #14
def usage():
    parser.print_help()


def main(args):
    '''
    Main script functionality. Can be invoked from either the command line
    or via qc_wrap.py
    '''

    try:
        pargs = parser.parse_args(args[1:])
    except Exception as error_msg:
        print(str(error_msg))
        return 1

    kmname = get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" % (PROGNAME, kmname, time.asctime()))
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportClassCount(pargs.use_local)
    pc = pointcloud.fromAny(pargs.las_file)
    n_points_total = pc.get_size()
    if n_points_total == 0:
        print(
            "Something is terribly terribly wrong here! Simon - vi skal melde en fjel"
        )
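        # (Danish: "Simon - we need to report an error"; "fjel" is a typo of "fejl")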

    pc_temp = pc.cut_to_class(constants.created_unused)
    n_created_unused = pc_temp.get_size()

    pc_temp = pc.cut_to_class(constants.surface)
Example #15
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.dem_tile)
    print("Running %s on block: %s, %s" %(progname,kmname,time.asctime()))
    extent = np.asarray(constants.tilename_to_extent(kmname))


    outname = os.path.join(pargs.outdir,  "dhym_" + kmname + ".tif")

    if os.path.exists(outname) and not pargs.overwrite:
        print("File already exists - skipping...")
        return 0


    # We always interpolate values from the large dataset (vrt) which is not changed in the loop below.
    dtm = grid.fromGDAL(pargs.dem_tile)
    dem_ds = gdal.Open(pargs.dem_all)
    dem_band = dem_ds.GetRasterBand(1)
    ndval = dem_band.GetNoDataValue()
    georef = np.asarray(dem_ds.GetGeoTransform())
    cell_res = min(georef[1], -georef[5])  # the minimal cell size

    # Get the geometries!
    # A list of (geometry_as_np_array, 'grid1', 'grid2') - the grids should be objects with an
    # interpolate method. The first represents the line 0-3, while the other one represents 1-2.
    # And yes, keep all that in memory. Buy some more memory if you need it, man!
    stuff_to_handle = []
    # get relevant geometries and process 'em
    if pargs.horsesql is not None:
        # fetch the horseshoes
        shoes = vector_io.get_geometries(pargs.vector_ds, layersql=pargs.horsesql, extent=extent)
        # for the horseshoes we want to read z from the dtm!
        for shoe in shoes:
            arr = array_geometry.ogrline2array(shoe, flatten=True)
            assert arr.shape[0] == 4
            g1 = get_dtm_piece(arr[(0, 3), :], dem_band, georef, ndval)  # the 'small' piece for the line from p0 to p3.
            g2 = get_dtm_piece(arr[(1, 2), :], dem_band, georef, ndval)  # the 'small' piece for the line from p1 to p2.
            stuff_to_handle.append((arr, g1, g2))
        del shoes
    for sql, own_z in ((pargs.linesql_own_z, True), (pargs.linesql_dtm_z, False)):
        # handle the two line types in one go...
        if sql is not None:
            # fetch the 3d lines
            lines = vector_io.get_geometries(pargs.vector_ds, layersql=sql, extent=extent)
            print("%d features in %s" % (len(lines), sql))
            for line in lines:
                arr = array_geometry.ogrline2array(line, flatten=not own_z)
                if own_z:
                    assert arr.shape[1] == 3  # should be a 3d geometry!
                    z = arr[:, 2]
                # construct a horseshoe per line segment!
                # Should we assert that there are exactly two points?
                # We can handle the general case, but it gets tricky to determine what the
                # interpolation should mean (z1 and z2 from endpoints of the linestring,
                # interpolating via relative length?)
                xy = arr[:, :2]
                n = xy.shape[0] - 1  # number of segments
                # So for now assert that n == 1, only one segment. Else modify what the
                # grids should be for the 'inner' vertices...
                assert n == 1
                # Vectorize - even though there should be only one segment. Will handle the general case...
                N = array_geometry.linestring_displacements(xy) * cell_res  # displacement vectors - probably too broad with all touched!
                buf_left = xy + N
                buf_right = xy - N
                for i in range(n):  # be prepared to handle the general case!
                    # a horseshoe which is open in the 'first end'
                    shoe = np.vstack((buf_left[i], buf_left[i + 1], buf_right[i + 1], buf_right[i]))
                    if own_z:
                        g1 = ConstantGrid(z[i])
                        g2 = ConstantGrid(z[i + 1])
                    else:
                        g1 = get_dtm_piece(shoe[(0, 3), :], dem_band, georef, ndval)  # the 'small' piece for the line from p0 to p3.
                        g2 = get_dtm_piece(shoe[(1, 2), :], dem_band, georef, ndval)  # the 'small' piece for the line from p1 to p2.
                    stuff_to_handle.append((shoe, g1, g2))
            del lines
    if len(stuff_to_handle) == 0:
        print("No features to burn, copying dtm...")
        shutil.copy(pargs.dem_tile, outname)
        dem_band = None
        dem_ds = None
        return 0
    t1 = time.time()
    if pargs.burn_as_lines:
        print("Burning as lines...")
        m_drv = ogr.GetDriverByName("Memory")
        line_ds = m_drv.CreateDataSource("dummy")
        layer = line_ds.CreateLayer("lines", osr.SpatialReference(dtm.srs), ogr.wkbLineString25D)
        # will add 3d lines to layer (in place) - increase resolution to cell_res*0.8 for fewer lines
        create_3d_lines(stuff_to_handle, layer, cell_res * 0.6, ndval)
        print("Number of lines: %d" % layer.GetFeatureCount())
        # ok - layer created, burn it!
        layer.ResetReading()
        arr = vector_io.just_burn_layer(layer, dtm.geo_ref, dtm.shape, nd_val=ndval,
                                        dtype=np.float32, all_touched=True, burn3d=True)
        M = (arr != ndval)
        assert M.any()
        if pargs.debug:
            drv = ogr.GetDriverByName("SQLITE")
            drv.CopyDataSource(line_ds, os.path.join(pargs.outdir, "dalines_" + kmname + ".sqlite"))
        layer = None
        line_ds = None
        dtm.grid[M] = arr[M]
    else:
        mesh_xy = pointcloud.mesh_as_points(dtm.shape, dtm.geo_ref)
        print("Burning using projective transformation...")
        burn_projective(stuff_to_handle, dtm, cell_res, ndval, mesh_xy)
    t2 = time.time()
    print("Burning took: %.3fs" % (t2 - t1))
    dtm.save(outname, dco=["TILED=YES", "COMPRESS=DEFLATE", "PREDICTOR=3", "ZLEVEL=9"])
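
array_geometry.linestring_displacements is not shown above. A minimal sketch of what it plausibly computes - per-vertex unit normals used to offset the segment left and right when building the open horseshoe - could look like this (the name and exact return shape are assumptions):

# Hypothetical sketch, not the project's actual linestring_displacements:
# per-vertex unit normals of a linestring, used above to offset the line by
# +/- cell_res and form an open 'horseshoe'.
import numpy as np

def linestring_displacements_sketch(xy):
    d = np.diff(xy, axis=0).astype(float)           # one direction per segment
    d /= np.sqrt((d ** 2).sum(axis=1))[:, None]     # normalize each direction
    normals = np.column_stack((-d[:, 1], d[:, 0]))  # rotate 90 degrees
    # one displacement per vertex: reuse the adjacent segment's normal
    return np.vstack((normals[0], normals))
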
Example #16
        piece = band.ReadAsArray(int(slices1[1].start), int(slices1[0].start),
                                 int(slices1[1].stop - slices1[1].start),
                                 int(slices1[0].stop - slices1[0].start))
        print(str(piece.shape))
        g0.grid[slices0[0], slices0[1]] = piece
        ds = None
    return g0, vert_expansions, hor_expansions


def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1
    kmname = constants.get_tilename(pargs.tile_name)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    if pargs.tiledb is not None:
        G, v_expansions, h_expansions = get_extended_tile(pargs.tiledb, kmname)
    else:
        v_expansions = h_expansions = {-1: False, 1: False}
        G = grid.fromGDAL(pargs.tile_name)
    if pargs.ZT:
        method = 1
    else:
        method = 0
    H = G.get_hillshade(azimuth=pargs.azimuth,
                        height=pargs.height,
                        z_factor=pargs.zfactor,
                        method=method)
    for pos in (-1, 1):
Example #17
parser.add_argument("las_file",help="input 1km las tile.")
parser.add_argument("ref_data",help="Reference data (path, connection string etc).")

def usage():
    parser.print_help()




def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1
    kmname = constants.get_tilename(pargs.las_file)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    lasname = pargs.las_file
    linename = pargs.ref_data
    use_local = pargs.use_local
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportLineOutliers(use_local)
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception as e:
        print("Could not get extent from tilename.")
        raise e
    lines = vector_io.get_features(linename, pargs.layername, pargs.layersql, extent)
    print("Found %d features in %s" % (len(lines), linename))
    if len(lines) == 0:
Example #18
def main(args):
    '''
    Run road delta check. Invoked from either command line or qc_wrap.py
    '''
    pargs = parser.parse_args(args[1:])
    lasname = pargs.las_file
    linename = pargs.lines
    kmname = constants.get_tilename(lasname)
    print("Running %s on block: %s, %s" % (os.path.basename(args[0]), kmname, time.asctime()))
    if pargs.schema is not None:
        report.set_schema(pargs.schema)
    reporter = report.ReportDeltaRoads(pargs.use_local)
    cut_class = pargs.cut_class
    pc = pointcloud.fromAny(lasname).cut_to_class(cut_class)
    if pc.get_size() < 5:
        print("Too few points to bother..")
        return 1

    pc.triangulate()
    geom = pc.get_triangle_geometry()
    print("Using z-steepnes limit {0:.2f} m".format(pargs.zlim))
    mask = np.logical_and(geom[:, 1] < XY_MAX, geom[:, 2] > pargs.zlim)
    geom = geom[mask]  # save for reporting
    if not mask.any():
        print("No steep triangles found...")
        return 0

    # only the centers of the interesting triangles
    centers = pc.triangulation.get_triangle_centers()[mask]
    print("{0:d} steep triangles in tile.".format(centers.shape[0]))
    try:
        extent = np.asarray(constants.tilename_to_extent(kmname))
    except Exception:
        print("Could not get extent from tilename.")
        extent = None

    lines = vector_io.get_geometries(linename, pargs.layername, pargs.layersql, extent)
    feature_count = 0
    for line in lines:
        xy = array_geometry.ogrline2array(line, flatten=True)
        if xy.shape[0] == 0:
            print("Seemingly an unsupported geometry...")
            continue

        # select the triangle centers which lie within line_buffer of the road segment
        mask = array_geometry.points_in_buffer(centers, xy, LINE_BUFFER)
        critical = centers[mask]

        print("*" * 50)
        print("{0:d} steep centers along line {1:d}".format(critical.shape[0], feature_count))
        feature_count += 1

        if critical.shape[0] > 0:
            z_box = geom[mask][:, 2]
            z1 = z_box.max()
            z2 = z_box.min()
            wkt = "MULTIPOINT("
            for point in critical:
                wkt += "{0:.2f} {1:.2f},".format(point[0], point[1])
            wkt = wkt[:-1] + ")"
            reporter.report(kmname, z1, z2, wkt_geom=wkt)
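
array_geometry.points_in_buffer is not shown in this snippet. A self-contained sketch of the same idea - a boolean mask of points lying within a given distance of a polyline, computed with plain point-to-segment distances - might look like this (name and signature are assumptions):

# Hypothetical sketch, not the project's actual points_in_buffer: boolean mask
# of the points lying within 'buf' of a polyline, via point-to-segment distances.
import numpy as np

def points_in_buffer_sketch(points, line_xy, buf):
    inside = np.zeros(points.shape[0], dtype=bool)
    for a, b in zip(line_xy[:-1], line_xy[1:]):
        ab = b - a
        denom = float((ab ** 2).sum())
        if denom == 0.0:                      # degenerate (zero-length) segment
            d = np.sqrt(((points - a) ** 2).sum(axis=1))
        else:
            t = np.clip((points - a) @ ab / denom, 0.0, 1.0)
            proj = a + t[:, None] * ab        # closest point on the segment
            d = np.sqrt(((points - proj) ** 2).sum(axis=1))
        inside |= d <= buf
    return inside
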