Example 1
def get_extended_tile(tile_db, tilename):
    drv = gdal.GetDriverByName("Gtiff")
    con = db.connect(tile_db)
    cur = con.cursor()
    cur.execute("select path,row,col from coverage where tile_name=?",
                (tilename, ))
    data = cur.fetchone()
    path, row, col = data
    print("Reading " + path)
    g0 = grid.fromGDAL(path)
    cur.execute(
        "select path,row,col from coverage where abs(row-?)<2 and abs(col-?)<2",
        (row, col))
    data = cur.fetchall()
    vert_expansions = {-1: False, 1: False}  #top,bottom
    hor_expansions = {-1: False, 1: False}  #left,right
    for path, r, c in data:
        if r == row and c == col:
            continue
        dr = r - row
        dc = c - col
        if dr != 0 and not vert_expansions[dr]:
            #print "vexp",dr
            vert_expansions[dr] = True
            g0.expand_vert(dr, pixel_buf)
            #print g0.geo_ref[3]
        if dc != 0 and not hor_expansions[dc]:
            #print "hexp",dc
            hor_expansions[dc] = True
            g0.expand_hor(dc, pixel_buf)
            #print g0.geo_ref[0]
        print("Reading " + path + " at %d,%d" % (dr, dc))
        #print g0.shape
        ds = gdal.Open(path)
        band = ds.GetRasterBand(1)
        geo_ref = ds.GetGeoTransform()
        assert (geo_ref[1] == g0.geo_ref[1] and geo_ref[5] == g0.geo_ref[5])
        slices0, slices1 = grid.intersect_grid_extents(
            g0.geo_ref, g0.shape, geo_ref, (ds.RasterYSize, ds.RasterXSize))
        assert (slices0 is not None)
        piece = band.ReadAsArray(int(slices1[1].start), int(slices1[0].start),
                                 int(slices1[1].stop - slices1[1].start),
                                 int(slices1[0].stop - slices1[0].start))
        print(str(piece.shape))
        g0.grid[slices0[0], slices0[1]] = piece
        ds = None
    return g0, vert_expansions, hor_expansions
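
The key step above is reading only the window of a neighbouring tile that overlaps the expanded grid and pasting it in place. Below is a minimal sketch of that windowed read using plain GDAL instead of the grid/intersect_grid_extents wrapper; the helper name read_overlap and its arguments are hypothetical, and it assumes the tiles share pixel size and orientation, as the assert in the example does.

# Minimal sketch (hypothetical helper): read the part of src_path that overlaps a
# destination grid described by its GDAL geotransform and (rows, cols) shape.
from osgeo import gdal

def read_overlap(dst_gt, dst_shape, src_path):
    ds = gdal.Open(src_path)
    src_gt = ds.GetGeoTransform()
    # same pixel size/orientation assumed, as asserted in the example above
    assert src_gt[1] == dst_gt[1] and src_gt[5] == dst_gt[5]
    # pixel offset of the source origin inside the destination grid
    col0 = int(round((src_gt[0] - dst_gt[0]) / dst_gt[1]))
    row0 = int(round((src_gt[3] - dst_gt[3]) / dst_gt[5]))
    # clip the source footprint to the destination extent
    r0, c0 = max(row0, 0), max(col0, 0)
    r1 = min(row0 + ds.RasterYSize, dst_shape[0])
    c1 = min(col0 + ds.RasterXSize, dst_shape[1])
    if r1 <= r0 or c1 <= c0:
        return None, None, None  # no overlap
    piece = ds.GetRasterBand(1).ReadAsArray(c0 - col0, r0 - row0, c1 - c0, r1 - r0)
    return piece, slice(r0, r1), slice(c0, c1)

The returned slices play the role of slices0 above: the piece would be pasted with dst_array[row_slice, col_slice] = piece, just as the example does with g0.grid[slices0[0], slices0[1]] = piece.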
Example 2
     print("Could not get extent from tilename.")
     raise e
 lines=vector_io.get_features(linename,pargs.layername,pargs.layersql,extent)
 print("Found %d features in %s" %(len(lines),linename))
 if len(lines)==0:
     return 2
 cut_input_to=pargs.cut_to
 print("Reading "+lasname+"....")
 pc=pointcloud.fromAny(lasname).cut_to_class(cut_input_to) #what to cut to here...??
 if pargs.debug:
     print("Cutting input pointcloud to class %d" %cut_input_to)
 if pc.get_size()<5:
     print("Few points in pointcloud!!")
     return 3
 if pargs.toH:
     geoid=grid.fromGDAL(GEOID_GRID,upcast=True)
     print("Using geoid from %s to warp to orthometric heights." %GEOID_GRID)
     pc.toH(geoid)
 print("Sorting...")
 pc.sort_spatially(pargs.srad)
 print("Starting loop..")
 n_found=0
 for line in lines:
     if pargs.id_attr is not None:
         line_id=line.GetFieldAsString(pargs.id_attr)
     else:
         line_id=""
     #explode
     geom=line.GetGeometryRef()
     ng=geom.GetGeometryCount()
     geoms_here=[geom]
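
The snippet is cut off right after the #explode comment. For reference, multi-part OGR geometries are usually split into their members roughly as sketched below; this is a guess at what the missing lines do, not the original code.

# Hypothetical continuation: explode a multi-geometry (e.g. MULTILINESTRING)
# into its member geometries; a simple LINESTRING has GetGeometryCount() == 0.
if ng > 0:
    geoms_here = [geom.GetGeometryRef(i).Clone() for i in range(ng)]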
Example 3
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.dem_tile)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    extent = np.asarray(constants.tilename_to_extent(kmname))

    shoes = vector_io.get_geometries(pargs.horse_ds, pargs.layername,
                                     pargs.layersql, extent)
    outname = os.path.join(pargs.outdir, "dhym_" + kmname + ".tif")
    if len(shoes) == 0:
        print("No shoes, man!")
        shutil.copy(pargs.dem_tile, outname)
        return 0
    # We always interpolate values from the large dataset (vrt) which is not changed in the loop below.
    dtm = grid.fromGDAL(pargs.dem_tile)
    mesh_xy = pointcloud.mesh_as_points(dtm.shape, dtm.geo_ref)
    dem_ds = gdal.Open(pargs.dem_all)
    dem_band = dem_ds.GetRasterBand(1)
    ndval = dem_band.GetNoDataValue()
    georef = np.asarray(dem_ds.GetGeoTransform())

    #if True:
    #  import matplotlib
    #  matplotlib.use("Qt4Agg")
    #  import matplotlib.pyplot as plt

    for shoe in shoes:
        arr = array_geometry.ogrline2array(shoe, flatten=True)
        assert (arr.shape[0] == 4)

        # okie dokie - now load a small raster around the horseshoe
        # the shoes can have quite long 'sides' (extruders),
        # however the two 'ends' should be small enough to keep in
        # memory - so load two grids along the two 'ends'
        cm, scale, nsteps, H, Hinv = get_transformation_params(arr)
        small_grids = []
        for e in ((0, 3), (1, 2)):
            xy = arr[e, :]  # take the corresponding edge
            ll = xy.min(axis=0)
            ur = xy.max(axis=0)

            # map to pixel-space
            ll_pix = grid.user2array(georef, ll)
            ur_pix = grid.user2array(georef, ur)
            xwin, mywin = (ur_pix - ll_pix)  #negative ywin

            # Buffer grid slightly - can do with less I suppose...
            xoff = max(0, int(ll_pix[0]) - 2)
            yoff = max(0, int(ur_pix[1]) - 2)
            xwin = min(int(xwin + 1), dem_ds.RasterXSize - xoff - 4) + 4
            ywin = min(int(1 - mywin), dem_ds.RasterYSize - yoff - 4) + 4
            # If not completely contained in large raster - continue??
            assert (xoff >= 0 and yoff >= 0 and xwin >= 1 and ywin >= 1)  #hmmm
            piece = dem_band.ReadAsArray(xoff, yoff, xwin,
                                         ywin).astype(np.float64)

            # What to do with nodata-values??
            N = (piece == ndval)
            if N.any():
                print("WARNING: setting nodata values to 0!!!")
                piece[N] = 0

            piece_georef = georef.copy()
            piece_georef[0] += xoff * georef[1]
            piece_georef[3] += yoff * georef[5]
            small_grids.append(grid.Grid(piece, piece_georef, ndval))

        # Make sure that the grid is 'fine' enough - since the projective transformation
        # will distort distances across the lines we want to subdivide
        cs = 1 / float(nsteps)

        # check numerical diff
        moved = np.array(((0, cs), (1, cs), (1, 1 - cs), (0, 1 - cs)))
        tmoved = inverse_transform(moved, cm, scale, Hinv)
        delta = arr - tmoved
        ndelta = np.sqrt(np.sum(delta**2, axis=1))
        nrows = int(nsteps * ndelta.max()) + 1

        # construct the two vertical lines, along the two 'ends', in projective space
        hspace, cs = np.linspace(1, 0, nrows, endpoint=True, retstep=True)
        cs = -cs
        l1 = np.zeros((nrows, 2), dtype=np.float64)
        l1[:, 1] = hspace
        l2 = np.ones((nrows, 2), dtype=np.float64)
        l2[:, 1] = hspace
        tl1 = inverse_transform(l1, cm, scale, Hinv)
        tl2 = inverse_transform(l2, cm, scale, Hinv)
        z1 = small_grids[0].interpolate(tl1)
        z2 = small_grids[1].interpolate(tl2)
        assert ((z1 != ndval).all())
        assert ((z2 != ndval).all())

        # now construct a pseudo-grid in 'projective space'
        Z = np.column_stack((z1, z2))
        pseudo_georef = [-0.5, 1.0, 0, 1 + 0.5 * cs, 0, -cs]
        pseudo_grid = grid.Grid(Z, pseudo_georef, ndval)

        # Transform input points!
        # first cut to bounding box of shoe
        M = np.logical_and(mesh_xy >= arr.min(axis=0),
                           mesh_xy <= arr.max(axis=0)).all(axis=1)
        print("Number of points in bb: %d" % M.sum())

        xy_small = mesh_xy[M]
        txy = transform(xy_small, cm, scale, H)
        N = np.logical_and(txy >= 0, txy <= 1).all(axis=1)
        xy_in_grid = txy[N]
        print("Number of points in shoe: %d" % xy_in_grid.shape[0])
        new_z = pseudo_grid.interpolate(xy_in_grid)

        # Construct new mask as N is 'relative' to M
        MM = np.zeros((mesh_xy.shape[0]), dtype=bool)
        MM[M] = N
        MM = MM.reshape(dtm.shape)
        dtm.grid[MM] = new_z

        # OLD STUFF FOR TRIANGULATION APPROACH
        # N1 = np.arange(0,nsteps-1)
        # N2 = N1 + 1
        # N3 = N1+nsteps
        # N4 = N3+1
        # T1 = np.column_stack((N1,N3,N4))
        # T2 = np.column_stack((N4,N2,N1))
        # T  = np.vstack((T1,T2))
        #
        # plt.figure()
        # plt.triplot(xy[:,0], xy[:,1], T)
        # plt.plot(arr[:,0], arr[:,1],    color = "green")
        # plt.plot(l1[:,0], l1[:,1], ".", color = "blue", ms = 10)
        # plt.plot(l2[:,0], l2[:,1], ".", color = "red",  ms = 10)
        # plt.show()
    dtm.save(outname,
             dco=["TILED=YES", "COMPRESS=DEFLATE", "PREDICTOR=3", "ZLEVEL=9"])
Example 4
     pargs = parser.parse_args(args[1:])
 except Exception as e:
     print(str(e))
     return 1
 kmname = constants.get_tilename(pargs.dem_tile)
 print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
 extent = np.asarray(constants.tilename_to_extent(kmname))
 shoes = vector_io.get_geometries(pargs.horse_ds, pargs.layername,
                                  pargs.layersql, extent)
 outname = os.path.join(pargs.outdir, "dhym_lines_" + kmname + ".tif")
 if len(shoes) == 0:
     print("No shoes, man!")
     shutil.copy(pargs.dem_tile, outname)
     return 0
 # We always interpolate values from the large dataset (vrt), which is not changed in the loop below.
 dtm = grid.fromGDAL(pargs.dem_tile)
 cs = dtm.geo_ref[1]
 mesh_xy = pointcloud.mesh_as_points(dtm.shape, dtm.geo_ref)
 dem_ds = gdal.Open(pargs.dem_all)
 dem_band = dem_ds.GetRasterBand(1)
 ndval = dem_band.GetNoDataValue()
 georef = np.asarray(dem_ds.GetGeoTransform())
 m_drv = ogr.GetDriverByName("Memory")
 line_ds = m_drv.CreateDataSource("dummy")
 layer = line_ds.CreateLayer("lines", None, ogr.wkbLineString25D)
 layerdefn = layer.GetLayerDefn()
 #if True:
 #  import matplotlib
 #  matplotlib.use("Qt4Agg")
 #  import matplotlib.pyplot as plt
 for shoe in shoes:
Example 5
def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1

    kmname = constants.get_tilename(pargs.dem_tile)
    print("Running %s on block: %s, %s" %(progname,kmname,time.asctime()))
    extent = np.asarray(constants.tilename_to_extent(kmname))


    outname = os.path.join(pargs.outdir,  "dhym_" + kmname + ".tif")

    if os.path.exists(outname) and not pargs.overwrite:
        print("File already exists - skipping...")
        return 0


    # We always interpolate values from the large dataset (vrt) which is not changed in the loop below.
    dtm = grid.fromGDAL(pargs.dem_tile)
    dem_ds   =  gdal.Open(pargs.dem_all)
    dem_band =  dem_ds.GetRasterBand(1)
    ndval    =  dem_band.GetNoDataValue()
    georef   =  np.asarray(dem_ds.GetGeoTransform())
    cell_res = min(georef[1], -georef[5] ) #the minimal cell size

    #get the geometries!
    # a list of (geometry_as_np_array, 'grid1', 'grid2') - the grids should be objects
    # with an interpolate method. The first represents the line 0-3, while the other
    # one represents 1-2.
    # and yes, keep all that in memory. Buy some more memory if you need it, man!
    stuff_to_handle=[]
    #get relevant geometries and process 'em
    if pargs.horsesql is not None:
        #fetch the horseshoes
        shoes  = vector_io.get_geometries(pargs.vector_ds, layersql =  pargs.horsesql, extent= extent )
        #for the horse shoes we want to read z from the dtm!
        for shoe in shoes:
            arr = array_geometry.ogrline2array(shoe,  flatten = True)
            assert(arr.shape[0]==4)
            g1=get_dtm_piece(arr[(0,3),:],dem_band, georef, ndval) #the 'small' piece for the line from p0 to p3.
            g2=get_dtm_piece(arr[(1,2),:],dem_band, georef, ndval) #the 'small' piece for the line from p1 to p2.
            stuff_to_handle.append((arr,g1,g2))
        del shoes
    for sql,own_z in ((pargs.linesql_own_z,True),(pargs.linesql_dtm_z,False)):
        #handle the two line types in one go...
        if sql is not None:
            #fetch the 3d lines
            lines= vector_io.get_geometries(pargs.vector_ds, layersql =  sql , extent= extent)
            print("%d features in "%len(lines)+sql)
            for line in lines:
                arr = array_geometry.ogrline2array(line,  flatten = not own_z)
                if own_z:
                    assert (arr.shape[1]==3) #should be a 3d geometry!!!
                    z=arr[:,2]
                #construct a horse shoe per line segment!
                #should we assert that there are exactly two points?
                #We can handle the general case, but it gets tricky to determine what the interpolation should mean (z1 and z2 from endpoints of linestring and interpolating via relative length?)
                xy=arr[:,:2]
                n= xy.shape[0] - 1 # number of segments.
                #SO: for now assert that n==1, only one segment. Else modify what the grids should be for the 'inner' vertices...
                assert(n==1)
                #vectorize - even though there should be only one segment. Will handle the general case...
                N=array_geometry.linestring_displacements(xy)*(cell_res) #displacement vectors - probably too broad with all touched!!!!
                buf_left=xy+N
                buf_right=xy-N
                for i in range(n): # be prepared to handle the general case!!!
                    shoe=np.vstack((buf_left[i],buf_left[i+1],buf_right[i+1],buf_right[i])) # a horseshoe which is open in the 'first end'
                    if own_z:
                        g1=ConstantGrid(z[i])
                        g2=ConstantGrid(z[i+1])
                    else:
                        g1=get_dtm_piece(shoe[(0,3),:],dem_band, georef, ndval) #the 'small' piece for the line from p0 to p3.
                        g2=get_dtm_piece(shoe[(1,2),:],dem_band, georef, ndval) #the 'small' piece for the line from p1 to p2.
                    stuff_to_handle.append((shoe,g1,g2))
            del lines
    if len(stuff_to_handle)==0:
        print("No features to burn, copying dtm...")
        shutil.copy(pargs.dem_tile,outname)
        dem_band=None
        dem_ds=None
        return 0
    t1=time.time()
    if pargs.burn_as_lines:
        print("Burning as lines...")
        m_drv=ogr.GetDriverByName("Memory")
        line_ds = m_drv.CreateDataSource( "dummy")
        layer = line_ds.CreateLayer( "lines", osr.SpatialReference(dtm.srs), ogr.wkbLineString25D)
        create_3d_lines(stuff_to_handle,layer,cell_res*0.6,ndval) #will add 3d lines to layer (in place) - increase resolution to cell_res*0.8 for fewer lines
        print("Number of lines: %d" %layer.GetFeatureCount())
        #ok - layer created, Burn it!!
        layer.ResetReading()
        arr=vector_io.just_burn_layer(layer,dtm.geo_ref,dtm.shape,nd_val=ndval,dtype=np.float32,all_touched=True,burn3d=True)
        M=(arr!=ndval)
        assert M.any()
        if pargs.debug:
            drv=ogr.GetDriverByName("SQLITE")
            drv.CopyDataSource(line_ds,os.path.join(pargs.outdir,"dalines_"+kmname+".sqlite"))
        layer=None
        line_ds=None
        dtm.grid[M]=arr[M]
    else:
        mesh_xy  =  pointcloud.mesh_as_points(dtm.shape,  dtm.geo_ref)
        print("Burning using projective transformation...")
        burn_projective(stuff_to_handle,dtm,cell_res,ndval,mesh_xy)
    t2=time.time()
    print("Burning took: %.3fs" %(t2-t1))
    dtm.save(outname,dco = ["TILED=YES", "COMPRESS=DEFLATE", "PREDICTOR=3", "ZLEVEL=9"])
Example 6
    return g0, vert_expansions, hor_expansions


def main(args):
    try:
        pargs = parser.parse_args(args[1:])
    except Exception as e:
        print(str(e))
        return 1
    kmname = constants.get_tilename(pargs.tile_name)
    print("Running %s on block: %s, %s" % (progname, kmname, time.asctime()))
    if pargs.tiledb is not None:
        G, v_expansions, h_expansions = get_extended_tile(pargs.tiledb, kmname)
    else:
        v_expansions = h_expansions = {-1: False, 1: False}
        G = grid.fromGDAL(pargs.tile_name)
    if pargs.ZT:
        method = 1
    else:
        method = 0
    H = G.get_hillshade(azimuth=pargs.azimuth,
                        height=pargs.height,
                        z_factor=pargs.zfactor,
                        method=method)
    for pos in (-1, 1):
        if h_expansions[pos]:
            H.shrink_hor(pos, pixel_buf)
        if v_expansions[pos]:
            H.shrink_vert(pos, pixel_buf)
    outname = os.path.join(
        pargs.outdir, "hs_" +