# Example #1
def filterMatchingImages(imginfo_list, params):
    """Return the subset of images compatible with the mosaic parameters.

    An image is kept only if its projection, band count, and datatype all
    match *params*.  A band-count mismatch is tolerated when
    ``params.force_pan_to_multi`` is set and the image is single-band, or
    when ``params.include_all_ms`` is set.

    :param imginfo_list: iterable of image info objects (``.proj``,
        ``.bands``, ``.datatype``, ``.srcfp``)
    :param params: mosaic parameter object (``.proj``, ``.bands``,
        ``.datatype``, ``.force_pan_to_multi``, ``.include_all_ms``)
    :return: list of matching image info objects
    """
    imginfo_list2 = []

    # Build the reference SRS once; it is loop-invariant.
    rp = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
    rp.ImportFromWkt(params.proj)

    for iinfo in imginfo_list:
        isSame = True
        p = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        p.ImportFromWkt(iinfo.proj)
        # BUGFIX: osr.SpatialReference.IsSame() returns an int (0/1), so the
        # original `is False` identity comparison never matched and the
        # projection filter was silently disabled.  Use truthiness instead.
        if not p.IsSame(rp):
            isSame = False
            logger.debug("Image projection differs from mosaic params: %s",
                         iinfo.srcfp)
        if iinfo.bands != params.bands and not (params.force_pan_to_multi and iinfo.bands == 1) \
                and not params.include_all_ms:
            isSame = False
            logger.debug("Image band count differs from mosaic params: %s",
                         iinfo.srcfp)
        if iinfo.datatype != params.datatype:
            isSame = False
            logger.debug("Image datatype differs from mosaic params: %s",
                         iinfo.srcfp)

        if isSame:
            imginfo_list2.append(iinfo)

    return imginfo_list2
# Example #2
def build_tiles_shp(mosaicname, tiles, params):
    """Create ``<mosaicname>_tiles.shp`` with one polygon feature per tile.

    Each feature carries the tile's row/col indices, its name, and its
    bounding coordinates.  If the shapefile already exists it is left
    untouched.

    :param mosaicname: output mosaic path/name prefix
    :param tiles: iterable of tile objects (``.name``, ``.i``, ``.j``,
        ``.xmin``/``.xmax``/``.ymin``/``.ymax``, ``.geom``)
    :param params: mosaic parameter object providing the projection WKT
        (``params.proj``)
    """
    tiles_shp = mosaicname + "_tiles.shp"
    if os.path.isfile(tiles_shp):
        logger.info("Tiles shapefile already exists: %s", os.path.basename(tiles_shp))
    else:
        logger.info("Creating shapefile of tiles: %s", os.path.basename(tiles_shp))
        # (field name, OGR type, width) -- width only applies to string fields
        fields = [('ROW', ogr.OFTInteger, 4),
                  ('COL', ogr.OFTInteger, 4),
                  ("TILENAME", ogr.OFTString, 100),
                  ('XMIN', ogr.OFTReal, 0),
                  ('XMAX', ogr.OFTReal, 0),
                  ('YMIN', ogr.OFTReal, 0),
                  ('YMAX', ogr.OFTReal, 0)]

        OGR_DRIVER = "ESRI Shapefile"
        ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
        if ogrDriver is None:
            logger.error("OGR: Driver %s is not available", OGR_DRIVER)
            sys.exit(-1)

        # (A re-check of os.path.isfile(tiles_shp) here was dead code: this
        # branch is only reached when the file did not exist.)
        vds = ogrDriver.CreateDataSource(tiles_shp)
        if vds is None:
            logger.error("Could not create shp")
            sys.exit(-1)

        shpd, shpn = os.path.split(tiles_shp)
        shpbn, shpe = os.path.splitext(shpn)

        rp = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        rp.ImportFromWkt(params.proj)

        lyr = vds.CreateLayer(shpbn, rp, ogr.wkbPolygon)
        if lyr is None:
            logger.error("ERROR: Failed to create layer: %s", shpbn)
            sys.exit(-1)

        for fld, fdef, flen in fields:
            field_defn = ogr.FieldDefn(fld, fdef)
            if fdef == ogr.OFTString:
                field_defn.SetWidth(flen)
            if lyr.CreateField(field_defn) != 0:
                logger.error("ERROR: Failed to create field: %s", fld)

        for t in tiles:
            feat = ogr.Feature(lyr.GetLayerDefn())
            feat.SetField("TILENAME", os.path.basename(t.name))
            feat.SetField("ROW", t.j)
            feat.SetField("COL", t.i)
            feat.SetField("XMIN", t.xmin)
            feat.SetField("XMAX", t.xmax)
            feat.SetField("YMIN", t.ymin)
            feat.SetField("YMAX", t.ymax)
            feat.SetGeometry(t.geom)

            if lyr.CreateFeature(feat) != 0:
                logger.error("ERROR: Could not create feature for tile %s", t)
            feat.Destroy()

        # BUGFIX: release layer/datasource handles so OGR flushes the
        # shapefile to disk; previously the dataset was never closed.
        lyr = None
        vds = None
# Example #3
def build_shp(contribs, shp, args, params):
    """Write a shapefile of contributing-image boundaries and metadata.

    One polygon feature is written per (image info, geometry) pair in
    *contribs*, with acquisition/quality attributes.  Optional per-band
    statistics fields are added when ``args.calc_stats`` is set, and a
    MEDIAN field when ``params.median_remove`` is set.

    :param contribs: iterable of ``(iinfo, geom)`` pairs
    :param shp: output shapefile path (overwritten if present)
    :param args: parsed arguments (``.calc_stats``)
    :param params: mosaic parameters (``.proj``, ``.median_remove``)
    """
    logger.info("Creating shapefile of image boundaries: %s", shp)

    # (field name, OGR type, width) -- width only applies to string fields
    fields = (
        ("IMAGENAME", ogr.OFTString, 100),
        ("SENSOR", ogr.OFTString, 10),
        ("ACQDATE", ogr.OFTString, 10),
        ("CAT_ID", ogr.OFTString, 30),
        ("RESOLUTION", ogr.OFTReal, 0),
        ("OFF_NADIR", ogr.OFTReal, 0),
        ("SUN_ELEV", ogr.OFTReal, 0),
        ("SUN_AZ", ogr.OFTReal, 0),
        ("SAT_ELEV", ogr.OFTReal, 0),
        ("SAT_AZ", ogr.OFTReal, 0),
        ("CLOUDCOVER", ogr.OFTReal, 0),
        ("TDI", ogr.OFTReal, 0),
        ("DATE_DIFF", ogr.OFTReal, 0),
        ("SCORE", ogr.OFTReal, 0),
    )

    # Per-band stats are stored as comma-joined strings, one value per band.
    if args.calc_stats:
        fields = fields + (("STATS_MIN", ogr.OFTString, 80),
                           ("STATS_MAX", ogr.OFTString, 80),
                           ("STATS_STD", ogr.OFTString, 80),
                           ("STATS_MEAN", ogr.OFTString, 80),
                           ("STATS_PXCT", ogr.OFTString, 80))

    if params.median_remove:
        fields = fields + (("MEDIAN", ogr.OFTString, 80), )

    OGR_DRIVER = "ESRI Shapefile"

    ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
    if ogrDriver is None:
        # CONSISTENCY FIX: error conditions were logged at info level;
        # use logger.error to match build_tiles_shp.
        logger.error("OGR: Driver %s is not available", OGR_DRIVER)
        sys.exit(-1)

    if os.path.isfile(shp):
        ogrDriver.DeleteDataSource(shp)
    vds = ogrDriver.CreateDataSource(shp)
    if vds is None:
        logger.error("Could not create shp")
        sys.exit(-1)

    shpd, shpn = os.path.split(shp)
    shpbn, shpe = os.path.splitext(shpn)

    rp = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
    rp.ImportFromWkt(params.proj)

    lyr = vds.CreateLayer(shpbn, rp, ogr.wkbPolygon)
    if lyr is None:
        logger.error("ERROR: Failed to create layer: %s", shpbn)
        sys.exit(-1)

    for fld, fdef, flen in fields:
        field_defn = ogr.FieldDefn(fld, fdef)
        if fdef == ogr.OFTString:
            field_defn.SetWidth(flen)
        if lyr.CreateField(field_defn) != 0:
            logger.error("ERROR: Failed to create field: %s", fld)

    for iinfo, geom in contribs:

        feat = ogr.Feature(lyr.GetLayerDefn())

        feat.SetField("IMAGENAME", iinfo.srcfn)
        feat.SetField("SENSOR", iinfo.sensor)
        feat.SetField("ACQDATE", iinfo.acqdate.strftime("%Y-%m-%d"))
        feat.SetField("CAT_ID", iinfo.catid)
        feat.SetField("OFF_NADIR", iinfo.ona)
        feat.SetField("SUN_ELEV", iinfo.sunel)
        feat.SetField("SUN_AZ", iinfo.sunaz)
        feat.SetField("SAT_ELEV", iinfo.satel)
        feat.SetField("SAT_AZ", iinfo.sataz)
        feat.SetField("CLOUDCOVER", iinfo.cloudcover)
        feat.SetField("SCORE", iinfo.score)

        # Sentinel defaults for missing/unset values.
        tdi = iinfo.tdi if iinfo.tdi else 0
        feat.SetField("TDI", tdi)

        date_diff = iinfo.date_diff if iinfo.date_diff else -9999
        feat.SetField("DATE_DIFF", date_diff)

        # Average of x/y resolution; 0 when resolution is unknown.
        res = ((iinfo.xres + iinfo.yres) / 2.0) if iinfo.xres else 0
        feat.SetField("RESOLUTION", res)

        if args.calc_stats:
            if len(iinfo.stat_dct) > 0:
                min_list = []
                max_list = []
                mean_list = []
                stdev_list = []
                px_cnt_list = []
                # Sort band keys so values line up across the stats fields.
                keys = list(iinfo.stat_dct.keys())
                keys.sort()
                for band in keys:
                    imin, imax, imean, istdev = iinfo.stat_dct[band]
                    ipx_cnt = iinfo.datapixelcount_dct[band]
                    min_list.append(str(imin))
                    max_list.append(str(imax))
                    mean_list.append(str(imean))
                    stdev_list.append(str(istdev))
                    px_cnt_list.append(str(ipx_cnt))

                feat.SetField("STATS_MIN", ",".join(min_list))
                feat.SetField("STATS_MAX", ",".join(max_list))
                feat.SetField("STATS_MEAN", ",".join(mean_list))
                feat.SetField("STATS_STD", ",".join(stdev_list))
                feat.SetField("STATS_PXCT", ",".join(px_cnt_list))

        if params.median_remove:
            keys = list(iinfo.median.keys())
            keys.sort()
            median_list = [str(iinfo.median[band]) for band in keys]
            feat.SetField("MEDIAN", ",".join(median_list))

        feat.SetGeometry(geom)

        if lyr.CreateFeature(feat) != 0:
            logger.error("ERROR: Could not create feature for image %s",
                         iinfo.srcfn)

        feat.Destroy()

    # BUGFIX: release layer/datasource handles so OGR flushes the
    # shapefile to disk; previously the dataset was never closed.
    lyr = None
    vds = None
# Example #4
def build_archive(src, scratch, args):
    """Package a SETSM DEM strip and its ancillary files into a gzip tarball.

    Reads DEM info, optionally filters out small/low-density rasters
    (deleting their source files), (re)builds the mdf and readme files,
    writes a per-strip index shapefile in *scratch*, and bundles the DEM,
    matchtag, mdf, readme, browse, any registration files, and the index
    into the archive path provided by the raster object.

    NOTE(review): this function references ``ogrDriver`` and ``tgt_srs``
    which are not defined locally -- presumably module-level globals
    initialized elsewhere; confirm before calling in isolation.

    :param src: path to the source DEM file
    :param scratch: scratch directory for the temporary index shapefile
    :param args: parsed arguments (.filter_dems, .force_filter_dems,
        .overwrite, .dryrun, .lsf, .mdf_only)
    """

    logger.info("Packaging Raster: {}".format(src))
    raster = dem.SetsmDem(src)
    dstfp = raster.archive
    dstdir, dstfn = os.path.split(raster.archive)
    #print dstfn
    #print dstfp

    try:
        raster.get_dem_info()
    except RuntimeError as e:
        logger.error(e)
    else:
        # Only proceed when DEM info was read successfully.
        process = True

        ## get raster density if not precomputed
        if raster.density is None:
            try:
                raster.compute_density_and_statistics()
            except RuntimeError as e:
                logger.warning(e)

        if args.filter_dems or args.force_filter_dems:
            # filter dems with area < 5.5 sqkm and density < .1

            area = raster.geom.Area()
            # logger.info(raster.density)
            if area < 5500000:
                logger.info("Raster area {} falls below threshold: {}".format(
                    area, raster.srcfp))
                process = False
            elif raster.density < 0.1:
                logger.info(
                    "Raster density {} falls below threshold: {}".format(
                        raster.density, raster.srcfp))
                process = False

            if not process:
                # Delete every sibling file sharing the strip prefix.
                # NOTE(review): performed even under --dryrun -- confirm
                # this destructive step is intended to ignore dryrun.
                logger.info('Removing {}'.format(raster.srcfp))
                to_remove = glob.glob(raster.srcfp[:-8] + '*')
                for f in to_remove:
                    #logger.info('Removing {}'.format(f))
                    os.remove(f)

        if process:
            #### Build mdf
            if not os.path.isfile(raster.mdf) or args.overwrite:
                if os.path.isfile(raster.mdf):
                    if not args.dryrun:
                        os.remove(raster.mdf)
                try:
                    if not args.dryrun:
                        raster.write_mdf_file(args.lsf)
                except RuntimeError as e:
                    logger.error(e)

            #### Build Readme
            if not os.path.isfile(raster.readme) or args.overwrite:
                if os.path.isfile(raster.readme):
                    if not args.dryrun:
                        os.remove(raster.readme)
                if not args.dryrun:
                    raster.write_readme_file()

            #### Build Archive
            if not args.mdf_only:

                if os.path.isfile(dstfp) and args.overwrite is True:
                    if not args.dryrun:
                        try:
                            os.remove(dstfp)
                        except:
                            # NOTE(review): bare except swallows all errors,
                            # including KeyboardInterrupt -- consider OSError.
                            print("Cannot replace archive: %s" % dstfp)

                if not os.path.isfile(dstfp):

                    # Archive members; with --lsf the smoothed DEM stands in
                    # for the raw DEM (renamed back to dem.tif on add below).
                    if args.lsf:
                        components = (
                            os.path.basename(raster.srcfp).replace(
                                "dem.tif", "dem_smooth.tif"),  # dem
                            os.path.basename(raster.matchtag),  # matchtag
                            os.path.basename(raster.mdf),  # mdf
                            os.path.basename(raster.readme),  # readme
                            os.path.basename(raster.browse),  # browse
                            # index shp files
                        )
                    else:
                        components = (
                            os.path.basename(raster.srcfp),  # dem
                            os.path.basename(raster.matchtag),  # matchtag
                            os.path.basename(raster.mdf),  # mdf
                            os.path.basename(raster.readme),  # readme
                            os.path.basename(raster.browse),  # browse
                            # index shp files
                        )

                    optional_components = [
                        os.path.basename(r) for r in raster.reg_files
                    ]  #reg

                    # Components are relative names, so work from dstdir.
                    os.chdir(dstdir)
                    #logger.info(os.getcwd())

                    # k counts items added to the archive.
                    k = 0
                    existing_components = sum([
                        int(os.path.isfile(component))
                        for component in components
                    ])
                    ### check if exists, print
                    #logger.info(existing_components)
                    if existing_components == len(components):

                        ## Build index
                        index = os.path.join(scratch,
                                             raster.stripid + "_index.shp")

                        ## create dem index shp: <strip_id>_index.shp
                        try:
                            index_dir, index_lyr = utils.get_source_names(
                                index)
                        except RuntimeError as e:
                            # NOTE(review): if this raises, index_lyr stays
                            # unbound and its later use would raise NameError.
                            logger.error("{}: {}".format(index, e))

                        if os.path.isfile(index):
                            ogrDriver.DeleteDataSource(index)

                        if not os.path.isfile(index):
                            ds = ogrDriver.CreateDataSource(index)
                            if ds is not None:

                                lyr = ds.CreateLayer(index_lyr, tgt_srs,
                                                     ogr.wkbPolygon)

                                if lyr is not None:

                                    for field_def in utils.DEM_ATTRIBUTE_DEFINITIONS_BASIC:

                                        field = ogr.FieldDefn(
                                            field_def.fname, field_def.ftype)
                                        field.SetWidth(field_def.fwidth)
                                        field.SetPrecision(
                                            field_def.fprecision)
                                        lyr.CreateField(field)

                                    #print raster.stripid
                                    feat = ogr.Feature(lyr.GetLayerDefn())

                                    ## Set fields
                                    feat.SetField("DEM_ID", raster.stripid)
                                    feat.SetField("PAIRNAME", raster.pairname)
                                    feat.SetField("SENSOR1", raster.sensor1)
                                    feat.SetField("SENSOR2", raster.sensor2)
                                    feat.SetField(
                                        "ACQDATE1",
                                        raster.acqdate1.strftime("%Y-%m-%d"))
                                    feat.SetField(
                                        "ACQDATE2",
                                        raster.acqdate2.strftime("%Y-%m-%d"))
                                    feat.SetField("CATALOGID1", raster.catid1)
                                    feat.SetField("CATALOGID2", raster.catid2)
                                    feat.SetField("ND_VALUE", raster.ndv)
                                    feat.SetField("DEM_NAME", raster.srcfn)
                                    feat.SetField("ALGM_VER",
                                                  raster.algm_version)
                                    res = (raster.xres + raster.yres) / 2.0
                                    feat.SetField("DEM_RES", res)
                                    feat.SetField("DENSITY", raster.density)

                                    #### Set fields if populated (will not be populated if metadata file is not found)
                                    if raster.creation_date:
                                        feat.SetField(
                                            "CR_DATE",
                                            raster.creation_date.strftime(
                                                "%Y-%m-%d"))

                                    ## transform and write geom
                                    feat.SetField("PROJ4", raster.proj4)
                                    feat.SetField("EPSG", raster.epsg)

                                    src_srs = utils.osr_srs_preserve_axis_order(
                                        osr.SpatialReference())
                                    src_srs.ImportFromWkt(raster.proj)

                                    if raster.geom:
                                        # Reproject geometry into the target
                                        # SRS before deriving the centroid.
                                        geom = raster.geom.Clone()
                                        transform = osr.CoordinateTransformation(
                                            src_srs, tgt_srs)
                                        geom.Transform(transform)

                                        centroid = geom.Centroid()
                                        feat.SetField("CENT_LAT",
                                                      centroid.GetY())
                                        feat.SetField("CENT_LON",
                                                      centroid.GetX())

                                        feat.SetGeometry(geom)

                                    else:
                                        logger.error(
                                            'No valid geom found: {}'.format(
                                                raster.srcfp))

                                    #### add new feature to layer
                                    lyr.CreateFeature(feat)

                                    ## Close layer and dataset
                                    lyr = None
                                    ds = None

                                    if os.path.isfile(index):
                                        ## Create archive
                                        if not args.dryrun:
                                            archive = tarfile.open(
                                                dstfp, "w:gz")
                                            #archive = tarfile.open(dstfp,"w:")
                                            if not os.path.isfile(dstfp):
                                                logger.error(
                                                    "Cannot create archive: {}"
                                                    .format(dstfn))

                                        ## Add components
                                        for component in components:
                                            logger.debug(
                                                "Adding {} to {}".format(
                                                    component, dstfn))
                                            k += 1
                                            # Store the smoothed DEM under the
                                            # canonical dem.tif name.
                                            if "dem_smooth.tif" in component:
                                                arcn = component.replace(
                                                    "dem_smooth.tif",
                                                    "dem.tif")
                                            else:
                                                arcn = component
                                            if not args.dryrun:
                                                archive.add(component,
                                                            arcname=arcn)

                                        ## Add optional components
                                        for component in optional_components:
                                            if os.path.isfile(component):
                                                logger.debug(
                                                    "Adding {} to {}".format(
                                                        component, dstfn))
                                                k += 1
                                                if not args.dryrun:
                                                    archive.add(component)

                                        ## Add index in subfolder
                                        os.chdir(scratch)
                                        for f in glob.glob(index_lyr + ".*"):
                                            arcn = os.path.join("index", f)
                                            logger.debug(
                                                "Adding {} to {}".format(
                                                    f, dstfn))
                                            k += 1
                                            if not args.dryrun:
                                                archive.add(f, arcname=arcn)
                                            # Remove scratch index files even
                                            # under --dryrun.
                                            os.remove(f)

                                        logger.info(
                                            "Added {} items to archive: {}".
                                            format(k, dstfn))

                                        ## Close archive and compress with gz
                                        if not args.dryrun:
                                            try:
                                                archive.close()
                                            except Exception as e:
                                                print(e)

                                else:
                                    logger.error(
                                        'Cannot create layer: {}'.format(
                                            index_lyr))
                            else:
                                logger.error(
                                    "Cannot create index: {}".format(index))
                        else:
                            logger.error(
                                "Cannot remove existing index: {}".format(
                                    index))
                    else:
                        logger.error(
                            "Not enough existing components to make a valid archive: {} ({} found, {} required)"
                            .format(raster.srcfp, existing_components,
                                    len(components)))
# Example #5
def HandleTile(t, src, dstdir, csvpath, args, exclude_list):
    
    
    otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(os.path.basename(csvpath)[:-4], t.name))
    mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(os.path.basename(csvpath)[:-4], t.name))
    
    if os.path.isfile(otxtpath) and os.path.isfile(mtxtpath) and args.overwrite is False:
        logger.info("Tile %s processing files already exist", t.name)
    else:
        logger.info("Tile %s", t.name)
    
        t_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        t_srs.ImportFromEPSG(t.epsg)
        
        #### Open mfp
        dsp, lyrn = utils.get_source_names(src)
        
        ds = ogr.Open(dsp)
        if ds is None:
            logger.error("Open failed")
            
        else:
            lyr = ds.GetLayerByName(lyrn)
            
            if not lyr:
                raise RuntimeError("Layer {} does not exist in dataset {}".format(lyrn, dsp))
            else:

                s_srs = lyr.GetSpatialRef()
                #logger.debug(str(s_srs))
                #logger.debug(str(t.geom))
                
                tile_geom_in_s_srs = t.geom.Clone()

                if not t_srs.IsSame(s_srs):
                    ict = osr.CoordinateTransformation(t_srs, s_srs)
                    ct = osr.CoordinateTransformation(s_srs, t_srs)
                    tile_geom_in_s_srs.Transform(ict)

                # if the geometry crosses meridian, split it into multipolygon (else this breaks SetSpatialFilter)
                if utils.doesCross180(tile_geom_in_s_srs):
                    logger.debug("tile_geom_in_s_srs crosses 180 meridian; splitting to multiple polygons...")
                    tile_geom_in_s_srs = utils.getWrappedGeometry(tile_geom_in_s_srs)

                lyr.ResetReading()
                lyr.SetSpatialFilter(tile_geom_in_s_srs)
                feat = lyr.GetNextFeature()
                
                imginfo_list1 = []
                
                while feat:
                    
                    iinfo = mosaic.ImageInfo(feat, "RECORD", srs=s_srs)
                    
                    if iinfo.geom is not None and iinfo.geom.GetGeometryType() in (ogr.wkbPolygon, ogr.wkbMultiPolygon):
                        if not t_srs.IsSame(s_srs):
                            iinfo.geom.Transform(ct)
                            ## fix self-intersection errors caused by reprojecting over 180
                            temp = iinfo.geom.Buffer(0.1) # assumes a projected coordinate system with meters or feet as units
                            iinfo.geom = temp
                        
                        if iinfo.geom.Intersects(t.geom):
                            
                            if iinfo.scene_id in exclude_list:
                                logger.debug("Scene in exclude list, excluding: %s", iinfo.srcfp)
                                
                            elif not os.path.isfile(iinfo.srcfp) and iinfo.status != "tape":
                                #logger.info("iinfo.status != tape: {0}".format(iinfo.status != "tape"))
                                logger.warning("Scene path is invalid, excluding %s (path = %s) (status = %s)",
                                               iinfo.scene_id, iinfo.srcfp, iinfo.status)
                            elif args.require_pan:
                                srcfp = iinfo.srcfp
                                srcdir, mul_name = os.path.split(srcfp)
                                if iinfo.sensor in ["WV02", "WV03", "QB02"]:
                                    pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "GE01":
                                    if "_5V" in mul_name:
                                        pan_name_base = srcfp[:-24].replace("M0", "P0")
                                        candidates = glob.glob(pan_name_base + "*")
                                        candidates2 = [f for f in candidates if f.endswith(('.ntf', '.NTF', '.tif',
                                                                                            '.TIF'))]
                                        if len(candidates2) == 0:
                                            pan_name = ''
                                        elif len(candidates2) == 1:
                                            pan_name = os.path.basename(candidates2[0])
                                        else:
                                            pan_name = ''
                                            logger.error('%i panchromatic images match the multispectral image name '
                                                         '%s', len(candidates2), mul_name)
                                    else:
                                        pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "IK01":
                                    pan_name = mul_name.replace("blu", "pan")
                                    pan_name = mul_name.replace("msi", "pan")
                                    pan_name = mul_name.replace("bgrn", "pan")
                                pan_srcfp = os.path.join(srcdir, pan_name)
                                if not os.path.isfile(pan_srcfp):
                                    logger.debug("Image does not have a panchromatic component, excluding: %s",
                                                 iinfo.srcfp)
                                else:
                                    logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp, str(iinfo.geom))
                                    imginfo_list1.append(iinfo)
                                
                            else:
                                logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp, str(iinfo.geom))
                                imginfo_list1.append(iinfo)                                
                                
                    feat = lyr.GetNextFeature()
            
            ds = None
        
            logger.info("Number of intersects in tile %s: %i", t.name, len(imginfo_list1))
            
            if len(imginfo_list1) > 0:

                #### Get mosaic parameters
                logger.debug("Getting mosaic parameters")
                params = mosaic.getMosaicParameters(imginfo_list1[0], args)
                
                #### Remove images that do not match ref
                logger.debug("Setting image pattern filter")
                imginfo_list2 = mosaic.filterMatchingImages(imginfo_list1, params)
                logger.info("Number of images matching filter: %i", len(imginfo_list2))
                    
                if args.nosort is False:    
                    #### Sort by quality
                    logger.debug("Sorting images by quality")
                    imginfo_list3 = []
                    for iinfo in imginfo_list2:
                        
                        iinfo.getScore(params)
                        if iinfo.score > 0:
                            imginfo_list3.append(iinfo)
                    
                    # sort so highest score is last
                    imginfo_list3.sort(key=lambda x: x.score)
                    
                else:
                    imginfo_list3 = list(imginfo_list2)
                    
                ####  Overlay geoms and remove non-contributors
                logger.debug("Overlaying images to determine contributors")
                contribs = mosaic.determine_contributors(imginfo_list3, t.geom, args.min_contribution_area)
                                            
                logger.info("Number of contributing images: %i", len(contribs))
            
                if len(contribs) > 0:
                    
                    if args.build_shp:
                        
                        #######################################################
                        #### Create Shp
                        
                        shp = os.path.join(dstdir, "{}_{}_imagery.shp".format(args.mosaic, t.name))
                   
                        logger.debug("Creating shapefile of geoms: %s", shp)
                    
                        fields = [("IMAGENAME", ogr.OFTString, 100), ("SCORE", ogr.OFTReal, 0)]
                        
                        OGR_DRIVER = "ESRI Shapefile"
                        
                        ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
                        if ogrDriver is None:
                            logger.debug("OGR: Driver %s is not available", OGR_DRIVER)
                            sys.exit(-1)
                        
                        if os.path.isfile(shp):
                            ogrDriver.DeleteDataSource(shp)
                        vds = ogrDriver.CreateDataSource(shp)
                        if vds is None:
                            logger.debug("Could not create shp")
                            sys.exit(-1)
                        
                        shpd, shpn = os.path.split(shp)
                        shpbn, shpe = os.path.splitext(shpn)
                        
                        lyr = vds.CreateLayer(shpbn, t_srs, ogr.wkbPolygon)
                        if lyr is None:
                            logger.debug("ERROR: Failed to create layer: %s", shpbn)
                            sys.exit(-1)
                        
                        for fld, fdef, flen in fields:
                            field_defn = ogr.FieldDefn(fld, fdef)
                            if fdef == ogr.OFTString:
                                field_defn.SetWidth(flen)
                            if lyr.CreateField(field_defn) != 0:
                                logger.debug("ERROR: Failed to create field: %s", fld)
                        
                        for iinfo, geom in contribs:
                        
                            logger.debug("Image: %s", iinfo.srcfn)
                            
                            feat = ogr.Feature(lyr.GetLayerDefn())
                            
                            feat.SetField("IMAGENAME", iinfo.srcfn)
                            feat.SetField("SCORE", iinfo.score)
    
                            feat.SetGeometry(geom)
                            if lyr.CreateFeature(feat) != 0:
                                logger.debug("ERROR: Could not create feature for image %s", iinfo.srcfn)
                            else:
                                logger.debug("Created feature for image: %s", iinfo.srcfn)
                                
                            feat.Destroy()
                    
                    #### Write textfiles
                    if not os.path.isdir(dstdir):
                        os.makedirs(dstdir)
                    
                    otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(args.mosaic, t.name))
                    otxtpath_ontape = os.path.join(dstdir, "{}_{}_orig_ontape.csv".format(args.mosaic, t.name))
                    mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(args.mosaic, t.name))

                    rn_fromtape_basedir = os.path.join(dstdir, "renamed_fromtape")
                    rn_fromtape_path = os.path.join(rn_fromtape_basedir, t.name)

                    otxt = open(otxtpath, 'w')
                    ttxt = open(otxtpath_ontape, 'w')
                    mtxt = open(mtxtpath, 'w')

                    # write header
                    ttxt.write("{0},{1},{2}\n".format("SCENE_ID", "S_FILEPATH", "STATUS"))

                    tape_ct = 0
                    
                    for iinfo, geom in contribs:
                        
                        if not os.path.isfile(iinfo.srcfp) and iinfo.status != "tape":
                            logger.warning("Image does not exist: %s", iinfo.srcfp)
                            
                        if iinfo.status == "tape":
                            tape_ct += 1
                            ttxt.write("{0},{1},{2}\n".format(iinfo.scene_id, iinfo.srcfp, iinfo.status))
                            # get srcfp with file extension
                            srcfp_file = os.path.basename(iinfo.srcfp)
                            otxt.write("{}\n".format(os.path.join(rn_fromtape_path, srcfp_file)))

                        else:
                            otxt.write("{}\n".format(iinfo.srcfp))

                        m_fn = "{0}_u08{1}{2}.tif".format(
                            os.path.splitext(iinfo.srcfn)[0],
                            args.stretch,
                            t.epsg
                        )
                        
                        mtxt.write(os.path.join(dstdir, 'ortho', t.name, m_fn) + "\n")
 
                    otxt.close()

                    if tape_ct == 0:
                        logger.debug("No files need to be pulled from tape.")
                        os.remove(otxtpath_ontape)

                    else:
                        # make output dirs from tape
                        if not os.path.isdir(rn_fromtape_basedir):
                            os.mkdir(rn_fromtape_basedir)
                        if not os.path.isdir(rn_fromtape_path):
                            os.mkdir(rn_fromtape_path)

                        tape_tmp = os.path.join(dstdir, "{0}_{1}_tmp".format(args.mosaic, t.name))
                        if not os.path.isdir(tape_tmp):
                            os.mkdir(tape_tmp)
                        logger.warning("{0} scenes are not accessible, as they are on tape. Please use ir.py to pull "
                                       "scenes using file '{1}'. They must be put in directory '{2}', as file '{3}' "
                                       "contains hard-coded paths to said files (necessary to perform "
                                       "orthorectification). Please set a --tmp path (use '{4}').\n"
                                       "Note that if some (or all) scenes have already been pulled from tape, ir.py "
                                       "will not pull them again.\n".
                                       format(tape_ct, otxtpath_ontape, rn_fromtape_path, otxtpath, tape_tmp))

                        tape_log = "{0}_{1}_ir_log_{2}.log".format(args.mosaic, t.name,
                                                                   datetime.today().strftime("%Y%m%d%H%M%S"))
                        root_pgclib_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                                                        "pgclib", "")
                        logger.info("Suggested ir.py command:\n\n"
                                    ""
                                    "python {}ir.py -i {} -o {} --tmp {} -tm link 2>&1 | tee {}"
                        .format(root_pgclib_path, otxtpath_ontape, rn_fromtape_path, tape_tmp,
                                os.path.join(dstdir, tape_log)))
Пример #6
0
def write_to_ogr_dataset(ogr_driver_str, ogrDriver, dst_ds, dst_lyr, groups, pairs, total, db_path_prefix, fld_defs, args):
    """Write DEM records to an OGR dataset (shapefile, FileGDB, or PostgreSQL).

    Opens or creates the destination dataset and layer, then appends one
    feature per record in ``groups``, transforming each geometry into the
    target SRS given by ``args.epsg``. Invalid records are counted and
    skipped. This function terminates the process: it calls ``sys.exit`` on
    fatal errors and exits 0 on success.

    Args:
        ogr_driver_str: OGR driver name ('ESRI Shapefile', 'FileGDB',
            or 'PostgreSQL').
        ogrDriver: OGR driver object matching ``ogr_driver_str``.
        dst_ds: path (or connection string) of the destination dataset.
        dst_lyr: destination layer/table name.
        groups: dict mapping group ID -> iterable of DEM records.
        pairs: dict mapping pairname -> region name (used for REGION field).
        total: total record count, used for progress reporting.
        db_path_prefix: if truthy, LOCATION is a custom path built with this
            prefix instead of the record's source filepath.
        fld_defs: field definitions (objects with fname/ftype/fwidth/fprecision).
        args: parsed CLI namespace (mode, epsg, status, bp_paths, tnva_paths,
            dsp_original_res, include_registration, check, dryrun, ...).
    """

    ## Create dataset if it does not exist
    if ogr_driver_str == 'ESRI Shapefile':
        max_fld_width = 254
        if os.path.isfile(dst_ds):
            ds = ogrDriver.Open(dst_ds,1)
        else:
            ds = ogrDriver.CreateDataSource(dst_ds)

    elif ogr_driver_str == 'FileGDB':
        max_fld_width = 1024
        if os.path.isdir(dst_ds):
            ds = ogrDriver.Open(dst_ds,1)
        else:
            ds = ogrDriver.CreateDataSource(dst_ds)

    elif ogr_driver_str == 'PostgreSQL':
        max_fld_width = 1024
        # DB must already exist
        ds = ogrDriver.Open(dst_ds,1)

    else:
        logger.error("Format {} is not supported".format(ogr_driver_str))
        # BUG FIX: ds was previously left unassigned here, so the
        # "if ds is not None" check below raised NameError instead of
        # reporting the failure and exiting cleanly.
        ds = None

    # Determine the STATUS attribute value for scene-mode records.
    if args.status:
        status = args.status
    elif args.bp_paths:
        status = 'tape'
    else:
        status = 'online'

    if ds is not None:

        ## Create table if it does not exist
        layer = ds.GetLayerByName(dst_lyr)
        fld_list = [f.fname for f in fld_defs]

        err.err_level = gdal.CE_None
        tgt_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        tgt_srs.ImportFromEPSG(args.epsg)
        if err.err_level >= gdal.CE_Warning:
            raise RuntimeError(err.err_level, err.err_no, err.err_msg)

        if not layer:
            logger.info("Creating table...")

            layer = ds.CreateLayer(dst_lyr, tgt_srs, ogr.wkbMultiPolygon)
            if layer:
                for field_def in fld_defs:
                    field = ogr.FieldDefn(field_def.fname, field_def.ftype)
                    # Clamp string widths to the driver's maximum.
                    field.SetWidth(min(max_fld_width, field_def.fwidth))
                    field.SetPrecision(field_def.fprecision)
                    layer.CreateField(field)

        ## Append Records
        if layer:
            # Get field widths
            lyr_def = layer.GetLayerDefn()
            fwidths = {lyr_def.GetFieldDefn(i).GetName().upper(): lyr_def.GetFieldDefn(i).GetWidth() for i in range(lyr_def.GetFieldCount())}

            logger.info("Appending records...")
            #### loop through records and add features
            i=0
            recordids = []
            invalid_record_cnt = 0

            for groupid in groups:
                for record in groups[groupid]:
                    i+=1
                    progress(i,total,"features written")
                    feat = ogr.Feature(layer.GetLayerDefn())
                    valid_record = True

                    ## Set attributes
                    ## Fields for scene DEM
                    if args.mode == 'scene':

                        attrib_map = {
                            'SCENEDEMID': record.dsp_sceneid if (args.dsp_original_res and record.is_dsp) else record.sceneid,
                            'STRIPDEMID': record.dsp_stripdemid if (args.dsp_original_res and record.is_dsp) else record.stripdemid,
                            'STATUS': status,
                            'PAIRNAME': record.pairname,
                            'SENSOR1': record.sensor1,
                            'SENSOR2': record.sensor2,
                            'ACQDATE1': record.acqdate1.strftime('%Y-%m-%d'),
                            'ACQDATE2': record.acqdate2.strftime('%Y-%m-%d'),
                            'CATALOGID1': record.catid1,
                            'CATALOGID2': record.catid2,
                            'HAS_LSF': int(os.path.isfile(record.lsf_dem)),
                            'HAS_NONLSF': int(os.path.isfile(record.dem)),
                            'IS_XTRACK': int(record.is_xtrack),
                            'IS_DSP': 0 if args.dsp_original_res else int(record.is_dsp),
                            'ALGM_VER': record.algm_version,
                            'PROJ4': record.proj4,
                            'EPSG': record.epsg,
                        }

                        # File-size attributes come from dsp_* attrs when
                        # indexing downsampled products at original res.
                        attr_pfx = 'dsp_' if args.dsp_original_res else ''
                        for k in record.filesz_attrib_map:
                            attrib_map[k.upper()] = getattr(record,'{}{}'.format(attr_pfx,k))

                        # Test if filesz attr is valid for dsp original res records
                        if args.dsp_original_res:
                            if attrib_map['FILESZ_DEM'] is None:
                                logger.error(
                                    "Original res filesz_dem is empty for {}. Record skipped".format(record.sceneid))
                                valid_record = False
                            elif attrib_map['FILESZ_DEM'] == 0:
                                logger.warning(
                                    "Original res filesz_dem is 0 for {}. Record will still be written".format(record.sceneid))

                        # Test if filesz attr is valid for normal records
                        elif not attrib_map['FILESZ_DEM'] and not attrib_map['FILESZ_LSF']:
                            logger.warning(
                                "DEM and LSF DEM file size is zero or null for {}. Record will still be written".format(record.sceneid))
                            valid_record = False

                        # Set region
                        try:
                            region = pairs[record.pairname]
                        except KeyError as e:
                            region = None
                        else:
                            attrib_map['REGION'] = region

                        if db_path_prefix:
                            if args.bp_paths:
                                # https://blackpearl-data2.pgc.umn.edu/dem/setsm/scene/WV02/2015/05/
                                # WV02_20150506_1030010041510B00_1030010043050B00_50cm_v040002.tar
                                custom_path = "{}/{}/{}/{}/{}.tar".format(
                                    args.mode,               # mode (scene, strip, tile)
                                    record.pairname[:4],     # sensor
                                    record.pairname[5:9],    # year
                                    record.pairname[9:11],   # month
                                    groupid                  # mode-specific group ID
                                )

                            elif args.tnva_paths:
                                # /mnt/pgc/data/elev/dem/setsm/ArcticDEM/region/arcticdem_01_iceland/scenes/
                                # 2m/WV01_20200630_10200100991E2C00_102001009A862700_2m_v040204/
                                # WV01_20200630_10200100991E2C00_102001009A862700_504471479080_01_P001_504471481090_01_P001_2_meta.txt

                                if not region:
                                    logger.error("Pairname not found in region lookup {}, cannot built custom path".format(record.pairname))
                                    valid_record = False

                                else:
                                    pretty_project = PROJECTS[region.split('_')[0]]
                                    res_dir = record.res_str + '_dsp' if record.is_dsp else record.res_str

                                    custom_path = "{}/{}/region/{}/scenes/{}/{}/{}".format(
                                        db_path_prefix,
                                        pretty_project,         # project (e.g. ArcticDEM)
                                        region,                 # region
                                        res_dir,                # e.g. 2m, 50cm, 2m_dsp
                                        groupid,                # strip ID
                                        record.srcfn            # file name (meta.txt)
                                    )
                            else:
                                logger.error("Mode {} does not support the specified custom path option, skipping record".format(args.mode))
                                valid_record = False

                    ## Fields for strip DEM
                    if args.mode == 'strip':
                        attrib_map = {
                            'DEM_ID': record.stripid,
                            'STRIPDEMID': record.stripdemid,
                            'PAIRNAME': record.pairname,
                            'SENSOR1': record.sensor1,
                            'SENSOR2': record.sensor2,
                            'ACQDATE1': record.acqdate1.strftime('%Y-%m-%d'),
                            'ACQDATE2': record.acqdate2.strftime('%Y-%m-%d'),
                            'CATALOGID1': record.catid1,
                            'CATALOGID2': record.catid2,
                            'IS_LSF': int(record.is_lsf),
                            'IS_XTRACK': int(record.is_xtrack),
                            'EDGEMASK': int(record.mask_tuple[0]),
                            'WATERMASK': int(record.mask_tuple[1]),
                            'CLOUDMASK': int(record.mask_tuple[2]),
                            'ALGM_VER': record.algm_version,
                            'FILESZ_DEM': record.filesz_dem,
                            'FILESZ_MT': record.filesz_mt,
                            'FILESZ_OR': record.filesz_or,
                            'FILESZ_OR2': record.filesz_or2,
                            'PROJ4': record.proj4,
                            'EPSG': record.epsg,
                            'GEOCELL': record.geocell,
                        }

                        ## Set region
                        try:
                            region = pairs[record.pairname]
                        except KeyError as e:
                            pass
                        else:
                            attrib_map['REGION'] = region

                        if record.version:
                            attrib_map['REL_VER'] = record.version
                        if record.density:
                            attrib_map['DENSITY'] = record.density
                        else:
                            attrib_map['DENSITY'] = -9999

                        ## If registration info exists
                        if args.include_registration:
                            if len(record.reginfo_list) > 0:
                                for reginfo in record.reginfo_list:
                                    if reginfo.name == 'ICESat':
                                        attrib_map["DX"] = reginfo.dx
                                        attrib_map["DY"] = reginfo.dy
                                        attrib_map["DZ"] = reginfo.dz
                                        attrib_map["REG_SRC"] = 'ICESat'
                                        attrib_map["NUM_GCPS"] = reginfo.num_gcps
                                        attrib_map["MEANRESZ"] = reginfo.mean_resid_z

                        ## Set path folders for use if db_path_prefix specified
                        if db_path_prefix:
                            if args.bp_paths:
                                custom_path = "{}/{}/{}/{}/{}/{}.tar".format(
                                    db_path_prefix,
                                    args.mode,               # mode (scene, strip, tile)
                                    record.pairname[:4],     # sensor
                                    record.pairname[5:9],    # year
                                    record.pairname[9:11],   # month
                                    groupid                  # mode-specific group ID
                                )
                            else:
                                logger.error("Mode {} does not support the specified custom path option, skipping record".format(args.mode))
                                valid_record = False

                    ## Fields for tile DEM
                    if args.mode == 'tile':
                        attrib_map = {
                            'DEM_ID': record.tileid,
                            'TILE': record.tilename,
                            'NUM_COMP': record.num_components,
                            'FILESZ_DEM': record.filesz_dem,
                        }

                        ## Optional attributes
                        if record.version:
                            attrib_map['REL_VER'] = record.version
                            version = record.version
                        else:
                            version = 'novers'
                        if record.density:
                            attrib_map['DENSITY'] = record.density
                        else:
                            attrib_map['DENSITY'] = -9999

                        if args.include_registration:
                            if record.reg_src:
                                attrib_map["REG_SRC"] = record.reg_src
                                attrib_map["NUM_GCPS"] = record.num_gcps
                            if record.mean_resid_z:
                                attrib_map["MEANRESZ"] = record.mean_resid_z

                        ## Set path folders for use if db_path_prefix specified
                        if db_path_prefix:
                            if args.bp_paths:
                                custom_path = "{}/{}/{}/{}/{}/{}.tar".format(
                                    db_path_prefix,
                                    record.mode,               # mode (scene, strip, tile)
                                    args.project.lower(),    # project
                                    record.res,              # resolution
                                    version,                 # version
                                    groupid                  # mode-specific group ID
                                )
                            else:
                                logger.error("Mode {} does not support the specified custom path option, skipping record".format(args.mode))
                                valid_record = False

                    ## Common fields
                    if valid_record:
                        ## Common Attributes across all modes
                        attrib_map['INDEX_DATE'] = datetime.datetime.today().strftime('%Y-%m-%d')
                        attrib_map['CR_DATE'] = record.creation_date.strftime('%Y-%m-%d')
                        attrib_map['ND_VALUE'] = record.ndv
                        if args.dsp_original_res:
                            res = record.dsp_dem_res
                        else:
                            res = (record.xres + record.yres) / 2.0
                        attrib_map['DEM_RES'] = res

                        ## Set location
                        if db_path_prefix:
                            location = custom_path
                        else:
                            location = record.srcfp
                        attrib_map['LOCATION'] = location

                        ## Transform and write geom
                        src_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
                        src_srs.ImportFromWkt(record.proj)

                        if not record.geom:
                            logger.error('No valid geom found, feature skipped: {}'.format(record.sceneid))
                            valid_record = False
                        else:
                            temp_geom = record.geom.Clone()
                            transform = osr.CoordinateTransformation(src_srs,tgt_srs)
                            try:
                                temp_geom.Transform(transform)
                            except TypeError as e:
                                logger.error('Geom transformation failed, feature skipped: {} {}'.format(e, record.sceneid))
                                valid_record = False
                            else:

                                ## Get centroid coordinates
                                centroid = temp_geom.Centroid()
                                if 'CENT_LAT' in fld_list:
                                    attrib_map['CENT_LAT'] = centroid.GetY()
                                    attrib_map['CENT_LON'] = centroid.GetX()

                                ## If srs is geographic and geom crosses 180, split geom into 2 parts
                                # BUG FIX: IsGeographic is a method; the original
                                # referenced it without calling it, which is always
                                # truthy, making the else branch unreachable.
                                if tgt_srs.IsGeographic():

                                    ## Get Lat and Lon coords in arrays
                                    lons = []
                                    lats = []
                                    ring = temp_geom.GetGeometryRef(0)  #### assumes a 1 part polygon
                                    for j in range(0, ring.GetPointCount()):
                                        pt = ring.GetPoint(j)
                                        lons.append(pt[0])
                                        lats.append(pt[1])

                                    ## Test if image crosses 180
                                    if max(lons) - min(lons) > 180:
                                        split_geom = wrap_180(temp_geom)
                                        feat_geom = split_geom
                                    else:
                                        mp_geom = ogr.ForceToMultiPolygon(temp_geom)
                                        feat_geom = mp_geom

                                else:
                                    mp_geom = ogr.ForceToMultiPolygon(temp_geom)
                                    feat_geom = mp_geom

                    ## Write feature
                    if valid_record:
                        for fld,val in attrib_map.items():
                            if fld in fwidths:
                                if isinstance(val, str) and len(val) > fwidths[fld]:
                                    logger.warning("Attribute value {} is too long for field {} (width={}). Feature skipped".format(
                                        val, fld, fwidths[fld]
                                    ))
                                    valid_record = False
                            else:
                                logger.warning("Field {} is not in target table. Feature skipped".format(fld))
                                valid_record = False

                            if sys.version_info[0] < 3:  # force unicode to str for a bug in Python2 GDAL's SetField.
                                fld = fld.encode('utf-8')
                                val = val if not isinstance(val, unicode) else val.encode('utf-8')
                            feat.SetField(fld, val)
                        feat.SetGeometry(feat_geom)

                        ## Add new feature to layer
                        if not valid_record:
                            invalid_record_cnt += 1
                        else:
                            if not args.dryrun:
                                # Store record identifiers for later checking
                                recordids.append(recordid_map[args.mode].format(**attrib_map))

                                # Append record
                                err.err_level = gdal.CE_None
                                try:
                                    # PostgreSQL writes are wrapped in a transaction.
                                    if ogr_driver_str == 'PostgreSQL':
                                        layer.StartTransaction()
                                        layer.CreateFeature(feat)
                                        layer.CommitTransaction()
                                    else:
                                        layer.CreateFeature(feat)
                                except Exception as e:
                                    raise e
                                else:
                                    if err.err_level >= gdal.CE_Warning:
                                        raise RuntimeError(err.err_level, err.err_no, err.err_msg)
                                finally:
                                    gdal.PopErrorHandler()

            if invalid_record_cnt > 0:
                logger.info("{} invalid records skipped".format(invalid_record_cnt))

            if len(recordids) == 0 and not args.dryrun:
                logger.error("No valid records found")
                sys.exit(-1)

            # Check contents of layer for all records
            if args.check and not args.dryrun:
                logger.info("Checking for new records in target table")
                layer.ResetReading()
                attrib_maps = [{id_fld: feat.GetField(id_fld) for id_fld in id_flds if id_fld in fld_list} for feat in layer]
                layer_recordids = [recordid_map[args.mode].format(**attrib_map) for attrib_map in attrib_maps]
                layer_recordids = set(layer_recordids)

                err_cnt = 0
                for recordid in recordids:
                    if recordid not in layer_recordids:
                        err_cnt += 1
                        logger.error("Record not found in target layer: {}".format(recordid))

                if err_cnt > 0:
                    sys.exit(-1)

        else:
            logger.error('Cannot open layer: {}'.format(dst_lyr))
            ds = None
            sys.exit(-1)

        # Release the dataset handle (flushes writes for file-based drivers).
        ds = None

    else:
        logger.info("Cannot open dataset: {}".format(dst_ds))
        sys.exit(-1)

    if args.dryrun:
        logger.info("Done (dryrun)")
    else:
        logger.info("Done")
    sys.exit(0)
Пример #7
0
    def _get_image_info(self, src_image, spatial_ref):
        """Return the image footprint as an OGR polygon in the target SRS.

        Derives the corner coordinates either from 4 corner GCPs (when
        present) or from the geotransform, builds a closed-ring polygon,
        and transforms it into ``spatial_ref.srs`` if the source projection
        differs.

        Args:
            src_image: image file name (joined with self.srcdir for IKONOS).
            spatial_ref: object exposing the target SRS as ``.srs``.

        Returns:
            ogr.Geometry polygon of the image extent in the target SRS, or
            None if the dataset cannot be opened.
        """

        if self.sensor == 'IK01' and "_msi_" in src_image:
            # BUG FIX: the original called the string object itself
            # (src_image("_msi_", "_blu_")), a TypeError at runtime; the
            # intent is to substitute the blue-band file name.
            src_image_name = src_image.replace("_msi_", "_blu_")
            src_image = os.path.join(self.srcdir, src_image_name)

        ds = gdal.Open(src_image, gdalconst.GA_ReadOnly)
        if ds is not None:

            ####  Get extent from GCPs
            num_gcps = ds.GetGCPCount()

            if num_gcps == 4:
                gcps = ds.GetGCPs()
                proj = ds.GetGCPProjection()

                gcp_dict = {}
                # Map vendor GCP IDs (names or digits) to corner slots:
                # 1=UL, 2=UR, 3=LR, 4=LL.
                id_dict = {
                    "UpperLeft": 1,
                    "1": 1,
                    "UpperRight": 2,
                    "2": 2,
                    "LowerLeft": 4,
                    "4": 4,
                    "LowerRight": 3,
                    "3": 3
                }

                for gcp in gcps:
                    gcp_dict[id_dict[gcp.Id]] = [
                        float(gcp.GCPPixel),
                        float(gcp.GCPLine),
                        float(gcp.GCPX),
                        float(gcp.GCPY),
                        float(gcp.GCPZ)
                    ]
                ulx = gcp_dict[1][2]
                uly = gcp_dict[1][3]
                urx = gcp_dict[2][2]
                ury = gcp_dict[2][3]
                llx = gcp_dict[4][2]
                lly = gcp_dict[4][3]
                lrx = gcp_dict[3][2]
                lry = gcp_dict[3][3]

                xsize = gcp_dict[1][0] - gcp_dict[2][0]
                ysize = gcp_dict[1][1] - gcp_dict[4][1]

            else:
                # No usable GCPs: derive corners from the geotransform.
                xsize = ds.RasterXSize
                ysize = ds.RasterYSize
                proj = ds.GetProjectionRef()
                gtf = ds.GetGeoTransform()
                # Use logger instead of a leftover debug print().
                logger.debug("Geotransform: %s", str(gtf))

                ulx = gtf[0] + 0 * gtf[1] + 0 * gtf[2]
                uly = gtf[3] + 0 * gtf[4] + 0 * gtf[5]
                urx = gtf[0] + xsize * gtf[1] + 0 * gtf[2]
                ury = gtf[3] + xsize * gtf[4] + 0 * gtf[5]
                llx = gtf[0] + 0 * gtf[1] + ysize * gtf[2]
                lly = gtf[3] + 0 * gtf[4] + ysize * gtf[5]
                lrx = gtf[0] + xsize * gtf[1] + ysize * gtf[2]
                lry = gtf[3] + xsize * gtf[4] + ysize * gtf[5]

            ds = None

            ####  Create geometry objects (ring closed back to UL)
            ring = ogr.Geometry(ogr.wkbLinearRing)
            ring.AddPoint(ulx, uly)
            ring.AddPoint(urx, ury)
            ring.AddPoint(lrx, lry)
            ring.AddPoint(llx, lly)
            ring.AddPoint(ulx, uly)

            extent_geom = ogr.Geometry(ogr.wkbPolygon)
            extent_geom.AddGeometry(ring)

            #### Create srs objects
            s_srs = utils.osr_srs_preserve_axis_order(
                osr.SpatialReference(proj))
            t_srs = spatial_ref.srs
            st_ct = osr.CoordinateTransformation(s_srs, t_srs)

            #### Transform geoms to target srs
            if not s_srs.IsSame(t_srs):
                extent_geom.Transform(st_ct)
            #logger.info("Projected extent: %s", str(extent_geom))
            return extent_geom

        else:
            logger.error("Cannot open dataset: %s", src_image)
            return None
Пример #8
0
def main():
    """Shelve SETSM DEM files into per-tile folders.

    Each DEM's footprint centroid is intersected with a shapefile index
    grid; all files sharing the DEM's strip ID are copied (or hard-linked
    with --try-link) into ``dst/<tile_name>/``.  DEMs whose centroid hits
    zero tiles, or more than one tile, are reported and skipped.
    """

    #### Set Up Arguments
    parser = argparse.ArgumentParser(
        description=
        "shelve setsm files by centroid location relative to a shp index")

    #### Positional Arguments
    parser.add_argument('src', help="source directory or dem")
    parser.add_argument('dst', help="destination directory")
    parser.add_argument('shp', help='shp index defining grid scheme')
    parser.add_argument('field', help='shp index field with grid name')

    #### Optional Arguments
    parser.add_argument('--try-link',
                        action='store_true',
                        default=False,
                        help="try linking instead of copying files")
    parser.add_argument('--log', help="directory for log output")
    parser.add_argument('--overwrite',
                        action='store_true',
                        default=False,
                        help="overwrite existing index")
    parser.add_argument('--dryrun',
                        action='store_true',
                        default=False,
                        help="print actions without executing")

    #### Parse Arguments
    args = parser.parse_args()

    #### Verify Arguments
    if not os.path.isdir(args.src) and not os.path.isfile(args.src):
        parser.error("Source directory or file does not exist: %s" % args.src)

    src = os.path.abspath(args.src)
    dst = os.path.abspath(args.dst)
    shp = os.path.abspath(args.shp)

    ## Console logging at INFO; optional file logging at DEBUG below
    lsh = logging.StreamHandler()
    lsh.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lsh.setFormatter(formatter)
    logger.addHandler(lsh)

    if args.log:
        if os.path.isdir(args.log):
            tm = datetime.now()
            logfile = os.path.join(
                args.log,
                "shelve_setsm{}.log".format(tm.strftime("%Y%m%d%H%M%S")))
        else:
            ## parser.error exits, so logfile is always bound past here
            parser.error('log folder does not exist: {}'.format(args.log))

        lfh = logging.FileHandler(logfile)
        lfh.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                      '%m-%d-%Y %H:%M:%S')
        lfh.setFormatter(formatter)
        logger.addHandler(lfh)

    ### Open shp, verify field, verify projection, extract tile geoms
    tiles = {}
    ds = ogr.Open(shp)
    if ds is not None:

        lyr = ds.GetLayerByName(os.path.splitext(os.path.basename(shp))[0])
        lyr.ResetReading()

        i = lyr.FindFieldIndex(args.field, 1)
        if i == -1:
            logger.error("Cannot locate field {} in {}".format(
                args.field, args.shp))
            sys.exit(-1)

        shp_srs = lyr.GetSpatialRef()
        if shp_srs is None:
            logger.error("Shp must have a defined spatial reference")
            sys.exit(-1)

        ## Collect tile geoms keyed by tile name; first feature wins on
        ## duplicate names
        for feat in lyr:
            tile_name = feat.GetFieldAsString(i)
            tile_geom = feat.GetGeometryRef().Clone()
            if tile_name not in tiles:
                tiles[tile_name] = tile_geom
            else:
                logger.error(
                    "Found features with duplicate name: {} - Ignoring 2nd feature"
                    .format(tile_name))

    else:
        ## BUG FIX: original message reported `src`, but it is the shp
        ## index that failed to open here
        logger.error("Cannot open {}".format(shp))

    if len(tiles) == 0:
        logger.error("No features found in shp")

    else:
        rasters = []

        #### ID rasters
        logger.info('Identifying DEMs')
        if os.path.isfile(src):
            logger.info(src)
            try:
                raster = dem.SetsmDem(src)
            except RuntimeError as e:
                logger.error(e)
            else:
                ## Only DEMs with a meta.txt or mdf.txt can be shelved
                if raster.metapath or os.path.isfile(raster.mdf):
                    rasters.append(raster)
                else:
                    logger.warning(
                        "DEM does not include a valid meta.txt or mdf.txt, skipping: {}"
                        .format(raster.srcfp))

        else:
            for root, dirs, files in os.walk(src):
                for f in files:
                    if f.endswith("_dem.tif"):
                        logger.debug(os.path.join(root, f))
                        try:
                            raster = dem.SetsmDem(os.path.join(root, f))
                        except RuntimeError as e:
                            logger.error(e)
                        else:
                            if raster.metapath or os.path.isfile(raster.mdf):
                                rasters.append(raster)
                            else:
                                logger.warning(
                                    "DEM does not include a valid meta.txt or mdf.txt, skipping: {}"
                                    .format(raster.srcfp))

        logger.info('Shelving DEMs')
        total = len(rasters)
        i = 0
        for raster in rasters:
            #### print count/total as progress meter
            i += 1
            #logger.info("[{} of {}] - {}".format(i,total,raster.stripid))

            ## Convert geom to match shp srs and get centroid
            raster.get_metafile_info()
            geom_copy = raster.exact_geom.Clone()
            srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
            srs.ImportFromProj4(raster.proj4_meta)
            if not shp_srs.IsSame(srs):
                ctf = osr.CoordinateTransformation(srs, shp_srs)
                geom_copy.Transform(ctf)
            centroid = geom_copy.Centroid()

            ## Run intersection with each tile
            tile_overlaps = []
            for tile_name, tile_geom in tiles.items():
                if centroid.Intersects(tile_geom):
                    tile_overlaps.append(tile_name)

            ## Report an error on multiple intersections or zero intersections
            if len(tile_overlaps) == 0:
                logger.error(
                    "raster {} does not intersect the index shp, skipping".
                    format(raster.srcfn))

            elif len(tile_overlaps) > 1:
                logger.error(
                    "raster {} intersects more than one tile ({}), skipping".
                    format(raster.srcfn, ','.join(tile_overlaps)))

            else:
                logger.info("{} shelved to tile {}".format(
                    raster.stripid, tile_overlaps[0]))
                dst_dir = os.path.join(dst, tile_overlaps[0])

                if not os.path.isdir(dst_dir):
                    if not args.dryrun:
                        os.makedirs(dst_dir)

                ## Move every file sharing this DEM's strip ID prefix
                for ifp in glob.glob(
                        os.path.join(raster.srcdir, raster.stripid) + "*"):
                    ofp = os.path.join(dst_dir, os.path.basename(ifp))
                    if os.path.isfile(ofp) and args.overwrite:
                        logger.debug("Copying {} to {}".format(ifp, ofp))
                        if not args.dryrun:
                            os.remove(ofp)
                            if args.try_link:
                                os.link(ifp, ofp)
                            else:
                                shutil.copy2(ifp, ofp)

                    elif not os.path.isfile(ofp):
                        logger.debug("Copying {} to {}".format(ifp, ofp))
                        if not args.dryrun:
                            if args.try_link:
                                os.link(ifp, ofp)
                            else:
                                shutil.copy2(ifp, ofp)

                    else:
                        logger.debug(
                            "File already exists, skipping {} to {}".format(
                                ifp, ofp))

    logger.info('Done')
Пример #9
0
def write_to_ogr_dataset(ogr_driver_str, ogrDriver, dst_ds, dst_lyr, groups,
                         pairs, total, db_path_prefix, fld_defs, args):
    """Append DEM footprint records to an OGR dataset.

    Opens (or creates, for shp/FileGDB) the dataset *dst_ds*, creates the
    layer *dst_lyr* with the field definitions in *fld_defs* if needed,
    then writes one feature per record in *groups*.  Geometries are
    transformed to the target SRS (``args.epsg``); in geographic SRSs,
    footprints crossing the 180 meridian are split via ``wrap_180``.

    Args:
        ogr_driver_str: one of 'ESRI Shapefile', 'FileGDB', 'PostgreSQL'.
        ogrDriver: the matching ogr driver object.
        groups: dict of group ID -> list of DEM records; record attributes
            used depend on ``args.mode`` ('scene', 'strip', or 'tile').
        pairs: dict mapping pairname -> region (scene mode only).
        total: total record count, used for the progress meter.
        db_path_prefix: if set, LOCATION is a tar path under this prefix;
            otherwise the record's source filepath is used.
    """

    ## Create dataset if it does not exist.  `ds` starts as None so an
    ## unsupported format falls through to the "Cannot open dataset"
    ## branch below (the original raised NameError here).
    ds = None
    if ogr_driver_str == 'ESRI Shapefile':
        if os.path.isfile(dst_ds):
            ds = ogrDriver.Open(dst_ds, 1)
        else:
            ds = ogrDriver.CreateDataSource(dst_ds)

    elif ogr_driver_str == 'FileGDB':
        if os.path.isdir(dst_ds):
            ds = ogrDriver.Open(dst_ds, 1)
        else:
            ds = ogrDriver.CreateDataSource(dst_ds)

    elif ogr_driver_str == 'PostgreSQL':
        # DB must already exist
        ds = ogrDriver.Open(dst_ds, 1)

    else:
        logger.error("Format {} is not supported".format(ogr_driver_str))

    ## Records shipped to tape (BlackPearl paths) are flagged accordingly
    if args.bp_paths:
        status = 'tape'
    else:
        status = 'online'

    if ds is not None:

        ## Create table if it does not exist
        layer = ds.GetLayerByName(dst_lyr)
        fld_list = [f.fname for f in fld_defs]

        tgt_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        tgt_srs.ImportFromEPSG(args.epsg)

        if not layer:
            logger.info("Creating table...")

            layer = ds.CreateLayer(dst_lyr, tgt_srs, ogr.wkbMultiPolygon)
            if layer:
                for field_def in fld_defs:
                    field = ogr.FieldDefn(field_def.fname, field_def.ftype)
                    field.SetWidth(field_def.fwidth)
                    field.SetPrecision(field_def.fprecision)
                    layer.CreateField(field)

        ## Append Records
        if layer:
            logger.info("Appending records...")
            #### loop through records and add features
            i = 0
            for groupid in groups:
                for record in groups[groupid]:
                    i += 1
                    progress(i, total, "features written")
                    if not args.dryrun:

                        feat = ogr.Feature(layer.GetLayerDefn())
                        valid_record = True

                        ## Set attributes
                        ## Fields for scene DEM
                        if args.mode == 'scene':

                            attrib_map = {
                                'SCENEDEMID': record.sceneid,
                                'STRIPDEMID': record.stripid,
                                'STATUS': status,
                                'PAIRNAME': record.pairname,
                                'SENSOR1': record.sensor1,
                                'SENSOR2': record.sensor2,
                                'ACQDATE1':
                                record.acqdate1.strftime('%Y-%m-%d'),
                                'ACQDATE2':
                                record.acqdate2.strftime('%Y-%m-%d'),
                                'CATALOGID1': record.catid1,
                                'CATALOGID2': record.catid2,
                                'HAS_LSF': int(os.path.isfile(record.lsf_dem)),
                                'HAS_NONLSF': int(os.path.isfile(record.dem)),
                                'ALGM_VER': record.algm_version,
                                'FILESZ_DEM': record.filesz_dem,
                                'FILESZ_LSF': record.filesz_lsf,
                                'FILESZ_MT': record.filesz_mt,
                                'FILESZ_OR': record.filesz_or,
                                'PROJ4': record.proj4,
                                'EPSG': record.epsg,
                            }

                            ## Set region; REGION is simply omitted when
                            ## the pairname is not in the lookup
                            try:
                                region = pairs[record.pairname]
                            except KeyError:
                                pass
                            else:
                                attrib_map['REGION'] = region

                            ## Set path folders within bucket for use if db_path_prefix specified
                            path_prefix_dirs = "{}/{}/{}".format(
                                record.pairname[:4],  # sensor
                                record.pairname[5:9],  # year
                                record.pairname[9:11],  # month
                            )

                        ## Fields for strip DEM
                        if args.mode == 'strip':
                            attrib_map = {
                                'DEM_ID': record.stripid,
                                'PAIRNAME': record.pairname,
                                'SENSOR1': record.sensor1,
                                'SENSOR2': record.sensor2,
                                'ACQDATE1':
                                record.acqdate1.strftime('%Y-%m-%d'),
                                'ACQDATE2':
                                record.acqdate2.strftime('%Y-%m-%d'),
                                'CATALOGID1': record.catid1,
                                'CATALOGID2': record.catid2,
                                'IS_LSF': int(record.is_lsf),
                                'ALGM_VER': record.algm_version,
                                'FILESZ_DEM': record.filesz_dem,
                                'FILESZ_MT': record.filesz_mt,
                                'FILESZ_OR': record.filesz_or,
                                'PROJ4': record.proj4,
                                'EPSG': record.epsg,
                                'GEOCELL': record.geocell,
                            }

                            if record.version:
                                attrib_map['REL_VER'] = record.version
                            if record.density:
                                attrib_map['DENSITY'] = record.density
                            else:
                                attrib_map['DENSITY'] = -9999

                            ## If registration info exists
                            if len(record.reginfo_list) > 0:
                                for reginfo in record.reginfo_list:
                                    if reginfo.name == 'ICESat':
                                        attrib_map["DX"] = reginfo.dx
                                        attrib_map["DY"] = reginfo.dy
                                        attrib_map["DZ"] = reginfo.dz
                                        attrib_map["REG_SRC"] = 'ICESat'
                                        attrib_map[
                                            "NUM_GCPS"] = reginfo.num_gcps
                                        attrib_map[
                                            "MEANRESZ"] = reginfo.mean_resid_z

                            ## Set path folders within bucket for use if db_path_prefix specified
                            path_prefix_dirs = "{}/{}/{}".format(
                                record.pairname[:4],  # sensor
                                record.pairname[5:9],  # year
                                record.pairname[9:11],  # month
                            )

                        ## Fields for tile DEM
                        if args.mode == 'tile':
                            attrib_map = {
                                'DEM_ID': record.tileid,
                                'TILE': record.tilename,
                                'NUM_COMP': record.num_components,
                                'FILESZ_DEM': record.filesz_dem,
                            }

                            ## Optional attributes
                            if record.version:
                                attrib_map['REL_VER'] = record.version
                                version = record.version
                            else:
                                version = 'novers'
                            if record.density:
                                attrib_map['DENSITY'] = record.density
                            else:
                                attrib_map['DENSITY'] = -9999
                            if record.reg_src:
                                attrib_map["REG_SRC"] = record.reg_src
                                attrib_map["NUM_GCPS"] = record.num_gcps
                            if record.mean_resid_z:
                                attrib_map["MEANRESZ"] = record.mean_resid_z

                            ## Set path folders within bucket for use if db_path_prefix specified
                            if db_path_prefix:
                                path_prefix_dirs = "{}/{}/{}".format(
                                    args.project.lower(),  # project
                                    record.res,  # resolution
                                    version  # version
                                )

                        ## Common attributes across all modes
                        attrib_map['INDEX_DATE'] = datetime.datetime.today(
                        ).strftime('%Y-%m-%d')
                        attrib_map['CR_DATE'] = record.creation_date.strftime(
                            '%Y-%m-%d')
                        attrib_map['ND_VALUE'] = record.ndv
                        attrib_map['DEM_RES'] = (record.xres +
                                                 record.yres) / 2.0

                        ## Set location
                        if db_path_prefix:
                            location = '{}/{}/{}/{}.tar'.format(
                                db_path_prefix,
                                args.mode,  # mode (scene, strip, tile)
                                path_prefix_dirs,  # mode-specific path prefix
                                groupid  # mode-specific group ID
                            )
                        else:
                            location = record.srcfp
                        attrib_map['LOCATION'] = location

                        ## Transform and write geom
                        src_srs = utils.osr_srs_preserve_axis_order(
                            osr.SpatialReference())
                        src_srs.ImportFromWkt(record.proj)

                        if not record.geom:
                            logger.error(
                                'No valid geom found, feature skipped: {}'.
                                format(record.sceneid))
                            valid_record = False
                        else:
                            temp_geom = record.geom.Clone()
                            transform = osr.CoordinateTransformation(
                                src_srs, tgt_srs)
                            try:
                                temp_geom.Transform(transform)
                            except TypeError as e:
                                logger.error(
                                    'Geom transformation failed, feature skipped: {}'
                                    .format(record.sceneid))
                                valid_record = False
                            else:

                                ## Get centroid coordinates
                                centroid = temp_geom.Centroid()
                                if 'CENT_LAT' in fld_list:
                                    attrib_map['CENT_LAT'] = centroid.GetY()
                                    attrib_map['CENT_LON'] = centroid.GetX()

                                ## If srs is geographic and geom crosses 180, split geom into 2 parts
                                ## BUG FIX: IsGeographic is a method; without
                                ## the call parens it was always truthy
                                if tgt_srs.IsGeographic():

                                    ## Get Lat and Lon coords in arrays
                                    lons = []
                                    lats = []
                                    ring = temp_geom.GetGeometryRef(
                                        0)  #### assumes a 1 part polygon
                                    for j in range(0, ring.GetPointCount()):
                                        pt = ring.GetPoint(j)
                                        lons.append(pt[0])
                                        lats.append(pt[1])

                                    ## Test if image crosses 180
                                    if max(lons) - min(lons) > 180:
                                        split_geom = wrap_180(temp_geom)
                                        feat_geom = split_geom
                                    else:
                                        mp_geom = ogr.ForceToMultiPolygon(
                                            temp_geom)
                                        feat_geom = mp_geom

                                else:
                                    mp_geom = ogr.ForceToMultiPolygon(
                                        temp_geom)
                                    feat_geom = mp_geom

                        ## Write feature
                        if valid_record:
                            for fld, val in attrib_map.items():
                                feat.SetField(fld, val)
                            feat.SetGeometry(feat_geom)

                            ## Add new feature to layer.
                            ## BUG FIX: `in ('PostgreSQL')` was a substring
                            ## test on a plain string, not tuple membership
                            if ogr_driver_str == 'PostgreSQL':
                                layer.StartTransaction()
                                layer.CreateFeature(feat)
                                layer.CommitTransaction()
                            else:
                                layer.CreateFeature(feat)

        else:
            logger.error('Cannot open layer: {}'.format(dst_lyr))

        ds = None

    else:
        ## Consistency: this is a failure path, log at ERROR not INFO
        logger.error("Cannot open dataset: {}".format(dst_ds))

    logger.info("Done")
Пример #10
0
                            if raster.version:
                                feat.SetField("REL_VER",raster.version)

                            if raster.reg_src:
                                feat.SetField("REG_SRC",raster.reg_src)
                                feat.SetField("NUM_GCPS",raster.num_gcps)
                            if raster.mean_resid_z:
                                feat.SetField("MEANRESZ",raster.mean_resid_z)

                            #### Set fields if populated (will not be populated if metadata file is not found)
                            if raster.creation_date:
                                feat.SetField("CR_DATE",raster.creation_date.strftime("%Y-%m-%d"))

                            ## transfrom and write geom
                            src_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
                            src_srs.ImportFromWkt(raster.proj)

                            if raster.geom:
                                geom = raster.geom.Clone()
                                if not src_srs.IsSame(tgt_srs):
                                    transform = osr.CoordinateTransformation(src_srs,tgt_srs)
                                    geom.Transform(transform) #### Verify this works over 180

                                feat.SetGeometry(geom)

                            else:
                                logger.error('No valid geom found: {}'.format(raster.srcfp))

                            #### add new feature to layer
                            lyr.CreateFeature(feat)
Пример #11
0
def build_archive(raster, scratch, args):
    """Package a SETSM tile DEM and its companion files into a .tar.gz archive.

    Builds a one-feature index shapefile in *scratch* describing the tile
    (IDs, resolution, density, registration info, footprint geometry),
    then tars the DEM, its meta file, any optional rasters that exist on
    disk, and the index shp (under an ``index/`` subfolder) into
    ``raster.archive``.  Nothing is built if the archive already exists
    (unless ``args.overwrite``) or if a required component is missing.

    Args:
        raster: SETSM tile DEM object; supplies file paths, metadata
            attributes, and the footprint geom/projection.
        scratch: directory used for the temporary index shapefile.
        args: parsed CLI namespace; reads ``overwrite``, ``dryrun``,
            and ``epsg``.

    Side effects: changes the process CWD (first to the archive's
    directory, then to *scratch*) and does not restore it.  Relies on a
    module-level ``ogrDriver`` (OGR ESRI Shapefile driver).
    """

    logger.info("Packaging tile {}".format(raster.srcfn))
    #### create archive
    dstfp = raster.archive
    dstdir, dstfn = os.path.split(raster.archive)

    try:
        raster.get_dem_info()
    except RuntimeError as e:
        logger.error(e)
        print(raster.ndv)
    else:

        ## get raster density if not precomputed
        if raster.density is None:
            try:
                raster.compute_density_and_statistics()
            except RuntimeError as e:
                logger.warning(e)

        #### Build Archive
        ## Remove an existing archive first when --overwrite is given
        if os.path.isfile(dstfp) and args.overwrite is True:
            if not args.dryrun:
                try:
                    os.remove(dstfp)
                except:
                    print("Cannot replace archive: %s" % dstfp)

        if not os.path.isfile(dstfp):

            ## Required members; the archive is only built if all exist
            components = (
                os.path.basename(raster.srcfp),  # dem
                os.path.basename(raster.metapath),  # meta
                # index shp files
            )

            ## Optional members; each added only if present on disk
            optional_components = [
                os.path.basename(raster.regmetapath),  # reg
                os.path.basename(raster.err),  # err
                os.path.basename(raster.day),  # day
                os.path.basename(raster.browse),  # browse
                os.path.basename(raster.count),
                os.path.basename(raster.countmt),
                os.path.basename(raster.mad),
                os.path.basename(raster.mindate),
                os.path.basename(raster.maxdate),
            ]

            ## chdir so tar members are stored by bare filename
            os.chdir(dstdir)
            #logger.info(os.getcwd())

            # k counts items added to the archive (for the summary log)
            k = 0
            existing_components = sum(
                [int(os.path.isfile(component)) for component in components])
            ### check if exists, print
            #logger.info(existing_components)
            if existing_components == len(components):

                ## Build index
                index = os.path.join(scratch, raster.tileid + "_index.shp")

                ## create dem index shp: <strip_id>_index.shp
                try:
                    index_dir, index_lyr = utils.get_source_names(index)
                except RuntimeError as e:
                    logger.error("{}: {}".format(index, e))

                ## Delete any stale index, then verify deletion succeeded
                if os.path.isfile(index):
                    ogrDriver.DeleteDataSource(index)

                if not os.path.isfile(index):
                    ds = ogrDriver.CreateDataSource(index)
                    if ds is not None:
                        # NOTE(review): other index writers in this file wrap
                        # this in utils.osr_srs_preserve_axis_order -- confirm
                        # whether axis-order preservation is needed here too
                        tgt_srs = osr.SpatialReference()
                        tgt_srs.ImportFromEPSG(args.epsg)

                        lyr = ds.CreateLayer(index_lyr, tgt_srs,
                                             ogr.wkbPolygon)

                        if lyr is not None:

                            for field_def in utils.TILE_DEM_ATTRIBUTE_DEFINITIONS_BASIC:

                                field = ogr.FieldDefn(field_def.fname,
                                                      field_def.ftype)
                                field.SetWidth(field_def.fwidth)
                                field.SetPrecision(field_def.fprecision)
                                lyr.CreateField(field)

                            feat = ogr.Feature(lyr.GetLayerDefn())

                            ## Set fields
                            feat.SetField("DEM_ID", raster.tileid)
                            feat.SetField("TILE", raster.tilename)
                            feat.SetField("ND_VALUE", raster.ndv)
                            feat.SetField("DEM_NAME", raster.srcfn)
                            res = (raster.xres + raster.yres) / 2.0
                            feat.SetField("DEM_RES", res)
                            feat.SetField("DENSITY", raster.density)
                            feat.SetField("NUM_COMP", raster.num_components)

                            if raster.version:
                                feat.SetField("REL_VER", raster.version)

                            if raster.reg_src:
                                feat.SetField("REG_SRC", raster.reg_src)
                                feat.SetField("NUM_GCPS", raster.num_gcps)
                            if raster.mean_resid_z:
                                feat.SetField("MEANRESZ", raster.mean_resid_z)

                            #### Set fields if populated (will not be populated if metadata file is not found)
                            if raster.creation_date:
                                feat.SetField(
                                    "CR_DATE",
                                    raster.creation_date.strftime("%Y-%m-%d"))

                            ## transform and write geom
                            src_srs = utils.osr_srs_preserve_axis_order(
                                osr.SpatialReference())
                            src_srs.ImportFromWkt(raster.proj)

                            if raster.geom:
                                geom = raster.geom.Clone()
                                if not src_srs.IsSame(tgt_srs):
                                    transform = osr.CoordinateTransformation(
                                        src_srs, tgt_srs)
                                    geom.Transform(
                                        transform
                                    )  #### Verify this works over 180

                                feat.SetGeometry(geom)

                            else:
                                logger.error('No valid geom found: {}'.format(
                                    raster.srcfp))

                            #### add new feature to layer
                            lyr.CreateFeature(feat)

                            ## Close layer and dataset (OGR flushes on release)
                            lyr = None
                            ds = None

                            if os.path.isfile(index):
                                ## Create archive; `archive` is only bound
                                ## when not dryrun, and every later use is
                                ## guarded by the same dryrun check
                                if not args.dryrun:
                                    #archive = tarfile.open(dstfp,"w:")
                                    archive = tarfile.open(dstfp, "w:gz")
                                    if not os.path.isfile(dstfp):
                                        logger.error(
                                            "Cannot create archive: {}".format(
                                                dstfn))

                                ## Add components
                                for component in components:
                                    logger.debug("Adding {} to {}".format(
                                        component, dstfn))
                                    k += 1
                                    if not args.dryrun:
                                        archive.add(component)
                                        #archive.write(component)

                                ## Add optional components
                                for component in optional_components:
                                    if os.path.isfile(component):
                                        logger.debug("Adding {} to {}".format(
                                            component, dstfn))
                                        k += 1
                                        if not args.dryrun:
                                            archive.add(component)

                                ## Add index in subfolder; the scratch shp
                                ## pieces are deleted as they are archived
                                os.chdir(scratch)
                                for f in glob.glob(index_lyr + ".*"):
                                    arcname = os.path.join("index", f)
                                    logger.debug("Adding {} to {}".format(
                                        f, dstfn))
                                    k += 1
                                    if not args.dryrun:
                                        archive.add(f, arcname=arcname)
                                    os.remove(f)

                                logger.info(
                                    "Added {} items to archive: {}".format(
                                        k, dstfn))

                                ## Close archive
                                if not args.dryrun:
                                    try:
                                        archive.close()
                                    except Exception as e:
                                        print(e)

                        else:
                            logger.error(
                                'Cannot create layer: {}'.format(index_lyr))
                    else:
                        logger.error("Cannot create index: {}".format(index))
                else:
                    logger.error(
                        "Cannot remove existing index: {}".format(index))
            else:
                logger.error(
                    "Not enough existing components to make a valid archive: {} ({} found, {} required)"
                    .format(raster.srcfp, existing_components,
                            len(components)))