def main():
    """Entry point: query the PGC index for images contributing to a mosaic.

    Builds the CLI, validates the index / tile-csv / --tday / --tyear options,
    configures file + stream logging, loads the optional exclude list, parses
    the tile schema csv, and dispatches each selected tile to HandleTile().
    Exits via parser.error() (status 2) on bad arguments.
    """

    #### Set Up Arguments
    parser = argparse.ArgumentParser(
        description="query PGC index for images contributing to a mosaic")

    parser.add_argument("index", help="PGC index shapefile")
    parser.add_argument("tile_csv", help="tile schema csv")
    parser.add_argument("dstdir", help="textfile output directory")
    parser.add_argument("mosaic", help="mosaic name without extension")
    #pos_arg_keys = ["index","tile_csv","dstdir"]

    parser.add_argument("-e", "--extent", nargs=4, type=float,
                        help="extent of output mosaic -- xmin xmax ymin ymax (default is union of all inputs)")
    parser.add_argument("--force-pan-to-multi", action="store_true", default=False,
                        help="if output is multiband, force script to also use 1 band images")
    parser.add_argument("-b", "--bands", type=int,
                        help="number of output bands( default is number of bands in the first image)")
    parser.add_argument("--tday",
                        help="month and day of the year to use as target for image suitability ranking -- 04-05")
    parser.add_argument("--tyear",
                        help="year (or year range) to use as target for image suitability ranking -- 2017 or 2015-2017")
    parser.add_argument("--nosort", action="store_true", default=False,
                        help="do not sort images by metadata. script uses the order of the input textfile or directory "
                             "(first image is first drawn). Not recommended if input is a directory; order will be "
                             "random")
    parser.add_argument("--use-exposure", action="store_true", default=False,
                        help="use exposure settings in metadata to inform score")
    parser.add_argument("--exclude",
                        help="file of file name patterns (text only, no wildcards or regexs) to exclude")
    parser.add_argument("--max-cc", type=float, default=0.2,
                        help="maximum fractional cloud cover (0.0-1.0, default 0.2)")
    parser.add_argument("--include-all-ms", action="store_true", default=False,
                        help="include all multispectral imagery, even if the imagery has differing numbers of bands")
    parser.add_argument("--min-contribution-area", type=int, default=20000000,
                        help="minimum area contribution threshold in target projection units (default=20000000). "
                             "Higher values remove more image slivers from the resulting mosaic")
    parser.add_argument("--log",
                        help="output log file (default is queryFP.log in the output folder)")
    parser.add_argument("--ttile",
                        help="target tile (default is to compute all valid tiles. multiple tiles should be delimited "
                             "by a comma [ex: 23_24,23_25])")
    parser.add_argument("--overwrite", action="store_true", default=False,
                        help="overwrite any existing files")
    parser.add_argument("--stretch", choices=ortho_functions.stretches, default="rf",
                        help="stretch abbreviation used in image processing (default=rf)")
    parser.add_argument("--build-shp", action='store_true', default=False,
                        help="build shapefile of intersecting images (only invoked if --no_sort is not used)")
    parser.add_argument("--require-pan", action='store_true', default=False,
                        help="limit search to imagery with both a multispectral and a panchromatic component")
    parser.add_argument("--version", action='version',
                        version="imagery_utils v{}".format(utils.package_version))

    #### Parse Arguments
    args = parser.parse_args()
    src = os.path.abspath(args.index)
    csvpath = os.path.abspath(args.tile_csv)
    dstdir = os.path.abspath(args.dstdir)

    #### Validate Required Arguments
    try:
        dsp, lyrn = utils.get_source_names(src)
    except RuntimeError as e:
        parser.error(e)

    if not os.path.isfile(csvpath):
        parser.error("Arg2 is not a valid file path: %s" % csvpath)

    #### Validate target day option
    if args.tday is not None:
        try:
            # IndexError added: "--tday 04" (no dash) previously crashed with
            # an uncaught IndexError instead of this friendly message
            m = int(args.tday.split("-")[0])
            d = int(args.tday.split("-")[1])
            td = date(2000, m, d)
        except (ValueError, IndexError):
            logger.error("Target day must be in mm-dd format (i.e 04-05)")
            sys.exit(1)
    else:
        m = 0
        d = 0

    #### Validate target year/year range option
    # NOTE: parser.error() prints the message and exits with status 2, so the
    # dead sys.exit(1) calls that followed each parser.error() were removed.
    if args.tyear is not None:
        if len(str(args.tyear)) == 4:
            ## ensure single year is valid.  args.tyear is a *string* from
            ## argparse; it must be converted with int() first -- passing the
            ## string straight to datetime() raised an uncaught TypeError
            ## rather than the ValueError being handled here.
            try:
                tyear_test = datetime(year=int(args.tyear), month=1, day=1)
            except ValueError:
                parser.error("Supplied year {0} is not valid".format(args.tyear))
        elif len(str(args.tyear)) == 9:
            if '-' in args.tyear:
                ## decouple range and build year
                yrs = args.tyear.split('-')
                try:
                    yrs_range = range(int(yrs[0]), int(yrs[1]) + 1)
                except ValueError:
                    # non-numeric 9-char input (e.g. "abcd-efgh") previously
                    # crashed with an uncaught ValueError
                    parser.error("Supplied year range {0} is not valid".format(args.tyear))
                for yy in yrs_range:
                    try:
                        tyear_test = datetime(year=yy, month=1, day=1)
                    except ValueError:
                        parser.error("Supplied year {0} in range {1} is not valid".format(yy, args.tyear))
            else:
                parser.error("Supplied year range {0} is not valid; should be like: 2015 OR 2015-2017"
                             .format(args.tyear))
        else:
            parser.error("Supplied year {0} is not valid, or its format is incorrect; should be 4 digits for single "
                         "year (e.g., 2017), eight digits and dash for range (e.g., 2015-2017)".format(args.tyear))

    ##### Configure Logger
    if args.log is not None:
        logfile = os.path.abspath(args.log)
    else:
        logfile = os.path.join(dstdir,
                               "queryFP_{}.log".format(datetime.today().strftime("%Y%m%d%H%M%S")))

    lfh = logging.FileHandler(logfile)
    lfh.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s', '%m-%d-%Y %H:%M:%S')
    lfh.setFormatter(formatter)
    logger.addHandler(lfh)

    lsh = logging.StreamHandler()
    lsh.setLevel(logging.INFO)
    lsh.setFormatter(formatter)
    logger.addHandler(lsh)

    #### Get exclude_list if specified
    if args.exclude is not None:
        if not os.path.isfile(args.exclude):
            parser.error("Value for option --exclude-list is not a valid file")

        # with-block closes the handle (previously leaked)
        with open(args.exclude, 'r') as f:
            exclude_list = set(line.rstrip() for line in f)
    else:
        exclude_list = set()

    logger.debug("Exclude list: %s", str(exclude_list))

    #### Parse csv, validate tile ID and get tilegeom
    tiles = {}
    # renamed from "csv" to avoid shadowing the stdlib module name; handle is
    # now closed via the with-block
    with open(csvpath, 'r') as tile_csv_file:
        for line in tile_csv_file:
            tile = line.rstrip().split(",")
            if len(tile) != 9:
                logger.warning("funny csv line: %s", line.strip('\n'))
            else:
                name = tile[2]
                if name != "name":  # skip the header row
                    ### Tile csv schema: row, column, name, status, xmin, xmax, ymin, ymax, epsg code
                    t = mosaic.TileParams(float(tile[4]), float(tile[5]), float(tile[6]), float(tile[7]),
                                          int(tile[0]), int(tile[1]), tile[2])
                    t.status = tile[3]
                    t.epsg = int(tile[8])
                    tiles[name] = t

    if args.ttile is not None:
        # explicit target tile(s): comma-delimited list or a single name
        if "," in args.ttile:
            ttiles = args.ttile.split(",")
        else:
            ttiles = [args.ttile]
        for ttile in ttiles:
            if ttile not in tiles:
                logger.info("Target tile is not in the tile csv: %s", ttile)
            else:
                t = tiles[ttile]
                if t.status == "0":
                    logger.error("Tile status indicates it should not be created: %s, %s", ttile, t.status)
                else:
                    try:
                        HandleTile(t, src, dstdir, csvpath, args, exclude_list)
                    except RuntimeError as e:
                        logger.error(e)
    else:
        # no target given: process every tile flagged active, in sorted order
        keys = sorted(tiles.keys())
        for tile in keys:
            t = tiles[tile]
            if t.status == "1":
                try:
                    HandleTile(t, src, dstdir, csvpath, args, exclude_list)
                except RuntimeError as e:
                    logger.error(e)
def HandleTile(t, src, dstdir, csvpath, args, exclude_list):
    """Select the images contributing to mosaic tile *t* and write their
    source paths (*_orig.txt) and expected ortho output paths (*_ortho.txt)
    to *dstdir*; optionally build a shapefile of contributing footprints.

    t            -- mosaic.TileParams (geom, name, epsg, ...)
    src          -- PGC index dataset path
    csvpath      -- tile schema csv path (its basename names the output files)
    args         -- parsed argparse namespace
    exclude_list -- set of scene_ids to skip

    Raises RuntimeError if the index layer cannot be found.
    """

    otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(os.path.basename(csvpath)[:-4], t.name))
    mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(os.path.basename(csvpath)[:-4], t.name))

    if os.path.isfile(otxtpath) and os.path.isfile(mtxtpath) and args.overwrite is False:
        logger.info("Tile %s processing files already exist", t.name)
    else:
        logger.info("Tile %s", t.name)

        t_srs = osr.SpatialReference()
        t_srs.ImportFromEPSG(t.epsg)

        #### Open mfp
        dsp, lyrn = utils.get_source_names(src)
        ds = ogr.Open(dsp)
        if ds is None:
            logger.error("Open failed")
        else:
            lyr = ds.GetLayerByName(lyrn)
            if not lyr:
                raise RuntimeError("Layer {} does not exist in dataset {}".format(lyrn, dsp))
            else:
                s_srs = lyr.GetSpatialRef()
                #logger.debug(str(s_srs))
                #logger.debug(str(t.geom))

                tile_geom_in_s_srs = t.geom.Clone()

                if not t_srs.IsSame(s_srs):
                    # ict: tile srs -> source srs (for the spatial filter);
                    # ct: source srs -> tile srs (for each image footprint).
                    # ct is only referenced below under the same IsSame() guard,
                    # so it cannot be used unbound.
                    ict = osr.CoordinateTransformation(t_srs, s_srs)
                    ct = osr.CoordinateTransformation(s_srs, t_srs)
                    tile_geom_in_s_srs.Transform(ict)

                # if the geometry crosses meridian, split it into multipolygon (else this breaks SetSpatialFilter)
                if utils.doesCross180(tile_geom_in_s_srs):
                    logger.debug("tile_geom_in_s_srs crosses 180 meridian; splitting to multiple polygons...")
                    tile_geom_in_s_srs = utils.getWrappedGeometry(tile_geom_in_s_srs)

                lyr.ResetReading()
                lyr.SetSpatialFilter(tile_geom_in_s_srs)
                feat = lyr.GetNextFeature()

                imginfo_list1 = []

                while feat:
                    iinfo = mosaic.ImageInfo(feat, "RECORD", srs=s_srs)

                    if iinfo.geom is not None and iinfo.geom.GetGeometryType() in (ogr.wkbPolygon,
                                                                                   ogr.wkbMultiPolygon):
                        if not t_srs.IsSame(s_srs):
                            iinfo.geom.Transform(ct)
                            ## fix self-intersection errors caused by reprojecting over 180
                            temp = iinfo.geom.Buffer(0.1)  # assumes a projected coordinate system with meters or feet as units
                            iinfo.geom = temp

                        if iinfo.geom.Intersects(t.geom):
                            if iinfo.scene_id in exclude_list:
                                logger.debug("Scene in exclude list, excluding: %s", iinfo.srcfp)
                            elif not os.path.isfile(iinfo.srcfp):
                                logger.warning("Scene path is invalid, excluding %s (path = %s)",
                                               iinfo.scene_id, iinfo.srcfp)
                            elif args.require_pan:
                                srcfp = iinfo.srcfp
                                srcdir, mul_name = os.path.split(srcfp)
                                # default to '' so an unexpected sensor value is
                                # excluded below instead of raising NameError
                                pan_name = ''
                                if iinfo.sensor in ["WV02", "WV03", "QB02"]:
                                    pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "GE01":
                                    if "_5V" in mul_name:
                                        pan_name_base = srcfp[:-24].replace("M0", "P0")
                                        candidates = glob.glob(pan_name_base + "*")
                                        candidates2 = [f for f in candidates
                                                       if f.endswith(('.ntf', '.NTF', '.tif', '.TIF'))]
                                        if len(candidates2) == 0:
                                            pan_name = ''
                                        elif len(candidates2) == 1:
                                            pan_name = os.path.basename(candidates2[0])
                                        else:
                                            pan_name = ''
                                            logger.error('%i panchromatic images match the multispectral image name '
                                                         '%s', len(candidates2), mul_name)
                                    else:
                                        pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "IK01":
                                    # fix: chain the replaces.  The original
                                    # reassigned from mul_name three times, so
                                    # only the "bgrn" replace ever took effect.
                                    pan_name = mul_name.replace("blu", "pan")
                                    pan_name = pan_name.replace("msi", "pan")
                                    pan_name = pan_name.replace("bgrn", "pan")
                                pan_srcfp = os.path.join(srcdir, pan_name)
                                if not os.path.isfile(pan_srcfp):
                                    logger.debug("Image does not have a panchromatic component, excluding: %s",
                                                 iinfo.srcfp)
                                else:
                                    logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp,
                                                 str(iinfo.geom))
                                    imginfo_list1.append(iinfo)
                            else:
                                logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp, str(iinfo.geom))
                                imginfo_list1.append(iinfo)

                    feat = lyr.GetNextFeature()

                ds = None

                logger.info("Number of intersects in tile %s: %i", t.name, len(imginfo_list1))

                if len(imginfo_list1) > 0:
                    #### Get mosaic parameters
                    logger.debug("Getting mosaic parameters")
                    params = mosaic.getMosaicParameters(imginfo_list1[0], args)

                    #### Remove images that do not match ref
                    logger.debug("Setting image pattern filter")
                    imginfo_list2 = mosaic.filterMatchingImages(imginfo_list1, params)
                    logger.info("Number of images matching filter: %i", len(imginfo_list2))

                    if args.nosort is False:
                        #### Sort by quality
                        logger.debug("Sorting images by quality")
                        imginfo_list3 = []
                        for iinfo in imginfo_list2:
                            iinfo.getScore(params)
                            if iinfo.score > 0:
                                imginfo_list3.append(iinfo)
                        # sort so highest score is last
                        imginfo_list3.sort(key=lambda x: x.score)
                    else:
                        imginfo_list3 = list(imginfo_list2)

                    #### Overlay geoms and remove non-contributors
                    logger.debug("Overlaying images to determine contributors")
                    contribs = mosaic.determine_contributors(imginfo_list3, t.geom, args.min_contribution_area)
                    logger.info("Number of contributing images: %i", len(contribs))

                    if len(contribs) > 0:

                        if args.build_shp:
                            #######################################################
                            #### Create Shp
                            shp = os.path.join(dstdir, "{}_{}_imagery.shp".format(args.mosaic, t.name))

                            logger.debug("Creating shapefile of geoms: %s", shp)

                            fields = [("IMAGENAME", ogr.OFTString, 100),
                                      ("SCORE", ogr.OFTReal, 0)]

                            OGR_DRIVER = "ESRI Shapefile"
                            ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
                            if ogrDriver is None:
                                logger.debug("OGR: Driver %s is not available", OGR_DRIVER)
                                sys.exit(-1)

                            if os.path.isfile(shp):
                                ogrDriver.DeleteDataSource(shp)
                            vds = ogrDriver.CreateDataSource(shp)
                            if vds is None:
                                logger.debug("Could not create shp")
                                sys.exit(-1)

                            shpd, shpn = os.path.split(shp)
                            shpbn, shpe = os.path.splitext(shpn)

                            lyr = vds.CreateLayer(shpbn, t_srs, ogr.wkbPolygon)
                            if lyr is None:
                                logger.debug("ERROR: Failed to create layer: %s", shpbn)
                                sys.exit(-1)

                            for fld, fdef, flen in fields:
                                field_defn = ogr.FieldDefn(fld, fdef)
                                if fdef == ogr.OFTString:
                                    field_defn.SetWidth(flen)
                                if lyr.CreateField(field_defn) != 0:
                                    logger.debug("ERROR: Failed to create field: %s", fld)

                            for iinfo, geom in contribs:
                                logger.debug("Image: %s", iinfo.srcfn)
                                feat = ogr.Feature(lyr.GetLayerDefn())
                                feat.SetField("IMAGENAME", iinfo.srcfn)
                                feat.SetField("SCORE", iinfo.score)
                                feat.SetGeometry(geom)
                                if lyr.CreateFeature(feat) != 0:
                                    logger.debug("ERROR: Could not create feature for image %s", iinfo.srcfn)
                                else:
                                    logger.debug("Created feature for image: %s", iinfo.srcfn)
                                feat.Destroy()

                        #### Write textfiles
                        if not os.path.isdir(dstdir):
                            os.makedirs(dstdir)

                        otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(args.mosaic, t.name))
                        mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(args.mosaic, t.name))

                        # with-block closes BOTH files; previously mtxt was
                        # never closed, so the ortho list might not be flushed
                        with open(otxtpath, 'w') as otxt, open(mtxtpath, 'w') as mtxt:
                            for iinfo, geom in contribs:
                                if not os.path.isfile(iinfo.srcfp):
                                    logger.warning("Image does not exist: %s", iinfo.srcfp)

                                otxt.write("{}\n".format(iinfo.srcfp))
                                m_fn = "{0}_u08{1}{2}.tif".format(os.path.splitext(iinfo.srcfn)[0],
                                                                  args.stretch, t.epsg)
                                mtxt.write(os.path.join(dstdir, 'ortho', t.name, m_fn) + "\n")
def HandleTile(t, src, dstdir, csvpath, args, exclude_list):
    """Tape-aware variant: select images contributing to mosaic tile *t* and
    write source (*_orig.txt), on-tape (*_orig_ontape.csv) and ortho output
    (*_ortho.txt) lists to *dstdir*; optionally build a footprint shapefile.

    Scenes whose index status is "tape" are listed in the on-tape csv so they
    can be pulled with ir.py into dstdir/renamed_fromtape/<tile>/, and the
    orig list points at those future locations.

    Raises RuntimeError if the index layer cannot be found.
    """

    otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(os.path.basename(csvpath)[:-4], t.name))
    mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(os.path.basename(csvpath)[:-4], t.name))

    if os.path.isfile(otxtpath) and os.path.isfile(mtxtpath) and args.overwrite is False:
        logger.info("Tile %s processing files already exist", t.name)
    else:
        logger.info("Tile %s", t.name)

        t_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
        t_srs.ImportFromEPSG(t.epsg)

        #### Open mfp
        dsp, lyrn = utils.get_source_names(src)
        ds = ogr.Open(dsp)
        if ds is None:
            logger.error("Open failed")
        else:
            lyr = ds.GetLayerByName(lyrn)
            if not lyr:
                raise RuntimeError("Layer {} does not exist in dataset {}".format(lyrn, dsp))
            else:
                s_srs = lyr.GetSpatialRef()
                #logger.debug(str(s_srs))
                #logger.debug(str(t.geom))

                tile_geom_in_s_srs = t.geom.Clone()

                if not t_srs.IsSame(s_srs):
                    # ict: tile srs -> source srs; ct: source srs -> tile srs.
                    # ct is only used below under the same IsSame() guard.
                    ict = osr.CoordinateTransformation(t_srs, s_srs)
                    ct = osr.CoordinateTransformation(s_srs, t_srs)
                    tile_geom_in_s_srs.Transform(ict)

                # if the geometry crosses meridian, split it into multipolygon (else this breaks SetSpatialFilter)
                if utils.doesCross180(tile_geom_in_s_srs):
                    logger.debug("tile_geom_in_s_srs crosses 180 meridian; splitting to multiple polygons...")
                    tile_geom_in_s_srs = utils.getWrappedGeometry(tile_geom_in_s_srs)

                lyr.ResetReading()
                lyr.SetSpatialFilter(tile_geom_in_s_srs)
                feat = lyr.GetNextFeature()

                imginfo_list1 = []

                while feat:
                    iinfo = mosaic.ImageInfo(feat, "RECORD", srs=s_srs)

                    if iinfo.geom is not None and iinfo.geom.GetGeometryType() in (ogr.wkbPolygon,
                                                                                   ogr.wkbMultiPolygon):
                        if not t_srs.IsSame(s_srs):
                            iinfo.geom.Transform(ct)
                            ## fix self-intersection errors caused by reprojecting over 180
                            temp = iinfo.geom.Buffer(0.1)  # assumes a projected coordinate system with meters or feet as units
                            iinfo.geom = temp

                        if iinfo.geom.Intersects(t.geom):
                            if iinfo.scene_id in exclude_list:
                                logger.debug("Scene in exclude list, excluding: %s", iinfo.srcfp)
                            elif not os.path.isfile(iinfo.srcfp) and iinfo.status != "tape":
                                # scenes on tape are kept even though the path
                                # does not exist on disk yet
                                logger.warning("Scene path is invalid, excluding %s (path = %s) (status = %s)",
                                               iinfo.scene_id, iinfo.srcfp, iinfo.status)
                            elif args.require_pan:
                                srcfp = iinfo.srcfp
                                srcdir, mul_name = os.path.split(srcfp)
                                # default to '' so an unexpected sensor value is
                                # excluded below instead of raising NameError
                                pan_name = ''
                                if iinfo.sensor in ["WV02", "WV03", "QB02"]:
                                    pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "GE01":
                                    if "_5V" in mul_name:
                                        pan_name_base = srcfp[:-24].replace("M0", "P0")
                                        candidates = glob.glob(pan_name_base + "*")
                                        candidates2 = [f for f in candidates
                                                       if f.endswith(('.ntf', '.NTF', '.tif', '.TIF'))]
                                        if len(candidates2) == 0:
                                            pan_name = ''
                                        elif len(candidates2) == 1:
                                            pan_name = os.path.basename(candidates2[0])
                                        else:
                                            pan_name = ''
                                            logger.error('%i panchromatic images match the multispectral image name '
                                                         '%s', len(candidates2), mul_name)
                                    else:
                                        pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "IK01":
                                    # fix: chain the replaces.  The original
                                    # reassigned from mul_name three times, so
                                    # only the "bgrn" replace ever took effect.
                                    pan_name = mul_name.replace("blu", "pan")
                                    pan_name = pan_name.replace("msi", "pan")
                                    pan_name = pan_name.replace("bgrn", "pan")
                                pan_srcfp = os.path.join(srcdir, pan_name)
                                if not os.path.isfile(pan_srcfp):
                                    logger.debug("Image does not have a panchromatic component, excluding: %s",
                                                 iinfo.srcfp)
                                else:
                                    logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp,
                                                 str(iinfo.geom))
                                    imginfo_list1.append(iinfo)
                            else:
                                logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp, str(iinfo.geom))
                                imginfo_list1.append(iinfo)

                    feat = lyr.GetNextFeature()

                ds = None

                logger.info("Number of intersects in tile %s: %i", t.name, len(imginfo_list1))

                if len(imginfo_list1) > 0:
                    #### Get mosaic parameters
                    logger.debug("Getting mosaic parameters")
                    params = mosaic.getMosaicParameters(imginfo_list1[0], args)

                    #### Remove images that do not match ref
                    logger.debug("Setting image pattern filter")
                    imginfo_list2 = mosaic.filterMatchingImages(imginfo_list1, params)
                    logger.info("Number of images matching filter: %i", len(imginfo_list2))

                    if args.nosort is False:
                        #### Sort by quality
                        logger.debug("Sorting images by quality")
                        imginfo_list3 = []
                        for iinfo in imginfo_list2:
                            iinfo.getScore(params)
                            if iinfo.score > 0:
                                imginfo_list3.append(iinfo)
                        # sort so highest score is last
                        imginfo_list3.sort(key=lambda x: x.score)
                    else:
                        imginfo_list3 = list(imginfo_list2)

                    #### Overlay geoms and remove non-contributors
                    logger.debug("Overlaying images to determine contributors")
                    contribs = mosaic.determine_contributors(imginfo_list3, t.geom, args.min_contribution_area)
                    logger.info("Number of contributing images: %i", len(contribs))

                    if len(contribs) > 0:

                        if args.build_shp:
                            #######################################################
                            #### Create Shp
                            shp = os.path.join(dstdir, "{}_{}_imagery.shp".format(args.mosaic, t.name))

                            logger.debug("Creating shapefile of geoms: %s", shp)

                            fields = [("IMAGENAME", ogr.OFTString, 100),
                                      ("SCORE", ogr.OFTReal, 0)]

                            OGR_DRIVER = "ESRI Shapefile"
                            ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
                            if ogrDriver is None:
                                logger.debug("OGR: Driver %s is not available", OGR_DRIVER)
                                sys.exit(-1)

                            if os.path.isfile(shp):
                                ogrDriver.DeleteDataSource(shp)
                            vds = ogrDriver.CreateDataSource(shp)
                            if vds is None:
                                logger.debug("Could not create shp")
                                sys.exit(-1)

                            shpd, shpn = os.path.split(shp)
                            shpbn, shpe = os.path.splitext(shpn)

                            lyr = vds.CreateLayer(shpbn, t_srs, ogr.wkbPolygon)
                            if lyr is None:
                                logger.debug("ERROR: Failed to create layer: %s", shpbn)
                                sys.exit(-1)

                            for fld, fdef, flen in fields:
                                field_defn = ogr.FieldDefn(fld, fdef)
                                if fdef == ogr.OFTString:
                                    field_defn.SetWidth(flen)
                                if lyr.CreateField(field_defn) != 0:
                                    logger.debug("ERROR: Failed to create field: %s", fld)

                            for iinfo, geom in contribs:
                                logger.debug("Image: %s", iinfo.srcfn)
                                feat = ogr.Feature(lyr.GetLayerDefn())
                                feat.SetField("IMAGENAME", iinfo.srcfn)
                                feat.SetField("SCORE", iinfo.score)
                                feat.SetGeometry(geom)
                                if lyr.CreateFeature(feat) != 0:
                                    logger.debug("ERROR: Could not create feature for image %s", iinfo.srcfn)
                                else:
                                    logger.debug("Created feature for image: %s", iinfo.srcfn)
                                feat.Destroy()

                        #### Write textfiles
                        if not os.path.isdir(dstdir):
                            os.makedirs(dstdir)

                        otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(args.mosaic, t.name))
                        otxtpath_ontape = os.path.join(dstdir, "{}_{}_orig_ontape.csv".format(args.mosaic, t.name))
                        mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(args.mosaic, t.name))
                        rn_fromtape_basedir = os.path.join(dstdir, "renamed_fromtape")
                        rn_fromtape_path = os.path.join(rn_fromtape_basedir, t.name)

                        otxt = open(otxtpath, 'w')
                        ttxt = open(otxtpath_ontape, 'w')
                        mtxt = open(mtxtpath, 'w')

                        # write header
                        ttxt.write("{0},{1},{2}\n".format("SCENE_ID", "S_FILEPATH", "STATUS"))
                        tape_ct = 0

                        for iinfo, geom in contribs:
                            if not os.path.isfile(iinfo.srcfp) and iinfo.status != "tape":
                                logger.warning("Image does not exist: %s", iinfo.srcfp)

                            if iinfo.status == "tape":
                                tape_ct += 1
                                ttxt.write("{0},{1},{2}\n".format(iinfo.scene_id, iinfo.srcfp, iinfo.status))
                                # get srcfp with file extension; the orig list points
                                # at the location ir.py will restore the scene to
                                srcfp_file = os.path.basename(iinfo.srcfp)
                                otxt.write("{}\n".format(os.path.join(rn_fromtape_path, srcfp_file)))
                            else:
                                otxt.write("{}\n".format(iinfo.srcfp))

                            m_fn = "{0}_u08{1}{2}.tif".format(
                                os.path.splitext(iinfo.srcfn)[0],
                                args.stretch,
                                t.epsg
                            )
                            mtxt.write(os.path.join(dstdir, 'ortho', t.name, m_fn) + "\n")

                        otxt.close()
                        # fix: mtxt was never closed, and ttxt was removed via
                        # os.remove() below while still open (fails on Windows,
                        # leaks the handle elsewhere)
                        mtxt.close()
                        ttxt.close()

                        if tape_ct == 0:
                            logger.debug("No files need to be pulled from tape.")
                            os.remove(otxtpath_ontape)
                        else:
                            # make output dirs from tape
                            if not os.path.isdir(rn_fromtape_basedir):
                                os.mkdir(rn_fromtape_basedir)
                            if not os.path.isdir(rn_fromtape_path):
                                os.mkdir(rn_fromtape_path)
                            tape_tmp = os.path.join(dstdir, "{0}_{1}_tmp".format(args.mosaic, t.name))
                            if not os.path.isdir(tape_tmp):
                                os.mkdir(tape_tmp)
                            logger.warning("{0} scenes are not accessible, as they are on tape. Please use ir.py to pull "
                                           "scenes using file '{1}'. They must be put in directory '{2}', as file '{3}' "
                                           "contains hard-coded paths to said files (necessary to perform "
                                           "orthorectification). Please set a --tmp path (use '{4}').\n"
                                           "Note that if some (or all) scenes have already been pulled from tape, ir.py "
                                           "will not pull them again.\n".
                                           format(tape_ct, otxtpath_ontape, rn_fromtape_path, otxtpath, tape_tmp))
                            tape_log = "{0}_{1}_ir_log_{2}.log".format(args.mosaic, t.name,
                                                                      datetime.today().strftime("%Y%m%d%H%M%S"))
                            root_pgclib_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                                                            "pgclib", "")
                            logger.info("Suggested ir.py command:\n\n"
                                        "python {}ir.py -i {} -o {} --tmp {} -tm link 2>&1 | tee {}"
                                        .format(root_pgclib_path, otxtpath_ontape, rn_fromtape_path, tape_tmp,
                                                os.path.join(dstdir, tape_log)))
def build_archive(src, scratch, args):
    """Package a SETSM DEM strip into a gzipped tar archive.

    src     -- path to the source DEM raster
    scratch -- scratch directory where the temporary index shapefile is built
    args    -- parsed argparse namespace (uses filter_dems, force_filter_dems,
               overwrite, dryrun, lsf, mdf_only)

    Side effects: may delete low-quality DEMs and their companions, writes
    mdf/readme files, builds a per-strip index shapefile in *scratch*, and
    creates <raster.archive> containing components + index.
    NOTE(review): relies on module-level ogrDriver and tgt_srs (defined
    elsewhere in this file — not visible in this chunk), and changes the
    process-wide working directory via os.chdir().
    """

    logger.info("Packaging Raster: {}".format(src))
    raster = dem.SetsmDem(src)

    dstfp = raster.archive
    dstdir, dstfn = os.path.split(raster.archive)

    #print dstfn
    #print dstfp

    try:
        raster.get_dem_info()
    except RuntimeError as e:
        logger.error(e)
    else:
        process = True

        ## get raster density if not precomputed
        if raster.density is None:
            try:
                raster.compute_density_and_statistics()
            except RuntimeError as e:
                logger.warning(e)

        if args.filter_dems or args.force_filter_dems:
            # filter dems with area < 5.5 sqkm and density < .1
            area = raster.geom.Area()
            # logger.info(raster.density)
            if area < 5500000:
                logger.info("Raster area {} falls below threshold: {}".format(area, raster.srcfp))
                process = False
            elif raster.density < 0.1:
                logger.info("Raster density {} falls below threshold: {}".format(raster.density, raster.srcfp))
                process = False

            # remove the filtered-out DEM and all files sharing its prefix
            if not process:
                logger.info('Removing {}'.format(raster.srcfp))
                to_remove = glob.glob(raster.srcfp[:-8] + '*')
                for f in to_remove:
                    #logger.info('Removing {}'.format(f))
                    os.remove(f)

        if process:
            #### Build mdf
            if not os.path.isfile(raster.mdf) or args.overwrite:
                if os.path.isfile(raster.mdf):
                    if not args.dryrun:
                        os.remove(raster.mdf)
                try:
                    if not args.dryrun:
                        raster.write_mdf_file(args.lsf)
                except RuntimeError as e:
                    logger.error(e)

            #### Build Readme
            if not os.path.isfile(raster.readme) or args.overwrite:
                if os.path.isfile(raster.readme):
                    if not args.dryrun:
                        os.remove(raster.readme)
                if not args.dryrun:
                    raster.write_readme_file()

            #### Build Archive
            if not args.mdf_only:
                if os.path.isfile(dstfp) and args.overwrite is True:
                    if not args.dryrun:
                        try:
                            os.remove(dstfp)
                        # NOTE(review): bare except swallows all errors here,
                        # including KeyboardInterrupt — consider OSError
                        except:
                            print("Cannot replace archive: %s" % dstfp)

                if not os.path.isfile(dstfp):
                    # components: files that MUST exist for a valid archive;
                    # with --lsf the smoothed dem replaces the raw dem
                    if args.lsf:
                        components = (
                            os.path.basename(raster.srcfp).replace("dem.tif", "dem_smooth.tif"),  # dem
                            os.path.basename(raster.matchtag),  # matchtag
                            os.path.basename(raster.mdf),  # mdf
                            os.path.basename(raster.readme),  # readme
                            os.path.basename(raster.browse),  # browse
                            # index shp files
                        )
                    else:
                        components = (
                            os.path.basename(raster.srcfp),  # dem
                            os.path.basename(raster.matchtag),  # matchtag
                            os.path.basename(raster.mdf),  # mdf
                            os.path.basename(raster.readme),  # readme
                            os.path.basename(raster.browse),  # browse
                            # index shp files
                        )

                    optional_components = [os.path.basename(r) for r in raster.reg_files]  #reg

                    # component names are relative; chdir so isfile()/archive.add()
                    # resolve against the raster's directory
                    os.chdir(dstdir)
                    #logger.info(os.getcwd())

                    k = 0  # count of items added to the archive
                    existing_components = sum([int(os.path.isfile(component)) for component in components])
                    ### check if exists, print
                    #logger.info(existing_components)
                    if existing_components == len(components):

                        ## Build index
                        index = os.path.join(scratch, raster.stripid + "_index.shp")

                        ## create dem index shp: <strip_id>_index.shp
                        try:
                            index_dir, index_lyr = utils.get_source_names(index)
                        except RuntimeError as e:
                            logger.error("{}: {}".format(index, e))

                        if os.path.isfile(index):
                            ogrDriver.DeleteDataSource(index)

                        if not os.path.isfile(index):
                            ds = ogrDriver.CreateDataSource(index)
                            if ds is not None:
                                lyr = ds.CreateLayer(index_lyr, tgt_srs, ogr.wkbPolygon)

                                if lyr is not None:
                                    for field_def in utils.DEM_ATTRIBUTE_DEFINITIONS_BASIC:
                                        field = ogr.FieldDefn(field_def.fname, field_def.ftype)
                                        field.SetWidth(field_def.fwidth)
                                        field.SetPrecision(field_def.fprecision)
                                        lyr.CreateField(field)

                                    #print raster.stripid
                                    feat = ogr.Feature(lyr.GetLayerDefn())

                                    ## Set fields
                                    feat.SetField("DEM_ID", raster.stripid)
                                    feat.SetField("PAIRNAME", raster.pairname)
                                    feat.SetField("SENSOR1", raster.sensor1)
                                    feat.SetField("SENSOR2", raster.sensor2)
                                    feat.SetField("ACQDATE1", raster.acqdate1.strftime("%Y-%m-%d"))
                                    feat.SetField("ACQDATE2", raster.acqdate2.strftime("%Y-%m-%d"))
                                    feat.SetField("CATALOGID1", raster.catid1)
                                    feat.SetField("CATALOGID2", raster.catid2)
                                    feat.SetField("ND_VALUE", raster.ndv)
                                    feat.SetField("DEM_NAME", raster.srcfn)
                                    feat.SetField("ALGM_VER", raster.algm_version)
                                    res = (raster.xres + raster.yres) / 2.0
                                    feat.SetField("DEM_RES", res)
                                    feat.SetField("DENSITY", raster.density)

                                    #### Set fields if populated (will not be populated if metadata file is not found)
                                    if raster.creation_date:
                                        feat.SetField("CR_DATE", raster.creation_date.strftime("%Y-%m-%d"))

                                    ## transfrom and write geom
                                    feat.SetField("PROJ4", raster.proj4)
                                    feat.SetField("EPSG", raster.epsg)

                                    src_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
                                    src_srs.ImportFromWkt(raster.proj)

                                    if raster.geom:
                                        geom = raster.geom.Clone()
                                        transform = osr.CoordinateTransformation(src_srs, tgt_srs)
                                        geom.Transform(transform)

                                        centroid = geom.Centroid()
                                        feat.SetField("CENT_LAT", centroid.GetY())
                                        feat.SetField("CENT_LON", centroid.GetX())

                                        feat.SetGeometry(geom)
                                    else:
                                        logger.error('No valid geom found: {}'.format(raster.srcfp))

                                    #### add new feature to layer
                                    lyr.CreateFeature(feat)

                                    ## Close layer and dataset
                                    lyr = None
                                    ds = None

                                    if os.path.isfile(index):
                                        ## Create archive
                                        if not args.dryrun:
                                            archive = tarfile.open(dstfp, "w:gz")
                                            #archive = tarfile.open(dstfp,"w:")
                                            if not os.path.isfile(dstfp):
                                                logger.error("Cannot create archive: {}".format(dstfn))

                                        ## Add components
                                        for component in components:
                                            logger.debug("Adding {} to {}".format(component, dstfn))
                                            k += 1
                                            # a smoothed dem is stored under the
                                            # plain dem name inside the archive
                                            if "dem_smooth.tif" in component:
                                                arcn = component.replace("dem_smooth.tif", "dem.tif")
                                            else:
                                                arcn = component
                                            if not args.dryrun:
                                                archive.add(component, arcname=arcn)

                                        ## Add optional components
                                        for component in optional_components:
                                            if os.path.isfile(component):
                                                logger.debug("Adding {} to {}".format(component, dstfn))
                                                k += 1
                                                if not args.dryrun:
                                                    archive.add(component)

                                        ## Add index in subfolder
                                        os.chdir(scratch)
                                        for f in glob.glob(index_lyr + ".*"):
                                            arcn = os.path.join("index", f)
                                            logger.debug("Adding {} to {}".format(f, dstfn))
                                            k += 1
                                            if not args.dryrun:
                                                archive.add(f, arcname=arcn)
                                            # scratch index files are temporary
                                            os.remove(f)

                                        logger.info("Added {} items to archive: {}".format(k, dstfn))

                                        ## Close archive and compress with gz
                                        if not args.dryrun:
                                            try:
                                                archive.close()
                                            except Exception as e:
                                                print(e)
                                else:
                                    logger.error('Cannot create layer: {}'.format(index_lyr))
                            else:
                                logger.error("Cannot create index: {}".format(index))
                        else:
                            logger.error("Cannot remove existing index: {}".format(index))
                    else:
                        logger.error("Not enough existing components to make a valid archive: {} ({} found, {} required)"
                                     .format(raster.srcfp, existing_components, len(components)))
def HandleTile(t, src, dstdir, csvpath, args, exclude_list):
    """Query the image index for one tile and write its contributor lists.

    Selects images from the footprint index ``src`` that intersect tile ``t``,
    optionally filters/sorts them by quality score, determines the minimal set
    of contributing images, and writes ``*_orig.txt`` / ``*_ortho.txt`` lists
    (and optionally a footprint shapefile) into ``dstdir``.

    Parameters:
        t: mosaic.TileParams for the target tile (has .name, .epsg, .geom).
        src: path (or path:layer) of the OGR image index dataset.
        dstdir: output directory for text files / shapefile.
        csvpath: tile schema csv path (its basename is used in the skip check).
        args: parsed argparse namespace (uses overwrite, require_pan, nosort,
              build_shp, mosaic, stretch, min_contribution_area, ...).
        exclude_list: set of scene ids to exclude.

    Raises:
        RuntimeError: if the requested layer does not exist in the dataset.
    """
    # NOTE(review): these skip-check paths are built from the csv basename, but the
    # files written at the end of this function are named from args.mosaic — if the
    # two differ, the overwrite check never matches. TODO confirm intent.
    otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(os.path.basename(csvpath)[:-4], t.name))
    mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(os.path.basename(csvpath)[:-4], t.name))

    if os.path.isfile(otxtpath) and os.path.isfile(mtxtpath) and args.overwrite is False:
        logger.info("Tile %s processing files already exist", t.name)
    else:
        logger.info("Tile %s", t.name)

        t_srs = osr.SpatialReference()
        t_srs.ImportFromEPSG(t.epsg)

        #### Open mfp
        dsp, lyrn = utils.get_source_names(src)
        ds = ogr.Open(dsp)
        if ds is None:
            logger.error("Open failed")
        else:
            lyr = ds.GetLayerByName(lyrn)
            if not lyr:
                raise RuntimeError("Layer {} does not exist in dataset {}".format(lyrn, dsp))
            else:
                s_srs = lyr.GetSpatialRef()
                #logger.debug(str(s_srs))
                #logger.debug(str(t.geom))

                # Reproject the tile geometry into the source SRS for the spatial filter
                tile_geom_in_s_srs = t.geom.Clone()
                if not t_srs.IsSame(s_srs):
                    ict = osr.CoordinateTransformation(t_srs, s_srs)
                    ct = osr.CoordinateTransformation(s_srs, t_srs)
                    tile_geom_in_s_srs.Transform(ict)

                # if the geometry crosses meridian, split it into multipolygon
                # (else this breaks SetSpatialFilter)
                if utils.doesCross180(tile_geom_in_s_srs):
                    logger.debug("tile_geom_in_s_srs crosses 180 meridian; splitting to multiple polygons...")
                    tile_geom_in_s_srs = utils.getWrappedGeometry(tile_geom_in_s_srs)

                lyr.ResetReading()
                lyr.SetSpatialFilter(tile_geom_in_s_srs)
                feat = lyr.GetNextFeature()

                imginfo_list1 = []
                while feat:
                    iinfo = mosaic.ImageInfo(feat, "RECORD", srs=s_srs)

                    if iinfo.geom is not None and iinfo.geom.GetGeometryType() in (ogr.wkbPolygon,
                                                                                   ogr.wkbMultiPolygon):
                        if not t_srs.IsSame(s_srs):
                            iinfo.geom.Transform(ct)
                            ## fix self-intersection errors caused by reprojecting over 180
                            temp = iinfo.geom.Buffer(0.1)  # assumes a projected coordinate system with meters or feet as units
                            iinfo.geom = temp

                        if iinfo.geom.Intersects(t.geom):
                            if iinfo.scene_id in exclude_list:
                                logger.debug("Scene in exclude list, excluding: %s", iinfo.srcfp)
                            elif not os.path.isfile(iinfo.srcfp):
                                logger.warning("Scene path is invalid, excluding %s (path = %s)",
                                               iinfo.scene_id, iinfo.srcfp)
                            elif args.require_pan:
                                # Derive the expected panchromatic sibling file name for the
                                # multispectral scene; exclude the scene if it does not exist.
                                srcfp = iinfo.srcfp
                                srcdir, mul_name = os.path.split(srcfp)
                                if iinfo.sensor in ["WV02", "WV03", "QB02"]:
                                    pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "GE01":
                                    if "_5V" in mul_name:
                                        ## get pan sibling for 5V series
                                        pan_name_base = srcfp[:-24].replace("M0", "P0")
                                        candidates = glob.glob(pan_name_base + "*")
                                        candidates2 = [f for f in candidates
                                                       if f.endswith(('.ntf', '.NTF', '.tif', '.TIF'))]
                                        if len(candidates2) == 0:
                                            pan_name = ''
                                        elif len(candidates2) == 1:
                                            pan_name = os.path.basename(candidates2[0])
                                        else:
                                            pan_name = ''
                                            logger.error('%i panchromatic images match the multispectral image name '
                                                         '%s', len(candidates2), mul_name)
                                    else:
                                        pan_name = mul_name.replace("-M", "-P")
                                elif iinfo.sensor == "IK01":
                                    # BUGFIX: the replaces must be chained; previously each
                                    # assignment started again from mul_name, so only the
                                    # "bgrn" substitution ever took effect.
                                    pan_name = mul_name.replace("blu", "pan")
                                    pan_name = pan_name.replace("msi", "pan")
                                    pan_name = pan_name.replace("bgrn", "pan")
                                else:
                                    # BUGFIX: unknown sensors previously left pan_name unbound
                                    # (NameError); treat them as having no pan component.
                                    pan_name = ''
                                pan_srcfp = os.path.join(srcdir, pan_name)
                                if not os.path.isfile(pan_srcfp):
                                    logger.debug("Image does not have a panchromatic component, excluding: %s",
                                                 iinfo.srcfp)
                                else:
                                    logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp, str(iinfo.geom))
                                    imginfo_list1.append(iinfo)
                            else:
                                logger.debug("Intersect %s, %s: %s", iinfo.scene_id, iinfo.srcfp, str(iinfo.geom))
                                imginfo_list1.append(iinfo)

                    feat = lyr.GetNextFeature()

                ds = None

                logger.info("Number of intersects in tile %s: %i", t.name, len(imginfo_list1))

                if len(imginfo_list1) > 0:
                    #### Get mosaic parameters
                    logger.debug("Getting mosaic parameters")
                    params = mosaic.getMosaicParameters(imginfo_list1[0], args)

                    #### Remove images that do not match ref
                    logger.debug("Setting image pattern filter")
                    imginfo_list2 = mosaic.filterMatchingImages(imginfo_list1, params)
                    logger.info("Number of images matching filter: %i", len(imginfo_list2))

                    if args.nosort is False:
                        #### Sort by quality
                        logger.debug("Sorting images by quality")
                        imginfo_list3 = []
                        for iinfo in imginfo_list2:
                            iinfo.getScore(params)
                            if iinfo.score > 0:
                                imginfo_list3.append(iinfo)
                        # sort so highest score is last
                        imginfo_list3.sort(key=lambda x: x.score)
                    else:
                        imginfo_list3 = list(imginfo_list2)

                    #### Overlay geoms and remove non-contributors
                    logger.debug("Overlaying images to determine contributors")
                    contribs = mosaic.determine_contributors(imginfo_list3, t.geom, args.min_contribution_area)
                    logger.info("Number of contributing images: %i", len(contribs))

                    if len(contribs) > 0:
                        if args.build_shp:
                            #### Create shapefile of contributing image footprints
                            shp = os.path.join(dstdir, "{}_{}_imagery.shp".format(args.mosaic, t.name))
                            logger.debug("Creating shapefile of geoms: %s", shp)
                            fields = [("IMAGENAME", ogr.OFTString, 100), ("SCORE", ogr.OFTReal, 0)]

                            OGR_DRIVER = "ESRI Shapefile"
                            ogrDriver = ogr.GetDriverByName(OGR_DRIVER)
                            if ogrDriver is None:
                                logger.debug("OGR: Driver %s is not available", OGR_DRIVER)
                                sys.exit(-1)

                            if os.path.isfile(shp):
                                ogrDriver.DeleteDataSource(shp)
                            vds = ogrDriver.CreateDataSource(shp)
                            if vds is None:
                                logger.debug("Could not create shp")
                                sys.exit(-1)

                            shpd, shpn = os.path.split(shp)
                            shpbn, shpe = os.path.splitext(shpn)

                            lyr = vds.CreateLayer(shpbn, t_srs, ogr.wkbPolygon)
                            if lyr is None:
                                logger.debug("ERROR: Failed to create layer: %s", shpbn)
                                sys.exit(-1)

                            for fld, fdef, flen in fields:
                                field_defn = ogr.FieldDefn(fld, fdef)
                                if fdef == ogr.OFTString:
                                    field_defn.SetWidth(flen)
                                if lyr.CreateField(field_defn) != 0:
                                    logger.debug("ERROR: Failed to create field: %s", fld)

                            for iinfo, geom in contribs:
                                logger.debug("Image: %s", iinfo.srcfn)
                                feat = ogr.Feature(lyr.GetLayerDefn())
                                feat.SetField("IMAGENAME", iinfo.srcfn)
                                feat.SetField("SCORE", iinfo.score)
                                feat.SetGeometry(geom)
                                if lyr.CreateFeature(feat) != 0:
                                    logger.debug("ERROR: Could not create feature for image %s", iinfo.srcfn)
                                else:
                                    logger.debug("Created feature for image: %s", iinfo.srcfn)
                                feat.Destroy()

                        #### Write textfiles
                        if not os.path.isdir(dstdir):
                            os.makedirs(dstdir)

                        otxtpath = os.path.join(dstdir, "{}_{}_orig.txt".format(args.mosaic, t.name))
                        mtxtpath = os.path.join(dstdir, "{}_{}_ortho.txt".format(args.mosaic, t.name))

                        # BUGFIX: mtxt was previously opened but never closed; use context
                        # managers so both handles are always released.
                        with open(otxtpath, 'w') as otxt, open(mtxtpath, 'w') as mtxt:
                            for iinfo, geom in contribs:
                                if not os.path.isfile(iinfo.srcfp):
                                    logger.warning("Image does not exist: %s", iinfo.srcfp)
                                otxt.write("{}\n".format(iinfo.srcfp))
                                m_fn = "{0}_u08{1}{2}.tif".format(
                                    os.path.splitext(iinfo.srcfn)[0],
                                    args.stretch,
                                    t.epsg
                                )
                                mtxt.write(os.path.join(dstdir, 'ortho', t.name, m_fn) + "\n")
def main():
    """Parse arguments, validate inputs, and run HandleTile per requested tile.

    Exits via parser.error / sys.exit on invalid arguments; logs to a file in
    the destination directory (or --log) plus stderr.
    """
    #### Set Up Arguments
    parser = argparse.ArgumentParser(
        description="query PGC index for images contributing to a mosaic"
    )
    parser.add_argument("index", help="PGC index shapefile")
    parser.add_argument("tile_csv", help="tile schema csv")
    parser.add_argument("dstdir", help="textfile output directory")
    parser.add_argument("mosaic", help="mosaic name without extension")
    #pos_arg_keys = ["index","tile_csv","dstdir"]
    parser.add_argument("-e", "--extent", nargs=4, type=float,
                        help="extent of output mosaic -- xmin xmax ymin ymax (default is union of all inputs)")
    parser.add_argument("--force-pan-to-multi", action="store_true", default=False,
                        help="if output is multiband, force script to also use 1 band images")
    parser.add_argument("-b", "--bands", type=int,
                        help="number of output bands( default is number of bands in the first image)")
    parser.add_argument("--tday",
                        help="month and day of the year to use as target for image suitability ranking -- 04-05")
    parser.add_argument("--tyear",
                        help="year (or year range) to use as target for image suitability ranking -- "
                             "2017 or 2015-2017")
    parser.add_argument("--nosort", action="store_true", default=False,
                        help="do not sort images by metadata. script uses the order of the input textfile or directory "
                             "(first image is first drawn). Not recommended if input is a directory; order will be "
                             "random")
    parser.add_argument("--use-exposure", action="store_true", default=False,
                        help="use exposure settings in metadata to inform score")
    parser.add_argument("--exclude",
                        help="file of file name patterns (text only, no wildcards or regexs) to exclude")
    parser.add_argument("--max-cc", type=float, default=0.2,
                        help="maximum fractional cloud cover (0.0-1.0, default 0.2)")
    parser.add_argument("--include-all-ms", action="store_true", default=False,
                        help="include all multispectral imagery, even if the imagery has differing numbers of bands")
    parser.add_argument("--min-contribution-area", type=int, default=20000000,
                        help="minimum area contribution threshold in target projection units (default=20000000). "
                             "Higher values remove more image slivers from the resulting mosaic")
    parser.add_argument("--log",
                        help="output log file (default is queryFP.log in the output folder)")
    parser.add_argument("--ttile",
                        help="target tile (default is to compute all valid tiles. multiple tiles should be delimited "
                             "by a comma [ex: 23_24,23_25])")
    parser.add_argument("--overwrite", action="store_true", default=False,
                        help="overwrite any existing files")
    parser.add_argument("--stretch", choices=ortho_functions.stretches, default="rf",
                        help="stretch abbreviation used in image processing (default=rf)")
    parser.add_argument("--build-shp", action='store_true', default=False,
                        help="build shapefile of intersecting images (only invoked if --no_sort is not used)")
    parser.add_argument("--require-pan", action='store_true', default=False,
                        help="limit search to imagery with both a multispectral and a panchromatic component")
    parser.add_argument("--version", action='version',
                        version="imagery_utils v{}".format(utils.package_version))

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.index)
    csvpath = os.path.abspath(args.tile_csv)
    dstdir = os.path.abspath(args.dstdir)

    #### Validate Required Arguments
    try:
        dsp, lyrn = utils.get_source_names(src)
    except RuntimeError as e:
        parser.error(e)

    if not os.path.isfile(csvpath):
        parser.error("Arg2 is not a valid file path: %s" % csvpath)

    #### Validate target day option
    if args.tday is not None:
        try:
            m = int(args.tday.split("-")[0])
            d = int(args.tday.split("-")[1])
            td = date(2000, m, d)
        except ValueError:
            logger.error("Target day must be in mm-dd format (i.e 04-05)")
            sys.exit(1)
    else:
        m = 0
        d = 0

    #### Validate target year/year range option
    # NOTE: parser.error() raises SystemExit itself, so no explicit sys.exit is
    # needed after it (the originals were unreachable dead code).
    if args.tyear is not None:
        if len(str(args.tyear)) == 4:
            ## ensure single year is valid
            try:
                # BUGFIX: args.tyear is a string; datetime(year=<str>) raises
                # TypeError (uncaught) rather than ValueError — convert first.
                tyear_test = datetime(year=int(args.tyear), month=1, day=1)
            except ValueError:
                parser.error("Supplied year {0} is not valid".format(args.tyear))
        elif len(str(args.tyear)) == 9:
            if '-' in args.tyear:
                ## decouple range and build year
                yrs = args.tyear.split('-')
                yrs_range = range(int(yrs[0]), int(yrs[1]) + 1)
                for yy in yrs_range:
                    try:
                        tyear_test = datetime(year=yy, month=1, day=1)
                    except ValueError:
                        parser.error("Supplied year {0} in range {1} is not valid".format(yy, args.tyear))
            else:
                parser.error("Supplied year range {0} is not valid; should be like: 2015 OR 2015-2017"
                             .format(args.tyear))
        else:
            parser.error("Supplied year {0} is not valid, or its format is incorrect; should be 4 digits for single "
                         "year (e.g., 2017), eight digits and dash for range (e.g., 2015-2017)".format(args.tyear))

    ##### Configure Logger
    if args.log is not None:
        logfile = os.path.abspath(args.log)
    else:
        logfile = os.path.join(dstdir, "queryFP_{}.log".format(datetime.today().strftime("%Y%m%d%H%M%S")))

    lfh = logging.FileHandler(logfile)
    lfh.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s', '%m-%d-%Y %H:%M:%S')
    lfh.setFormatter(formatter)
    logger.addHandler(lfh)

    lsh = logging.StreamHandler()
    lsh.setLevel(logging.INFO)
    lsh.setFormatter(formatter)
    logger.addHandler(lsh)

    #### Get exclude_list if specified
    if args.exclude is not None:
        if not os.path.isfile(args.exclude):
            parser.error("Value for option --exclude-list is not a valid file")
        # use a context manager so the handle is always closed
        with open(args.exclude, 'r') as f:
            exclude_list = set([line.rstrip() for line in f.readlines()])
    else:
        exclude_list = set()
    logger.debug("Exclude list: %s", str(exclude_list))

    #### Parse csv, validate tile ID and get tilegeom
    tiles = {}
    # renamed from 'csv' to avoid shadowing the stdlib module name
    with open(csvpath, 'r') as csvfile:
        for line in csvfile:
            tile = line.rstrip().split(",")
            if len(tile) != 9:
                logger.warning("funny csv line: %s", line.strip('\n'))
            else:
                name = tile[2]
                if name != "name":
                    ### Tile csv schema: row, column, name, status, xmin, xmax, ymin, ymax, epsg code
                    t = mosaic.TileParams(float(tile[4]), float(tile[5]), float(tile[6]), float(tile[7]),
                                          int(tile[0]), int(tile[1]), tile[2])
                    t.status = tile[3]
                    t.epsg = int(tile[8])
                    tiles[name] = t

    if args.ttile is not None:
        if "," in args.ttile:
            ttiles = args.ttile.split(",")
        else:
            ttiles = [args.ttile]
        for ttile in ttiles:
            if ttile not in tiles:
                logger.info("Target tile is not in the tile csv: %s", ttile)
            else:
                t = tiles[ttile]
                if t.status == "0":
                    logger.error("Tile status indicates it should not be created: %s, %s", ttile, t.status)
                else:
                    try:
                        HandleTile(t, src, dstdir, csvpath, args, exclude_list)
                    except RuntimeError as e:
                        logger.error(e)
    else:
        keys = list(tiles.keys())
        keys.sort()
        for tile in keys:
            t = tiles[tile]
            if t.status == "1":
                try:
                    HandleTile(t, src, dstdir, csvpath, args, exclude_list)
                except RuntimeError as e:
                    logger.error(e)
def main():
    """Parse and validate command-line arguments for the mosaic query tool.

    NOTE(review): this is an older, duplicate definition of main() also present
    in this file (it lacks --tyear and --version and stops after argument
    validation); whichever is defined last wins at import time. TODO: remove
    one of the two definitions.
    """
    #### Set Up Arguments
    parser = argparse.ArgumentParser(
        description="query PGC index for images contributing to a mosaic")
    parser.add_argument("index", help="PGC index shapefile")
    parser.add_argument("tile_csv", help="tile schema csv")
    parser.add_argument("dstdir", help="textfile output directory")
    parser.add_argument("mosaic", help="mosaic name without extension")
    #pos_arg_keys = ["index","tile_csv","dstdir"]
    parser.add_argument("-e", "--extent", nargs=4, type=float,
                        help="extent of output mosaic -- xmin xmax ymin ymax (default is union of all inputs)")
    parser.add_argument("--force-pan-to-multi", action="store_true", default=False,
                        help="if output is multiband, force script to also use 1 band images")
    parser.add_argument("-b", "--bands", type=int,
                        help="number of output bands( default is number of bands in the first image)")
    parser.add_argument("--tday",
                        help="month and day of the year to use as target for image suitability ranking -- 04-05")
    parser.add_argument("--nosort", action="store_true", default=False,
                        help="do not sort images by metadata. script uses the order of the input textfile or "
                             "directory (first image is first drawn). Not recommended if input is a directory; "
                             "order will be random")
    parser.add_argument("--use-exposure", action="store_true", default=False,
                        help="use exposure settings in metadata to inform score")
    parser.add_argument("--exclude",
                        help="file of file name patterns (text only, no wildcards or regexs) to exclude")
    parser.add_argument("--max-cc", type=float, default=0.2,
                        help="maximum fractional cloud cover (0.0-1.0, default 0.2)")
    parser.add_argument("--include-all-ms", action="store_true", default=False,
                        help="include all multispectral imagery, even if the imagery has differing numbers of bands")
    parser.add_argument("--min-contribution-area", type=int, default=20000000,
                        help="minimum area contribution threshold in target projection units (default=20000000). "
                             "Higher values remove more image slivers from the resulting mosaic")
    parser.add_argument("--log",
                        help="output log file (default is queryFP.log in the output folder)")
    parser.add_argument("--ttile",
                        help="target tile (default is to compute all valid tiles. multiple tiles should be "
                             "delimited by a comma [ex: 23_24,23_25])")
    parser.add_argument("--overwrite", action="store_true", default=False,
                        help="overwrite any existing files")
    parser.add_argument("--stretch", choices=ortho_functions.stretches, default="rf",
                        help="stretch abbreviation used in image processing (default=rf)")
    parser.add_argument("--build-shp", action='store_true', default=False,
                        help="build shapefile of intersecting images (only invoked if --no_sort is not used)")
    parser.add_argument("--require-pan", action='store_true', default=False,
                        help="limit search to imagery with both a multispectral and a panchromatic component")

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.index)
    csvpath = os.path.abspath(args.tile_csv)
    dstdir = os.path.abspath(args.dstdir)

    #### Validate Required Arguments
    try:
        dsp, lyrn = utils.get_source_names(src)
    except RuntimeError as e:
        # BUGFIX: was Python 2 syntax "except RuntimeError, e" — a SyntaxError
        # under Python 3.
        parser.error(e)
os.chdir(dstdir) #logger.info(os.getcwd()) k = 0 existing_components = sum([int(os.path.isfile(component)) for component in components]) ### check if exists, print #logger.info(existing_components) if existing_components == len(components): ## Build index index = os.path.join(scratch,raster.tileid+"_index.shp") ## create dem index shp: <strip_id>_index.shp try: index_dir, index_lyr = utils.get_source_names(index) except RuntimeError, e: logger.error("{}: {}".format(index,e)) if os.path.isfile(index): ogrDriver.DeleteDataSource(index) if not os.path.isfile(index): ds = ogrDriver.CreateDataSource(index) if ds is not None: tgt_srs = osr.SpatialReference() tgt_srs.ImportFromEPSG(args.epsg) lyr = ds.CreateLayer(index_lyr, tgt_srs, ogr.wkbPolygon) if lyr is not None:
def build_archive(raster, scratch, args):
    """Package a DEM tile into a gzipped tar archive with a footprint index.

    Builds a one-feature index shapefile in ``scratch`` describing the tile,
    then tars the DEM, metadata, any optional component files, and the index
    (under an "index/" prefix) into ``raster.archive``.

    Parameters:
        raster: tile object exposing paths (srcfp, metapath, archive, ...),
            DEM attributes (ndv, xres/yres, density, ...), and helpers
            (get_dem_info, compute_density_and_statistics).
        scratch: scratch directory where the index shapefile is built.
        args: parsed options; uses args.overwrite, args.dryrun, args.epsg.

    Side effects: changes the process working directory (os.chdir) and
    deletes the scratch index files after archiving.
    """
    logger.info("Packaging tile {}".format(raster.srcfn))

    #### create archive
    dstfp = raster.archive
    dstdir, dstfn = os.path.split(raster.archive)

    try:
        raster.get_dem_info()
    except RuntimeError as e:
        logger.error(e)
        print(raster.ndv)
    else:
        ## get raster density if not precomputed
        if raster.density is None:
            try:
                raster.compute_density_and_statistics()
            except RuntimeError as e:
                logger.warning(e)

        #### Build Archive
        if os.path.isfile(dstfp) and args.overwrite is True:
            if not args.dryrun:
                try:
                    os.remove(dstfp)
                except OSError:
                    # BUGFIX: was a bare "except:" which also swallowed
                    # SystemExit/KeyboardInterrupt.
                    print("Cannot replace archive: %s" % dstfp)

        if not os.path.isfile(dstfp):
            # required component files (basenames; resolved relative to dstdir)
            components = (
                os.path.basename(raster.srcfp),     # dem
                os.path.basename(raster.metapath),  # meta
                # index shp files
            )
            # optional sidecar files, added only if present
            optional_components = [
                os.path.basename(raster.regmetapath),  # reg
                os.path.basename(raster.err),          # err
                os.path.basename(raster.day),          # day
                os.path.basename(raster.browse),       # browse
                os.path.basename(raster.count),
                os.path.basename(raster.countmt),
                os.path.basename(raster.mad),
                os.path.basename(raster.mindate),
                os.path.basename(raster.maxdate),
            ]

            os.chdir(dstdir)
            #logger.info(os.getcwd())

            k = 0
            existing_components = sum(
                [int(os.path.isfile(component)) for component in components])
            ### check if exists, print
            #logger.info(existing_components)
            if existing_components == len(components):

                ## Build index
                index = os.path.join(scratch, raster.tileid + "_index.shp")

                ## create dem index shp: <strip_id>_index.shp
                try:
                    index_dir, index_lyr = utils.get_source_names(index)
                except RuntimeError as e:
                    logger.error("{}: {}".format(index, e))

                if os.path.isfile(index):
                    ogrDriver.DeleteDataSource(index)

                if not os.path.isfile(index):
                    ds = ogrDriver.CreateDataSource(index)
                    if ds is not None:
                        tgt_srs = osr.SpatialReference()
                        tgt_srs.ImportFromEPSG(args.epsg)

                        lyr = ds.CreateLayer(index_lyr, tgt_srs, ogr.wkbPolygon)
                        if lyr is not None:
                            for field_def in utils.TILE_DEM_ATTRIBUTE_DEFINITIONS_BASIC:
                                field = ogr.FieldDefn(field_def.fname, field_def.ftype)
                                field.SetWidth(field_def.fwidth)
                                field.SetPrecision(field_def.fprecision)
                                lyr.CreateField(field)

                            #print raster.stripid
                            feat = ogr.Feature(lyr.GetLayerDefn())

                            ## Set fields
                            feat.SetField("DEM_ID", raster.tileid)
                            feat.SetField("TILE", raster.tilename)
                            feat.SetField("ND_VALUE", raster.ndv)
                            feat.SetField("DEM_NAME", raster.srcfn)
                            res = (raster.xres + raster.yres) / 2.0
                            feat.SetField("DEM_RES", res)
                            feat.SetField("DENSITY", raster.density)
                            feat.SetField("NUM_COMP", raster.num_components)

                            if raster.version:
                                feat.SetField("REL_VER", raster.version)

                            if raster.reg_src:
                                feat.SetField("REG_SRC", raster.reg_src)
                                feat.SetField("NUM_GCPS", raster.num_gcps)
                            if raster.mean_resid_z:
                                feat.SetField("MEANRESZ", raster.mean_resid_z)

                            #### Set fields if populated (will not be populated if metadata file is not found)
                            if raster.creation_date:
                                feat.SetField("CR_DATE", raster.creation_date.strftime("%Y-%m-%d"))

                            ## transfrom and write geom
                            src_srs = utils.osr_srs_preserve_axis_order(osr.SpatialReference())
                            src_srs.ImportFromWkt(raster.proj)

                            if raster.geom:
                                geom = raster.geom.Clone()
                                if not src_srs.IsSame(tgt_srs):
                                    transform = osr.CoordinateTransformation(src_srs, tgt_srs)
                                    geom.Transform(transform)  #### Verify this works over 180
                                feat.SetGeometry(geom)
                            else:
                                logger.error('No valid geom found: {}'.format(raster.srcfp))

                            #### add new feature to layer
                            lyr.CreateFeature(feat)

                            ## Close layer and dataset
                            lyr = None
                            ds = None

                            if os.path.isfile(index):
                                ## Create archive
                                if not args.dryrun:
                                    #archive = tarfile.open(dstfp,"w:")
                                    archive = tarfile.open(dstfp, "w:gz")
                                    if not os.path.isfile(dstfp):
                                        logger.error("Cannot create archive: {}".format(dstfn))

                                ## Add components
                                for component in components:
                                    logger.debug("Adding {} to {}".format(component, dstfn))
                                    k += 1
                                    if not args.dryrun:
                                        archive.add(component)
                                        #archive.write(component)

                                ## Add optional components
                                for component in optional_components:
                                    if os.path.isfile(component):
                                        logger.debug("Adding {} to {}".format(component, dstfn))
                                        k += 1
                                        if not args.dryrun:
                                            archive.add(component)

                                ## Add index in subfolder
                                os.chdir(scratch)
                                for f in glob.glob(index_lyr + ".*"):
                                    arcname = os.path.join("index", f)
                                    logger.debug("Adding {} to {}".format(f, dstfn))
                                    k += 1
                                    if not args.dryrun:
                                        archive.add(f, arcname=arcname)
                                    # scratch index files were created by this
                                    # function, so they are removed after archiving
                                    os.remove(f)

                                logger.info("Added {} items to archive: {}".format(k, dstfn))

                                ## Close archive
                                if not args.dryrun:
                                    try:
                                        archive.close()
                                    except Exception as e:
                                        print(e)
                        else:
                            logger.error('Cannot create layer: {}'.format(index_lyr))
                    else:
                        logger.error("Cannot create index: {}".format(index))
                else:
                    logger.error("Cannot remove existing index: {}".format(index))
            else:
                logger.error("Not enough existing components to make a valid archive: {} ({} found, {} required)"
                             .format(raster.srcfp, existing_components, len(components)))