def get_uavsar_annotation(fkey, directory):
    '''
    Search a directory for the UAVSAR annotation (.ann) file matching a file
    key and parse it.

    Args:
        fkey: File key (e.g. the product basename) used to match the
              annotation file name
        directory: Directory to search for the annotation file

    Returns:
        tuple: Parsed annotation dictionary and the full path to the .ann file
    '''
    # Search local files for a matching file with .ann in its name
    ann_candidates = os.listdir(directory)
    fmatches = [f for f in ann_candidates if fkey in f and 'ann' in f]

    # If we find too many or not enough, raise an exception and exit
    if len(fmatches) != 1:
        raise ValueError('Unable to find a single annotation (.ann) file for'
                         ' UAVSAR file key {}, found {}'.format(fkey, len(fmatches)))

    # Form the descriptor file name based on the grid file name; it should have .ann in it
    ann_file = join(directory, fmatches[0])
    desc = read_InSar_annotation(ann_file)

    return desc, ann_file
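# Hypothetical usage sketch for get_uavsar_annotation(): the key and download
# directory below are illustrative (borrowed from main() further down) and
# assume the matching .ann file is present locally.
#
#   directory = abspath(expanduser('~/Downloads/SnowEx2020_UAVSAR'))
#   fkey = 'grmesa_27416_20003-028_20005-007_0011d_s01_L090HH_01'
#   desc, ann_file = get_uavsar_annotation(fkey, directory)
#   print(desc['polarization']['value'])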
def setup_class(self):
    """
    Attempt to convert all the files
    """
    # Start from a clean temporary output folder
    if isdir(self.temp):
        shutil.rmtree(self.temp)
    os.mkdir(self.temp)

    self.desc = read_InSar_annotation(join(self.d, 'uavsar_latlon.ann'))

    # Expected output file, named with the component being tested
    f_pieces = self.input_f.split('.')[0:-1] + [self.component, 'tif']
    output_f = join(self.temp, '.'.join(f_pieces))

    # Convert the .grd file, writing geotiffs into the temp folder
    INSAR_to_rasterio(join(self.d, self.input_f), self.desc,
                      join(self.temp, self.input_f.replace('grd', 'tif')))

    self.dataset = rasterio.open(output_f)
    self.band = self.dataset.read(1)
def convert(filenames, output, epsg, clean_first=False):
    '''
    Convert all UAVSAR .grd files associated with the given annotation files
    to geotiff, reproject the resulting files from lat/lon to UTM, and save
    them to the output directory.

    Args:
        filenames: List of UAVSAR annotation (.ann) files; every .grd file
                   sharing an annotation's basename is converted
        output: Directory to output files to
        epsg: EPSG code of the resulting files
        clean_first: Boolean indicating whether to clear out the output
                     folder first
    '''
    # Keep track of errors, time elapsed, and number of files completed
    start = time.time()
    errors = []
    completed = 0

    # Clean up existing folders and make an output folder with a temp folder
    temp = join(output, 'temp')

    for d in [output, temp]:
        if isdir(d):
            if clean_first:
                log.info('Removing {}...'.format(d))
                shutil.rmtree(d)

        if not isdir(d):
            mkdir(d)

    nfiles = len(filenames)
    log.info('Converting UAVSAR data for {} annotation files to geotiff...'.format(nfiles))
    directory = dirname(filenames[0])

    # Loop over all the annotation files; outputs keep the same names, just in
    # a different folder
    for ann in sorted(filenames):
        # Open the ann file
        desc = read_InSar_annotation(ann)

        # Form a pattern based on the annotation filename
        base_f = basename(ann)
        pattern = '.'.join(base_f.split('.')[0:-1]) + '*'

        # Gather all associated .grd files
        grd_files = glob.glob(join(directory, pattern + '.grd'))
        log.info('Converting {} grd files to geotiff...'.format(len(grd_files)))

        for grd in grd_files:
            # Save to our temporary folder, only changing the extension to tif
            latlon_tiff = grd.replace(directory, temp).replace('grd', 'tif')

            try:
                # Convert the GRD to a geotiff that's projected in lat/lon
                INSAR_to_rasterio(grd, desc, latlon_tiff)

                tiff_pattern = '.'.join(latlon_tiff.split('.')[0:-1]) + '*'
                tif_files = glob.glob(tiff_pattern)
                log.info('Reprojecting {} files to utm...'.format(len(tif_files)))

                for tif in tif_files:
                    utm_file = tif.replace(temp, output)
                    reproject_raster_by_epsg(tif, utm_file, epsg)

                completed += 1

            except Exception as e:
                log.error(e)
                errors.append((grd, e))

    nfiles = completed + len(errors)
    log.info('Converted {}/{} files.'.format(completed, nfiles))

    # Report errors in a convenient location for users
    if errors:
        log.warning('{}/{} files errored out during conversion...'.format(
            len(errors), nfiles))
        for f, e in errors:
            log.error('Conversion of {} errored out with:\n{}'.format(f, e))

    # Clean up the temp folder
    log.debug('Removing {}...'.format(temp))
    shutil.rmtree(temp)

    log.info('Completed! {:0.0f}s elapsed'.format(time.time() - start))
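# Hypothetical usage sketch for convert(): gather the annotation files for a
# campaign and convert/reproject all associated .grd files. The download path
# is illustrative and EPSG:26912 (UTM zone 12N) is the code used for the
# Grand Mesa test data in main() below.
#
#   ann_files = glob.glob(join(expanduser('~/Downloads/SnowEx2020_UAVSAR'), '*.ann'))
#   convert(ann_files, output='./output', epsg=26912, clean_first=True)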
def _push_one(self, f, **kwargs):
    '''
    Overwrite _push_one to push the set of rasters associated with an
    annotation file instead of a single raster.
    '''
    # Copy the metadata for modifying and open the ann file
    meta = kwargs.copy()
    desc = read_InSar_annotation(f)

    # Expand the path for the geotiffs
    tiff_dir = abspath(expanduser(self.geotiff_dir))

    # Form the pattern to look for and grab the tifs
    pattern = '.'.join(basename(f).split('.')[0:-1]) + '*.tif'
    rasters = glob.glob(join(tiff_dir, pattern))

    # Submit each geotiff, modifying meta on the fly
    for r in rasters:
        # Grab information from the filename
        f_pieces = r.split('.')
        component = f_pieces[-2]  # Real or imaginary component
        data_abbr = f_pieces[-3]  # Key to the data name
        dname = self.dname_map[data_abbr]  # Data type in db

        # For the data type
        meta['type'] = 'insar ' + dname.split(' ')[0]

        if dname == 'interferogram':
            meta['type'] += (' ' + component)

        # Assign the date of the respective flight for amplitude products
        if 'amplitude' in dname:
            meta['date'] = desc['start time of acquisition for pass {}'.format(
                dname.split(' ')[-1])]['value']

        # Derived products always receive the date of the last overpass
        else:
            meta['date'] = desc['start time of acquisition for pass 2']['value']

        # Assign only the date, not the date and time
        meta['date'] = meta['date'].date()

        # Assign units
        meta['units'] = desc['{} units'.format(dname.split(' ')[0])]['value']

        # Flexibly form a comment describing the flight dates for each product
        comment = get_InSar_flight_comment(dname, desc)

        # Add which DEM was used, which dictates the file naming convention,
        # e.g. ...VV_01.int.grd
        comment += ', DEM used = {}'.format(
            desc['dem used in processing']['value'])

        # Add the polarization to the comments
        comment += ', Polarization = {}'.format(
            desc['polarization']['value'])

        meta['description'] = comment

        self.log.info('Uploading {} as {}...'.format(r, meta['type']))

        d = self.UploaderClass(r, **meta)

        # Submit the data to the database
        d.submit(self.session)

        # Keep track of the number of rasters uploaded
        self.uploaded += 1
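# Note on the naming convention _push_one() relies on (the product name below
# is a hypothetical example): for a geotiff such as
# 'grmesa_..._L090VV_01.int.real.tif', r.split('.') ends with
# ['int', 'real', 'tif'], so f_pieces[-3] ('int') keys self.dname_map and
# f_pieces[-2] ('real') is the interferogram component.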
def main():
    # How much of the original data to crop to the middle, e.g. 20% in each
    # direction from the center
    ratio = 0.2

    # Output directory
    outdir = '../tests/data'
    temp = './temp'

    outdir = abspath(outdir)
    temp = abspath(temp)

    # Pattern to look for
    directory = '~/Downloads/SnowEx2020_UAVSAR'
    pattern = 'grmesa_27416_20003-028_20005-007_0011d_s01_L090HH_01.*.grd'

    # Expand the directory containing the files
    directory = abspath(expanduser(directory))
    files = glob.glob(join(directory, pattern))
    log.info('Found {} files that can be used for testing...'.format(
        len(files)))

    fkey = pattern.split('.')[0]

    # Get the ann file
    desc, ann_file = get_uavsar_annotation(fkey, directory)

    # Form the modifications to the ann file
    mods = make_mods(desc, ratio)

    # Make our temporary folder
    if isdir(temp):
        rmtree(temp)
    mkdir(temp)

    log.info("")
    log.info("Cropping binary files...")

    for f in files:
        # Output file name, use the same extension
        ext = '.'.join(f.split('.')[-2:])
        grd_file = join(outdir, 'uavsar_latlon.' + ext)

        # Crop and save the resulting binary file in tests/data
        open_crop_grd_files(f, desc, ratio, grd_file)

    # Modify the annotation file and write it out to the new location
    new_ann = join(outdir, 'uavsar_latlon.ann')
    copy_and_mod_annotation(ann_file, new_ann, mods)
    desc = read_InSar_annotation(new_ann)

    log.info("")
    log.info("Converting files to GeoTiffs...")

    for f in glob.glob(join(outdir, 'uavsar_latlon*.grd')):
        # Convert the binary file to a tiff and save it in the testing data
        tif_file = f.replace('.grd', '.tif')
        INSAR_to_rasterio(f, desc, tif_file)

    log.info("")
    log.info("Reprojecting GeoTiffs to UTM...")

    utm_dir = '../tests/data/uavsar'
    utm_dir = abspath(expanduser(utm_dir))

    if isdir(utm_dir):
        rmtree(utm_dir)
    mkdir(utm_dir)

    for f in glob.glob(join(outdir, 'uavsar_latlon*.tif')):
        # Reproject the data to UTM (EPSG:26912)
        utm_file = basename(f).replace('_latlon', '_utm')
        utm_file = join(utm_dir, utm_file)
        reproject_raster_by_epsg(f, utm_file, 26912)