def __init__(self, where=[], datatype=None, gtx=False, epsg=None, **kwargs):
    super().__init__(**kwargs)
    self._vdatum_data_url = 'https://vdatum.noaa.gov/download/data/'
    self._proj_vdatum_index = 'https://cdn.proj.org/files.geojson'
    self._outdir = os.path.join(os.getcwd(), 'vdatum')
    ## add others IGLD85
    #self._vdatums = ['VERTCON', 'EGM1984', 'EGM1996', 'EGM2008', 'GEOID03', 'GEOID06', 'GEOID09', 'GEOID12A', 'GEOID12B', 'GEOID96', 'GEOID99', 'TIDAL']
    self._vdatums = ['TIDAL']
    self._tidal_datums = ['mhw', 'mhhw', 'mlw', 'mllw', 'tss', 'mtl']
    self.where = where
    self.datatype = datatype
    self.epsg = utils.int_or(epsg)
    self.gtx = gtx
    self.name = 'vdatum'
    self.v_datum = 'varies'
    self.FRED = FRED.FRED(name=self.name, verbose=self.verbose)
    self.update_if_not_in_FRED()
def __init__(self, data=[], src_region=None, inc=None, name='waffles_sm',
             epsg=4326, warp=None, extend=0, node='pixel', verbose=False):
    """generate spatial-metadata"""

    #super().__init__(**kwargs)
    self.inc = utils.int_or(inc)
    self.epsg = utils.int_or(epsg)
    self.warp = utils.int_or(warp)
    self.extend = extend
    self.node = node
    self.region = src_region
    self.d_region = self.dist_region()
    self.name = name
    self.verbose = verbose
    self.data = data
    self.data = [
        dlim.DatasetFactory(
            fn=" ".join(['-' if x == "" else x for x in dl.split(":")]),
            src_region=self.d_region,
            verbose=self.verbose,
            epsg=self.epsg
        ).acquire_dataset() for dl in self.data
    ]
    self.data = [d for d in self.data if d is not None]
    for d in self.data:
        d.parse()

    self._init_vector()
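## A minimal sketch of how the colon-delimited data entries above are
## rewritten before being handed to dlim.DatasetFactory: empty fields become
## '-' placeholders and the delimiter switches to whitespace. The sample
## entry string and its format code are hypothetical.
def entry_to_factory_fn(dl):
    return " ".join(['-' if x == "" else x for x in dl.split(":")])

assert entry_to_factory_fn("surveys/h1234.xyz:168::0.5") == "surveys/h1234.xyz 168 - 0.5"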
def _datum_by_name(self, datum_name):
    ## tidal
    if utils.int_or(datum_name) not in _tidal_frames.keys():
        for t in _tidal_frames.keys():
            if datum_name.lower() in _tidal_frames[t]['name'].lower():
                return(t)
    else:
        return(int(datum_name))

    ## htdp
    if utils.int_or(datum_name) not in _htdp_reference_frames.keys():
        for t in _htdp_reference_frames.keys():
            if datum_name.lower() in _htdp_reference_frames[t]['name'].lower():
                return(t)
    else:
        return(int(datum_name))

    ## cdn
    if utils.int_or(datum_name) not in _cdn_reference_frames.keys():
        for t in _cdn_reference_frames.keys():
            if datum_name.lower() in _cdn_reference_frames[t]['name'].lower():
                return(t)
    else:
        return(int(datum_name))

    return(None)
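## A self-contained sketch of the lookup pattern above, against a
## hypothetical reference-frame table (the codes and names below are
## illustrative, not taken from the real tidal/htdp/cdn tables): a numeric
## datum code that is already a key is returned directly, otherwise the name
## is matched case-insensitively against the frame descriptions.
_example_frames = {5103: {'name': 'NAVD88'}, 5714: {'name': 'MSL height'}}

def example_datum_by_name(datum_name):
    try:
        code = int(datum_name)
    except (TypeError, ValueError):
        code = None

    if code in _example_frames:
        return code

    for t in _example_frames:
        if str(datum_name).lower() in _example_frames[t]['name'].lower():
            return t

    return None

assert example_datum_by_name('5103') == 5103
assert example_datum_by_name('msl') == 5714
assert example_datum_by_name('unknown') is None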
def __init__(self, processed=True, inc=None, process=False, min_year=None,
             survey_id=None, exclude=None, **kwargs):
    super().__init__(**kwargs)
    self._mb_data_url = "https://data.ngdc.noaa.gov/platforms/"
    self._mb_metadata_url = "https://data.noaa.gov/waf/NOAA/NESDIS/NGDC/MGG/Multibeam/iso/"
    self._mb_search_url = "https://maps.ngdc.noaa.gov/mapviewer-support/multibeam/files.groovy?"
    self._mb_autogrid = "https://www.ngdc.noaa.gov/maps/autogrid/"
    self._mb_html = "https://www.ngdc.noaa.gov/"
    self._outdir = os.path.join(os.getcwd(), 'mb')
    self._urls = [self._mb_data_url, self._mb_metadata_url, self._mb_autogrid]
    self.name = 'multibeam'
    self.processed_p = processed
    self.process = process
    self.inc = utils.str2inc(inc)
    self.min_year = utils.int_or(min_year)
    self.survey_id = survey_id
    self.exclude = exclude
            i_regions.append(str(arg[2:]))
        elif arg == '--outname' or arg == '-O':
            name = argv[i + 1]
            i += 1
        elif arg[:2] == '-O':
            name = arg[2:]
        elif arg == '-s_epsg' or arg == '--s_epsg' or arg == '-P':
            epsg = argv[i + 1]
            i = i + 1
        elif arg == '--increment' or arg == '-E':
            inc = utils.str2inc(argv[i + 1])
            i = i + 1
        elif arg[:2] == '-E':
            inc = utils.str2inc(arg[2:])
        elif arg == '--extend' or arg == '-X':
            extend = utils.int_or(argv[i + 1], 0)
            i = i + 1
        elif arg[:2] == '-X':
            extend = utils.int_or(arg[2:], 0)
        elif arg == '-r' or arg == '--grid-node':
            node = 'grid'
        elif arg == '-p' or arg == '--prefix':
            want_prefix = True
        elif arg == '-help' or arg == '--help' or arg == '-h':
            sys.stderr.write(_usage)
            sys.exit(1)
        elif arg == '-version' or arg == '--version':
            sys.stderr.write('{}\n'.format(_version))
            sys.exit(1)
        else:
            dls.append(arg)
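## A minimal, standalone sketch of the option-parsing convention used above:
## each short flag is accepted either with a separate value ('-E', '1s') or
## with the value attached ('-E1s'). Plain strings stand in for the
## utils.str2inc conversion.
def parse_increment(argv):
    inc, i = None, 0
    while i < len(argv):
        arg = argv[i]
        if arg == '--increment' or arg == '-E':
            inc = argv[i + 1]
            i += 1
        elif arg[:2] == '-E':
            inc = arg[2:]
        i += 1
    return inc

assert parse_increment(['-E', '1s']) == '1s'
assert parse_increment(['-E1s']) == '1s'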
CIRES DEM home page: <http://ciresgroups.colorado.edu/coastalDEM>
'''.format(_version)

if __name__ == "__main__":
    elev = None
    chunk_step = None
    chunk_size = None
    agg_level = 3
    i = 1
    argv = sys.argv

    while i < len(sys.argv):
        arg = sys.argv[i]
        if arg == '-size' or arg == '--size' or arg == '-z':
            chunk_size = utils.int_or(argv[i + 1])
            i = i + 1
        elif arg == '-step' or arg == '--step' or arg == '-s':
            chunk_step = utils.int_or(argv[i + 1])
            i = i + 1
        elif arg == '-agg_level' or arg == '--agg_level' or arg == '-a':
            agg_level = utils.int_or(argv[i + 1])
            i = i + 1
        elif arg == '-help' or arg == '--help' or arg == '-h':
            sys.stderr.write(_usage)
            sys.exit(1)
        elif arg == '-version' or arg == '--version':
            sys.stderr.write('{}\n'.format(_version))
            sys.exit(1)
        elif elev is None:
            elev = arg
def spat_meta_cli(argv = sys.argv):
    i = 1
    dls = []
    i_regions = []
    these_regions = []
    src_srs = 'epsg:4326'
    xinc = utils.str2inc('1s')
    yinc = utils.str2inc('1s')
    node = 'pixel'
    name = 'waffles_spat'
    ogr_format = 'ESRI Shapefile'
    extend = 0
    want_verbose = True
    want_prefix = False
    want_recursive = False
    prefix_args = {}
    argv = sys.argv

    while i < len(argv):
        arg = sys.argv[i]
        if arg == '--region' or arg == '-R':
            i_regions.append(str(argv[i + 1]))
            i = i + 1
        elif arg[:2] == '-R':
            i_regions.append(str(arg[2:]))
        elif arg == '--outname' or arg == '-O':
            name = argv[i + 1]
            i += 1
        elif arg[:2] == '-O':
            name = arg[2:]
        elif arg == '-s_srs' or arg == '--s_srs' or arg == '-P':
            src_srs = argv[i + 1]
            i = i + 1
        elif arg == '--increment' or arg == '-E':
            incs = argv[i + 1].split(':')
            xy_inc = incs[0].split('/')
            xinc = utils.str2inc(xy_inc[0])
            if len(xy_inc) > 1:
                yinc = utils.str2inc(xy_inc[1])
            else:
                yinc = utils.str2inc(xy_inc[0])
            i = i + 1
        elif arg[:2] == '-E':
            incs = arg[2:].split(':')
            xy_inc = incs[0].split('/')
            xinc = utils.str2inc(xy_inc[0])
            if len(xy_inc) > 1:
                yinc = utils.str2inc(xy_inc[1])
            else:
                yinc = utils.str2inc(xy_inc[0])
        elif arg == '--extend' or arg == '-X':
            exts = argv[i + 1].split(':')
            extend = utils.int_or(exts[0], 0)
            i += 1
        elif arg[:2] == '-X':
            exts = arg[2:].split(':')
            extend = utils.int_or(exts[0], 0)
        elif arg == '--format' or arg == '-F':
            ogr_format = argv[i + 1]
            i += 1
        elif arg[:2] == '-F':
            ogr_format = arg[2:]
        elif arg == '-p' or arg == '--prefix':
            want_prefix = True
            prefix_opts = argv[i + 1].split(':')
            prefix_args = utils.args2dict(prefix_opts, prefix_args)
            if len(prefix_args) > 0:
                i += 1
        elif arg == '-r' or arg == '--grid-node':
            node = 'grid'
        elif arg == '-c' or arg == '--recursive':
            want_recursive = True
        elif arg == '--quiet' or arg == '-q':
            want_verbose = False
        elif arg == '-help' or arg == '--help' or arg == '-h':
            sys.stderr.write(_usage)
            sys.exit(1)
        elif arg == '-version' or arg == '--version':
            sys.stdout.write('{}\n'.format(__version__))
            sys.exit(1)
        else:
            dls.append(arg)
        i = i + 1

    for i_region in i_regions:
        tmp_region = regions.Region().from_string(i_region)
        if tmp_region.valid_p(check_xy=True):
            these_regions.append(tmp_region)
        else:
            i_region_s = i_region.split(':')
            tmp_region = regions.ogr_wkts(i_region_s[0])
            for i in tmp_region:
                if i.valid_p():
                    if len(i_region_s) > 1:
                        these_regions.append(
                            regions.Region().from_string(
                                '/'.join([i.format('str'), i_region_s[1]])
                            )
                        )
                    else:
                        these_regions.append(i)

    if len(these_regions) == 0:
        these_regions = [None]
        utils.echo_error_msg('Could not parse region {}'.format(these_regions))
        sys.stderr.write('{}\n'.format(_usage))
        sys.exit(1)
    else:
        if want_verbose:
            utils.echo_msg(
                'parsed {} region(s)'.format(len(these_regions))
            )

    name_ = name
    for rn, this_region in enumerate(these_regions):
        utils.echo_msg('using region {}'.format(this_region.format('gmt')))
        if len(dls) == 0:
            sys.stderr.write(_usage)
            utils.echo_error_msg('you must specify some type of data')
        else:
            if want_prefix or len(these_regions) > 1:
                name_ = utils.append_fn(name, this_region, xinc, **prefix_args)

            if os.path.exists('{}_sm.{}'.format(name_, utils.ogr_fext(ogr_format))):
                utils.echo_msg(
                    'SPATIAL METADATA {} already exists, skipping...'.format(
                        '{}_sm.{}'.format(name_, utils.ogr_fext(ogr_format))
                    )
                )
            else:
                SpatialMetadata(
                    data=dls,
                    src_region=this_region,
                    xinc=xinc,
                    yinc=yinc,
                    extend=extend,
                    src_srs=src_srs,
                    node=node,
                    name=name_,
                    verbose=want_verbose,
                    recursive=want_recursive,
                    ogr_format=ogr_format
                ).run()
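## A hypothetical invocation of this CLI, assuming an entry point named
## 'spat_meta' wraps spat_meta_cli(); the flags correspond to the parser
## above (-R region, -E x/y increment, -O output name, -F OGR format) and
## the datalist path is illustrative only:
##
##   spat_meta -R -90.75/-88.1/28.7/31.25 -E 1s/1s -O tile_n31_w090 \
##       -F "ESRI Shapefile" coastal.datalist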
def parse_fn(self):
    """parse the datalist entry line"""

    if self.fn is None:
        return (self)

    if os.path.exists(self.fn):
        self.guess_data_format()
        self.metadata['name'] = self.fn.split('.')[0]
        return (self.fn)

    this_entry = re.findall(r'[^"\s]\S*|".+?"', self.fn.rstrip())
    try:
        entry = [utils.str_or(x) if n == 0 \
                 else utils.int_or(x) if n < 2 \
                 else utils.float_or(x) if n < 3 \
                 else utils.str_or(x) \
                 for n, x in enumerate(this_entry)]
    except Exception as e:
        utils.echo_error_msg('could not parse entry {}'.format(self.fn))
        return (self)

    ## ==============================================
    ## data format
    ## guess format based on fn if not specified and
    ## parse format for dataset specific opts.
    ## ==============================================
    if len(entry) < 2:
        for key in self.data_types.keys():
            se = entry[0].split('.')
            see = se[-1] if len(se) > 1 else entry[0].split(":")[0]
            if see in self.data_types[key]['fmts']:
                entry.append(int(key))
                break

        if len(entry) < 2:
            utils.echo_error_msg('could not parse entry {}'.format(self.fn))
            return (self)
    else:
        opts = this_entry[1].split(':')
        if len(opts) > 1:
            self.ds_args = utils.args2dict(list(opts[1:]), {})
            this_entry[1] = opts[0]
        else:
            self.ds_args = {}

    ## ==============================================
    ## weight
    ## inherit weight of parent
    ## ==============================================
    if len(entry) < 3:
        entry.append(1)
    elif entry[2] is None:
        entry[2] = 1

    if self.parent is not None:
        if self.weight is not None:
            self.weight *= entry[2]
    else:
        if self.weight is not None:
            self.weight = entry[2]

    ## ==============================================
    ## title
    ## ==============================================
    if len(entry) < 4:
        entry.append(self.metadata['title'])
    else:
        self.metadata['title'] = entry[3]

    ## ==============================================
    ## source
    ## ==============================================
    if len(entry) < 5:
        entry.append(self.metadata['source'])
    else:
        self.metadata['source'] = entry[4]

    ## ==============================================
    ## date
    ## ==============================================
    if len(entry) < 6:
        entry.append(self.metadata['date'])
    else:
        self.metadata['date'] = entry[5]

    ## ==============================================
    ## data type
    ## ==============================================
    if len(entry) < 7:
        entry.append(self.metadata['data_type'])
    else:
        self.metadata['data_type'] = entry[6]

    ## ==============================================
    ## resolution
    ## ==============================================
    if len(entry) < 8:
        entry.append(self.metadata['resolution'])
    else:
        self.metadata['resolution'] = entry[7]

    ## ==============================================
    ## hdatum
    ## ==============================================
    if len(entry) < 9:
        entry.append(self.metadata['hdatum'])
    else:
        self.metadata['hdatum'] = entry[8]

    ## ==============================================
    ## vdatum
    ## ==============================================
    if len(entry) < 10:
        entry.append(self.metadata['vdatum'])
    else:
        self.metadata['vdatum'] = entry[9]

    ## ==============================================
    ## url
    ## ==============================================
    if len(entry) < 11:
        entry.append(self.metadata['url'])
    else:
        self.metadata['url'] = entry[10]

    ## ==============================================
    ## file-name
    ## ==============================================
    if self.parent is None or entry[1] == -11:
        self.fn = entry[0]
    else:
        self.fn = os.path.join(os.path.dirname(self.parent.fn), entry[0])

    self.data_format = entry[1]
    self.guess_data_format()
    return (self)
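## A minimal sketch of the entry layout parse_fn() expects. The quoted-token
## regex below is the same one used above; the sample path, format code and
## metadata values are hypothetical.
import re

sample = 'surveys/h1234.xyz 168 1.5 "NOS Survey H1234" NOAA/NOS 2019'
fields = ['path', 'format', 'weight', 'title', 'source', 'date',
          'data_type', 'resolution', 'hdatum', 'vdatum', 'url']
tokens = re.findall(r'[^"\s]\S*|".+?"', sample.rstrip())
print(dict(zip(fields, tokens)))
## {'path': 'surveys/h1234.xyz', 'format': '168', 'weight': '1.5',
##  'title': '"NOS Survey H1234"', 'source': 'NOAA/NOS', 'date': '2019'}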