Code Example #1
File: dlim.py Project: ciresdem/cudem
    def generate_inf(self, callback=lambda: False):
        """return the region of the datalist and generate
        an associated `.inf` file if `inf_file` is True.
        """

        _region = self.region
        out_region = None
        out_regions = []
        self.region = None
        self.infos['name'] = self.fn
        self.infos['numpts'] = 0
        self.infos['hash'] = self.hash()  #dl_hash(self.fn)
        self._init_datalist_vector()
        for entry in self.parse():
            if self.verbose:
                callback()

            if entry.src_srs is not None and self.dst_srs is not None:
                e_region = regions.Region().from_list(entry.infos['minmax'])
                e_region.src_srs = entry.src_srs
                e_region.warp(self.dst_srs)
                entry_region = e_region.export_as_list(include_z=True)
            else:
                entry_region = entry.infos['minmax']

            if regions.Region().from_list(entry_region).valid_p():
                self._create_entry_feature(
                    entry,
                    regions.Region().from_list(entry_region))
                #out_regions.append(entry.infos['minmax'])
                out_regions.append(entry_region)
                if 'numpts' in self.infos.keys():
                    self.infos['numpts'] += entry.infos['numpts']

        self.ds = self.layer = None
        count = 0
        for this_region in out_regions:
            tmp_region = regions.Region().from_list(this_region)
            if tmp_region.valid_p():
                if count == 0:
                    out_region = tmp_region
                    count += 1
                else:
                    out_region = regions.regions_merge(out_region, tmp_region)

        if out_region is not None:
            self.infos['minmax'] = out_region.export_as_list(include_z=True)
            self.infos['wkt'] = out_region.export_as_wkt()
        else:
            self.infos['minmax'] = None

        self.region = _region
        if 'src_srs' not in self.infos.keys() or self.infos['src_srs'] is None:
            self.infos['src_srs'] = self.src_srs
        else:
            self.src_srs = self.infos['src_srs']

        return (self.infos)
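
A minimal usage sketch for the method above; the `Datalist` constructor signature and the datalist filename here are assumptions for illustration, not the project's documented API:

    # Hypothetical usage of generate_inf(): build the .inf metadata for a
    # datalist and print its merged region as WKT.
    import sys
    from cudem import dlim, regions

    dl = dlim.Datalist(fn='my_data.datalist', verbose=True)  # assumed signature
    infos = dl.generate_inf(callback=lambda: sys.stderr.write('.'))
    if infos['minmax'] is not None:
        print(regions.Region().from_list(infos['minmax']).export_as_wkt())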
Code Example #2
File: dlim.py Project: ciresdem/cudem
    def parse_(self):
        import zipfile
        exts = [
            DatasetFactory().data_types[x]['fmts']
            for x in DatasetFactory().data_types.keys()
        ]
        exts = [x for y in exts for x in y]
        datalist = []
        if self.fn.split('.')[-1].lower() == 'zip':
            with zipfile.ZipFile(self.fn) as z:
                zfs = z.namelist()
                for ext in exts:
                    for zf in zfs:
                        if ext == zf.split('.')[-1]:
                            datalist.append(os.path.basename(zf))

        for this_data in datalist:
            this_line = utils.p_f_unzip(self.fn, [this_data])[0]
            data_set = DatasetFactory(this_line,
                                      weight=self.weight,
                                      parent=self,
                                      src_region=self.region,
                                      metadata=copy.deepcopy(self.metadata),
                                      src_srs=self.src_srs,
                                      dst_srs=self.dst_srs,
                                      verbose=self.verbose).acquire()
            if data_set is not None and data_set.valid_p(
                    fmts=DatasetFactory.data_types[
                        data_set.data_format]['fmts']):
                if self.region is not None and self.region.valid_p(
                        check_xy=True):
                    try:
                        inf_region = regions.Region().from_string(
                            data_set.infos['wkt'])
                    except Exception:
                        try:
                            inf_region = regions.Region().from_list(
                                data_set.infos['minmax'])
                        except Exception:
                            inf_region = self.region.copy()

                    inf_region.wmin = data_set.weight
                    inf_region.wmax = data_set.weight
                    if regions.regions_intersect_p(inf_region, self.region):
                        for ds in data_set.parse():
                            self.data_entries.append(ds)
                            yield (ds)
                else:
                    for ds in data_set.parse():
                        self.data_entries.append(ds)
                        yield (ds)

            utils.remove_glob('{}*'.format(this_data))
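
The zip-scanning step at the top of `parse_` can be exercised on its own; a self-contained sketch, where the extension set is a stand-in for the formats registered in `DatasetFactory().data_types`:

    import zipfile

    def zip_members_with_exts(zip_path, exts):
        # Return archive member names whose final extension is in `exts`.
        with zipfile.ZipFile(zip_path) as z:
            return [zf for zf in z.namelist()
                    if zf.split('.')[-1].lower() in exts]

    # e.g. zip_members_with_exts('data.zip', {'xyz', 'las', 'tif'})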
Code Example #3
    def update(self):
        self.FRED._open_ds()
        v_zip = os.path.basename(self._hrdem_footprints_url)
        status = f_utils.Fetch(self._hrdem_footprints_url,
                               verbose=self.verbose).fetch_ftp_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if '.shp' in v: v_shp = v
        shp_regions = regions.gdal_ogr_regions(v_shp)
        shp_region = regions.Region()
        for this_region in shp_regions:
            if shp_region.valid_p(check_xy=True):
                shp_region = regions.regions_merge(shp_region, this_region)
            else:
                shp_region = this_region
        geom = shp_region.export_as_geom()

        self.FRED._attribute_filter(["ID = '{}'".format('HRDEM-1')])
        if self.FRED.layer is None or len(self.FRED.layer) == 0:
            self.FRED._add_survey(Name='High-Resolution DEM (Canada)',
                                  ID='HRDEM-1',
                                  Agency='NRCAN',
                                  Date=utils.this_year(),
                                  MetadataLink=self._hrdem_info_url,
                                  MetadataDate=utils.this_year(),
                                  DataLink=self._hrdem_footprints_url,
                                  IndexLink=self._hrdem_footprints_url,
                                  DataType='raster',
                                  DataSource='hrdem',
                                  Info='Canada Only',
                                  geom=geom)
        utils.remove_glob(v_zip, *v_shps)
        self.FRED._close_ds()
Code Example #4
File: utils.py Project: ciresdem/cudem
 def bounds(self, geom=True):
     wl = self.xml_doc.find('.//gmd:westBoundLongitude/gco:Decimal',
                            namespaces=self.namespaces)
     el = self.xml_doc.find('.//gmd:eastBoundLongitude/gco:Decimal',
                            namespaces=self.namespaces)
     sl = self.xml_doc.find('.//gmd:southBoundLatitude/gco:Decimal',
                            namespaces=self.namespaces)
     nl = self.xml_doc.find('.//gmd:northBoundLatitude/gco:Decimal',
                            namespaces=self.namespaces)
     if wl is not None and el is not None and sl is not None and nl is not None:
         region = [
             float(wl.text),
             float(el.text),
             float(sl.text),
             float(nl.text)
         ]
         if geom:
             return (regions.Region().from_list(region).export_as_geom())
         else:
             return (region)
     else:
         return (None)
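
A self-contained version of the same gmd/gco lookup using only the standard library; the namespace URIs are the standard ISO 19139 ones that `self.namespaces` presumably maps:

    import xml.etree.ElementTree as ET

    NS = {'gmd': 'http://www.isotc211.org/2005/gmd',
          'gco': 'http://www.isotc211.org/2005/gco'}

    def iso_bounds(xml_text):
        # Return [w, e, s, n] from an ISO 19139 record, or None if any
        # of the four bounding elements is missing.
        doc = ET.fromstring(xml_text)
        vals = []
        for tag in ('westBoundLongitude', 'eastBoundLongitude',
                    'southBoundLatitude', 'northBoundLatitude'):
            el = doc.find('.//gmd:{}/gco:Decimal'.format(tag), NS)
            if el is None:
                return None
            vals.append(float(el.text))
        return vals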
Code Example #5
    def update(self):
        self.FRED._open_ds()        
        v_zip = os.path.basename(self._arctic_dem_index_url)
        status = f_utils.Fetch(self._arctic_dem_index_url, verbose=self.verbose).fetch_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])

        v_shp = None
        for v in v_shps:
            if '.shp' in v: v_shp = v

        utils.run_cmd('ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(v_shp), verbose=True)
        utils.remove_glob(v_zip, *v_shps)
        v_shp = 'arctic_tmp.shp'
        v_shps = ['arctic_tmp.shp','arctic_tmp.dbf','arctic_tmp.shx','arctic_tmp.prj']
        shp_regions = regions.gdal_ogr_regions(v_shp)
        shp_region = regions.Region()
        for this_region in shp_regions:
            #this_region.src_srs = 'epsg:3413'
            #this_region.warp('epsg:4326')
            if shp_region.valid_p(check_xy=True):
                shp_region = regions.regions_merge(shp_region, this_region)
            else: shp_region = this_region
        geom = shp_region.export_as_geom()
        
        self.FRED._attribute_filter(["ID = '{}'".format('ARCTICDEM-1')])
        if self.FRED.layer is None or len(self.FRED.layer) == 0:
            self.FRED._add_survey(Name='ArcticDEM',
                                  ID='ARCTICDEM-1',
                                  Agency='UMN',
                                  Date=utils.this_year(),
                                  MetadataLink='https://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/',
                                  MetadataDate=utils.this_year(),
                                  DataLink=self._arctic_dem_index_url,
                                  IndexLink=self._arctic_dem_index_url,
                                  DataType='raster',
                                  DataSource='arcticdem',
                                  Info='Arctic Only',
                                  geom=geom)
        utils.remove_glob(*v_shps)
        self.FRED._close_ds()
Code Example #6
 def dist_region(self):
         
     dr = regions.Region().from_region(self.region)
     return(
         dr.buffer(
             x_bv=(self.xinc*self.extend),
             y_bv=(self.yinc*self.extend)
         )
     )
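
The buffer is simple arithmetic: each edge of the region moves out by `extend` cells at the gridding increment. The same operation on bare tuples, independent of the `regions` API:

    def buffered_region(xmin, xmax, ymin, ymax, xinc, yinc, extend):
        # Expand a region by `extend` cells in every direction.
        bx, by = xinc * extend, yinc * extend
        return (xmin - bx, xmax + bx, ymin - by, ymax + by)

    # e.g. buffered_region(-90.75, -88.1, 28.7, 31.25, 1/3600, 1/3600, 6)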
Code Example #7
File: utils.py Project: ciresdem/cudem
 def _get_coverage_region(self, cov_desc):
     uc = [
         float(x) for x in cov_desc["boundedBy"]["Envelope"]["upperCorner"]
         [0].split()
     ]
     lc = [
         float(x) for x in cov_desc["boundedBy"]["Envelope"]["lowerCorner"]
         [0].split()
     ]
     return (regions.Region().from_list([lc[1], uc[1], lc[0], uc[0]]))
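
Note the index shuffle: the code assumes the WCS `lowerCorner`/`upperCorner` strings are 'lat lon' pairs, while `from_list()` wants `[xmin, xmax, ymin, ymax]`. The same reordering on plain strings:

    def corners_to_region(lower_corner, upper_corner):
        # Map 'lat lon' corner strings to [xmin, xmax, ymin, ymax].
        lc = [float(x) for x in lower_corner.split()]
        uc = [float(x) for x in upper_corner.split()]
        return [lc[1], uc[1], lc[0], uc[0]]

    # corners_to_region('28.7 -90.75', '31.25 -88.1')
    # -> [-90.75, -88.1, 28.7, 31.25]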
Code Example #8
    def yield_xyz(self, entry):
        src_dc = os.path.basename(entry[1])
        src_ext = src_dc.split('.')[-1].lower()
        if src_ext == 'laz' or src_ext == 'las': dt = 'lidar'
        elif src_ext == 'tif' or src_ext == 'img': dt = 'raster'
        else: dt = None
        if dt == 'lidar':
            if f_utils.Fetch(entry[0],
                             callback=self.callback,
                             verbose=self.verbose).fetch_file(src_dc) == 0:
                _ds = datasets.LASFile(
                    fn=src_dc,
                    data_format=400,
                    dst_srs=self.dst_srs,
                    #name=src_dc,
                    src_region=self.region,
                    verbose=self.verbose,
                    remote=True)
                if self.inc is not None:
                    b_region = regions.regions_reduce(
                        self.region,
                        regions.Region().from_list(_ds.infos['minmax']))
                    xyz_func = lambda p: _ds.dump_xyz(dst_port=p, encode=True)
                    for xyz in utils.yield_cmd(
                            'gmt blockmedian -I{:.10f} {} -r -V'.format(
                                self.inc, b_region.format('gmt')),
                            verbose=self.verbose,
                            data_fun=xyz_func):
                        yield (xyzfun.XYZPoint().from_list(
                            [float(x) for x in xyz.split()]))

                else:
                    for xyz in _ds.yield_xyz():
                        yield (xyz)
                utils.remove_glob('{}*'.format(src_dc))
        elif dt == 'raster':
            if f_utils.Fetch(entry[0],
                             callback=self.callback,
                             verbose=self.verbose).fetch_file(src_dc) == 0:
                _ds = datasets.RasterFile(
                    fn=src_dc,
                    data_format=200,
                    dst_srs=self.dst_srs,
                    #src_srs=None,
                    name=src_dc,
                    src_region=self.region,
                    verbose=self.verbose)
                for xyz in _ds.block_xyz(
                        inc=self.inc, want_gmt=True
                ) if self.inc is not None else _ds.yield_xyz():
                    yield (xyz)
                utils.remove_glob('{}*'.format(src_dc))
Code Example #9
File: dlim.py Project: ciresdem/cudem
    def generate_inf(self, callback=lambda: False):
        """generate a infos dictionary from the Fetches dataset"""

        self.infos['name'] = self.fn
        self.infos['hash'] = None
        self.infos['numpts'] = 0
        if self.region is None:
            #self.region = self.fetch_module.region
            self.region = regions.Region().from_list([-180, 180, -90, 90])

        self.infos['minmax'] = self.region.export_as_list()
        self.infos['wkt'] = self.region.export_as_wkt()
        return (self.infos)
Code Example #10
    def run(self):
        '''Run the GMRT fetching module'''

        if self.region is None:
            return ([])

        self.data = {
            'north': self.region.ymax,
            'west': self.region.xmin,
            'south': self.region.ymin,
            'east': self.region.xmax,
            'mformat': 'json',
            'resolution': self.res,
            'format': self.fmt,
        }

        req = f_utils.Fetch(self._gmrt_grid_urls_url).fetch_req(
            params=self.data, tries=10, timeout=2)
        if req is not None:
            try:
                gmrt_urls = req.json()
            except Exception as e:
                utils.echo_error_msg(e)
                gmrt_urls = []

            for url in gmrt_urls:
                if self.layer == 'topo-mask':
                    url = url.replace('topo', 'topo-mask')

                opts = {}
                for url_opt in url.split('?')[1].split('&'):
                    opt_kp = url_opt.split('=')
                    opts[opt_kp[0]] = opt_kp[1]

                url_region = regions.Region().from_list([
                    float(opts['west']),
                    float(opts['east']),
                    float(opts['south']),
                    float(opts['north'])
                ])
                outf = 'gmrt_{}_{}.tif'.format(opts['layer'],
                                               url_region.format('fn'))
                self.results.append(
                    [url, os.path.join(self._outdir, outf), 'gmrt'])

        return (self)
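
The request itself is a plain HTTP GET with those parameters; a sketch with `requests`, taking the endpoint as an argument since the actual value of `self._gmrt_grid_urls_url` is not shown here:

    import requests

    def fetch_gmrt_urls(grid_urls_url, region, res='max', fmt='geotiff'):
        # region = (xmin, xmax, ymin, ymax); returns [] on any failure.
        params = {'north': region[3], 'west': region[0],
                  'south': region[2], 'east': region[1],
                  'mformat': 'json', 'resolution': res, 'format': fmt}
        try:
            resp = requests.get(grid_urls_url, params=params, timeout=10)
            return resp.json()
        except (requests.RequestException, ValueError):
            return []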
Code Example #11
    def run(self):
        """Run the EMODNET fetching module"""

        if self.region is None: return ([])

        _data = {
            'request': 'DescribeCoverage',
            'version': '2.0.1',
            'CoverageID': 'emodnet:mean',
            'service': 'WCS',
        }
        _req = f_utils.Fetch(self._emodnet_grid_url).fetch_req(params=_data)
        _results = lxml.etree.fromstring(_req.text.encode('utf-8'))
        g_env = _results.findall(
            './/{http://www.opengis.net/gml/3.2}GridEnvelope',
            namespaces=f_utils.namespaces)[0]
        hl = [
            float(x) for x in g_env.find(
                '{http://www.opengis.net/gml/3.2}high').text.split()
        ]

        g_bbox = _results.findall(
            './/{http://www.opengis.net/gml/3.2}Envelope')[0]
        lc = [
            float(x) for x in g_bbox.find(
                '{http://www.opengis.net/gml/3.2}lowerCorner').text.split()
        ]
        uc = [
            float(x) for x in g_bbox.find(
                '{http://www.opengis.net/gml/3.2}upperCorner').text.split()
        ]

        ds_region = regions.Region().from_list([lc[1], uc[1], lc[0], uc[0]])
        resx = (uc[1] - lc[1]) / hl[0]
        resy = (uc[0] - lc[0]) / hl[1]
        if regions.regions_intersect_ogr_p(self.region, ds_region):
            emodnet_wcs = ('{}service=WCS&request=GetCoverage&version=1.0.0'
                           '&Identifier=emodnet:mean&coverage=emodnet:mean'
                           '&format=GeoTIFF&bbox={}&resx={}&resy={}'
                           '&crs=EPSG:4326').format(
                               self._emodnet_grid_url,
                               self.region.format('bbox'),
                               resx, resy)
            outf = 'emodnet_{}.tif'.format(self.region.format('fn'))
            self.results.append(
                [emodnet_wcs,
                 os.path.join(self._outdir, outf), 'emodnet'])
        return (self)
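
The resolution arithmetic deserves a note: the GridEnvelope `high` entry carries the pixel counts, so the native cell size is extent divided by cells per axis, with the same 'lat lon' corner ordering as above:

    # lc/uc are [lat, lon]; hl is [n_cols, n_rows], as the code assumes.
    lc, uc, hl = [25.0, -36.0], [85.0, 43.0], [15803.0, 12003.0]  # illustrative values
    resx = (uc[1] - lc[1]) / hl[0]   # lon extent / columns
    resy = (uc[0] - lc[0]) / hl[1]   # lat extent / rows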
Code Example #12
    def update(self):
        """Crawl the COP30 database and update/generate the NASADEM reference vector."""
        
        self.FRED._open_ds(1)
        surveys = []                    
        f = f_utils.Fetch(self.nasadem_vrt_url, headers=self.headers, verbose=True)
        page = f.fetch_xml()
        fns = page.findall('.//SourceFilename')

        if self.verbose:
            _prog = utils.CliProgress('scanning {} tiles in {}...'.format(len(fns), self.nasadem_url))
        
        for i, fn in enumerate(fns):
            sid = fn.text.split('/')[-1].split('.')[0]
            if self.verbose:
                _prog.update_perc((i, len(fns)))
                
            self.FRED._attribute_filter(["ID = '{}'".format(sid)])
            if self.FRED.layer is None or len(self.FRED.layer) == 0:
                spat = fn.text.split('_HGT_')[-1].split('.')[0]
                xsplit = 'e' if 'e' in spat else 'w'
                ysplit = 's' if 's' in spat else 'n'
                x = int(spat.split(xsplit)[-1])
                y = int(spat.split(xsplit)[0].split(ysplit)[-1])

                if xsplit == 'w':
                    x = x * -1
                if ysplit == 's':
                    y = y * -1

                this_region = regions.Region().from_list([x, x + 1, y, y + 1])
                geom = this_region.export_as_geom()
                if geom is not None:
                    surveys.append({
                        'Name': fn.text.split('.')[0].split('/')[-1],
                        'ID': sid,
                        'Agency': 'NASA',
                        'Date': utils.this_date(),
                        'MetadataLink': '',
                        'MetadataDate': utils.this_date(),
                        'DataLink': self.nasadem_url + fn.text.split('/')[-1] + '?token=',
                        'DataType': '1',
                        'DataSource': 'nasadem',
                        'HorizontalDatum': 4326,
                        'Etcetra': self.nasadem_rurl,
                        'VerticalDatum': 'msl',
                        'Info': '',
                        'geom': geom
                    })

        if self.verbose:
            _prog.end(0, 'scanned {} tiles in {}.'.format(len(fns), self.nasadem_url))
            utils.echo_msg('added {} NASADEM DEM tiles'.format(len(surveys)))
        self.FRED._add_surveys(surveys)
        self.FRED._close_ds()
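
The tile-name parsing is easy to test in isolation; a self-contained sketch for HGT-style ids, mirroring the sign handling above:

    def hgt_tile_region(name):
        # Parse e.g. 'NASADEM_HGT_n40w106' -> [xmin, xmax, ymin, ymax].
        spat = name.split('_HGT_')[-1].split('.')[0]
        xsplit = 'e' if 'e' in spat else 'w'
        ysplit = 's' if 's' in spat else 'n'
        x = int(spat.split(xsplit)[-1])
        y = int(spat.split(xsplit)[0].split(ysplit)[-1])
        if xsplit == 'w': x = -x
        if ysplit == 's': y = -y
        return [x, x + 1, y, y + 1]

    # hgt_tile_region('NASADEM_HGT_n40w106') -> [-106, -105, 40, 41]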
Code Example #13
File: dlim.py Project: ciresdem/cudem
    def generate_inf(self, callback=lambda: False):
        """return the region of the datalist and generate
        an associated `.inf` file if `inf_file` is True.
        """

        _region = self.region
        out_region = None
        out_regions = []
        self.region = None
        self.infos['name'] = self.fn
        self.infos['numpts'] = 0
        self.infos['hash'] = self.hash()  #dl_hash(self.fn)

        for entry in self.parse_():
            if self.verbose:
                callback()

            out_regions.append(entry.infos['minmax'])
            if 'numpts' in self.infos.keys():
                self.infos['numpts'] += entry.infos['numpts']

        count = 0
        for this_region in out_regions:
            tmp_region = regions.Region().from_list(this_region)
            if tmp_region.valid_p():
                if count == 0:
                    out_region = tmp_region
                    count += 1
                else:
                    out_region = regions.regions_merge(out_region, tmp_region)

        if out_region is not None:
            self.infos['minmax'] = out_region.export_as_list(include_z=True)
            self.infos['wkt'] = out_region.export_as_wkt()
        else:
            self.infos['minmax'] = None

        self.region = _region
        return (self.infos)
Code Example #14
    def _cdn_transform(self, epsg=None, name=None, invert=False):
        """create a cdn transofrmation grid"""

        if epsg is not None:
            cdn_results = cudem.fetches.vdatum.search_proj_cdn(self.src_region, epsg=epsg, cache_dir=self.cache_dir)
        else: cdn_results = cudem.fetches.vdatum.search_proj_cdn(self.src_region, cache_dir=self.cache_dir)

        for _result in cdn_results:
            for g in _geoids:
                if g in _result['name']:
                    cdn_results = [_result]
                    
        if len(cdn_results) > 0:
            for _result in cdn_results:
                src_code = int(_result['source_crs_code'].split(':')[-1])
                dst_code = int(_result['target_crs_code'].split(':')[-1])
                #if epsg == dst_code or epsg == src_code or np.any([g in _result['name'] for g in self._geoids]):
                if epsg == dst_code or np.any([g in _result['name'] for g in _geoids]):
                    if src_code in _htdp_reference_frames.keys():
                        _trans_grid = os.path.join(self.cache_dir, _result['name'])
                        if cudem.fetches.utils.Fetch(_result['url'], verbose=self.verbose).fetch_file(_trans_grid) == 0:
                            tmp_infos = demfun.infos(_trans_grid)
                            tmp_region = regions.Region().from_geo_transform(tmp_infos['geoT'], tmp_infos['nx'], tmp_infos['ny'])
                            if os.path.exists('_{}'.format(os.path.basename(_trans_grid))):
                                utils.remove_glob('_{}'.format(os.path.basename(_trans_grid)))
                            utils.run_cmd('gdalwarp {} {} -s_srs epsg:4326 -te {} -ts {} {} --config CENTER_LONG 0'.format(
                                _trans_grid, '_{}'.format(os.path.basename(_trans_grid)), self.src_region.format('te'), self.xcount, self.ycount
                            ), verbose=True)
                            
                            _tmp_array, _tmp_infos = demfun.get_array('_{}'.format(os.path.basename(_trans_grid)))
                            utils.remove_glob('_{}'.format(os.path.basename(_trans_grid)))
                            if invert:
                                _tmp_array = _tmp_array * -1
                            
                            return(_tmp_array, src_code)

        utils.echo_error_msg('failed to locate transformation for {}'.format(epsg))
        return(np.zeros( (self.ycount, self.xcount) ), epsg)
Code Example #15
File: hydrolakes.py Project: ciresdem/cudem
    def update(self):
        self.FRED._open_ds()
        v_shp = None
        v_zip = os.path.basename(self._hydrolakes_poly_zip)
        status = f_utils.Fetch(self._hydrolakes_poly_zip,
                               verbose=self.verbose).fetch_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        for v in v_shps:
            if '.shp' in v:
                v_shp = v

        shp_regions = regions.gdal_ogr_regions(v_shp)
        shp_region = regions.Region()
        for this_region in shp_regions:
            if shp_region.valid_p(check_xy=True):
                shp_region = regions.regions_merge(shp_region, this_region)
            else:
                shp_region = this_region

        geom = shp_region.export_as_geom()
        self.FRED._attribute_filter(["ID = '{}'".format('HYDROLAKES')])
        if self.FRED.layer is None or len(self.FRED.layer) == 0:
            self.FRED._add_survey(Name='HydroLakes',
                                  ID='HYDROLAKES',
                                  Agency='HydroSheds',
                                  Date=utils.this_year(),
                                  MetadataLink=self._hydrolakes_prods,
                                  MetadataDate=utils.this_year(),
                                  DataLink=self._hydrolakes_poly_zip,
                                  IndexLink=self._hydrolakes_poly_zip,
                                  DataType='vector',
                                  DataSource='hydrolakes',
                                  Info='World-wide lakes',
                                  geom=geom)

        utils.remove_glob(v_zip, *v_shps)
        self.FRED._close_ds()
Code Example #16
File: utils.py Project: ciresdem/cudem
 def _get_coverage_url(self, coverage, region=None):
     dl_coverage = self.fix_coverage_id(coverage)
     cov_desc = self._describe_coverage(coverage)
     fmt = cov_desc["ServiceParameters"]["nativeFormat"][0]
     hl = [
         float(x) for x in cov_desc["domainSet"]["RectifiedGrid"]["limits"]
         ["GridEnvelope"]['high'][0].split()
     ]
     uc = [
         float(x) for x in cov_desc["boundedBy"]["Envelope"]["upperCorner"]
         [0].split()
     ]
     lc = [
         float(x) for x in cov_desc["boundedBy"]["Envelope"]["lowerCorner"]
         [0].split()
     ]
     ds_region = regions.Region().from_list([lc[1], uc[1], lc[0], uc[0]])
     resx = (uc[1] - lc[1]) / hl[0]
     resy = (uc[0] - lc[0]) / hl[1]
     data = {
         'request': 'GetCoverage',
         'version': '1.0.0',
         'service': 'WCS',
         'resx': resx,
         'resy': resy,
         'crs': 'EPSG:4326',
         'format': fmt,
         'coverage': coverage,
         'Identifier': coverage
     }
     if region is not None: data['bbox'] = region.format('bbox')
      enc_data = urlencode(data)
      return ('{}{}'.format(self.url, enc_data))
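
For reference, the same GetCoverage URL assembly stands alone as a small helper; `base_url` plays the role of `self.url`:

    from urllib.parse import urlencode

    def wcs_getcoverage_url(base_url, coverage, resx, resy, fmt, bbox=None):
        # Assemble a WCS 1.0.0 GetCoverage request URL.
        data = {'request': 'GetCoverage', 'version': '1.0.0',
                'service': 'WCS', 'resx': resx, 'resy': resy,
                'crs': 'EPSG:4326', 'format': fmt,
                'coverage': coverage, 'Identifier': coverage}
        if bbox is not None:
            data['bbox'] = bbox
        return '{}{}'.format(base_url, urlencode(data))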
Code Example #17
def main():
    src_grid = None
    dst_grid = None
    vdatum_in = 5703
    vdatum_out = 7662
    verbose = False
    keep_cache = False
    cache_dir = utils.cudem_cache

    i = 1

    argv = sys.argv
    while i < len(argv):
        arg = argv[i]

        if arg == '-i' or arg == '--vdatum_in':
            vdatum_in = argv[i + 1]
            i = i + 1
        elif arg == '-o' or arg == '--vdatum_out':
            vdatum_out = argv[i + 1]
            i = i + 1

        elif arg == '--cache-dir' or arg == '-D' or arg == '-cache-dir':
            cache_dir = os.path.join(utils.str_or(argv[i + 1], os.path.expanduser('~')), '.cudem_cache')
            i = i + 1
        elif arg[:2] == '-D':
            cache_dir = os.path.join(utils.str_or(arg[2:], os.path.expanduser('~')), '.cudem_cache')
        elif arg == '--list-epsg':
            #print(_epsg_desc(htdpfun.HTDP()._reference_frames))
            print(_epsg_desc('htdp epsg', vdatums._htdp_reference_frames))
            print(_epsg_desc('cdn epsg', vdatums._cdn_reference_frames))
            print(_epsg_desc('tidal epsg', vdatums._tidal_frames))
            #list_proj_cdn_epsg()
            sys.exit(1)
        elif arg == '-k' or arg == '--keep-cache': keep_cache = True
        elif arg == '--verbose': verbose = True
        elif arg == '-help' or arg == '--help' or arg == '-h':
            print(_usage)
            sys.exit(1)
        elif arg == '-version' or arg == '--version':
            print('vertical_datum_convert.py, version {}'.format(_version))
            sys.exit(1)
        elif src_grid is None:
            src_grid = arg
        elif dst_grid is None:
            dst_grid = arg
        else:
            sys.stderr.write(_usage)
            sys.exit(1)
        i = i + 1

    if src_grid is None:
        sys.stderr.write(_usage)
        sys.exit(1)

    if dst_grid is None:
        dst_grid = '.'.join(src_grid.split('.')[:-1]) + '_' + str(vdatum_out).replace('(', '_').replace(')', '_') + '.' + src_grid.split('.')[-1]

    if not os.path.exists(src_grid):
        utils.echo_error_msg('Error: {} is not a valid file'.format(src_grid))
    else:
        src_infos = demfun.infos(src_grid)
        
        src_region = regions.Region().from_geo_transform(src_infos['geoT'], src_infos['nx'], src_infos['ny'])
        src_region.src_srs = demfun.get_srs(src_grid)
        src_region.warp()
        x_inc, y_inc = src_region.increments(src_infos['nx'], src_infos['ny'])
        tmp_x_inc, tmp_y_inc = src_region.increments(src_infos['nx']/10, src_infos['ny']/10)
        vt = vdatums.VerticalTransform(src_region, tmp_x_inc, tmp_y_inc, vdatum_in, vdatum_out, cache_dir=cache_dir)
        _trans_grid = vt.run()
        
        if _trans_grid is not None:
            utils.run_cmd('gdalwarp {} {} -ts {} {} -s_srs epsg:4326 -t_srs {}'.format(_trans_grid, '_{}'.format(_trans_grid), src_infos['nx'], src_infos['ny'], demfun.get_srs(src_grid)), verbose=True)
            utils.run_cmd('gdal_calc.py -A {} -B {} --calc "A+B" --outfile {}'.format(src_grid.replace(' ', '\\ '), '_{}'.format(_trans_grid).replace(' ', '\\ '), dst_grid.replace(' ', '\\ ')), verbose=True)
            utils.remove_glob(_trans_grid, '_{}'.format(_trans_grid))
        else:
            utils.echo_error_msg('could not parse input/output vertical datums: {} -> {}; check spelling, etc'.format(vdatum_in, vdatum_out))

        if not keep_cache:
            utils.remove_glob(cache_dir)
Code Example #18
    def yield_xyz(self, entry):
        src_dc = os.path.basename(entry[1])
        src_ext = src_dc.split('.')[-1].lower()
        if src_ext == 'laz' or src_ext == 'las': dt = 'lidar'
        elif src_ext == 'tif' or src_ext == 'img': dt = 'raster'
        else: dt = None
        if dt == 'lidar':
            if f_utils.Fetch(entry[0],
                             callback=self.callback,
                             verbose=self.verbose).fetch_file(src_dc) == 0:
                _ds = datasets.LASFile(fn=src_dc,
                                       data_format=400,
                                       dst_srs=self.dst_srs,
                                       name=src_dc,
                                       src_region=self.region,
                                       verbose=self.verbose,
                                       remote=True)
                if self.inc is not None:
                    b_region = regions.regions_reduce(
                        self.region,
                        regions.Region().from_list(_ds.infos['minmax']))
                    xyz_func = lambda p: _ds.dump_xyz(dst_port=p, encode=True)
                    for xyz in utils.yield_cmd(
                            'gmt blockmedian -I{:.10f} {} -r -V'.format(
                                self.inc, b_region.format('gmt')),
                            verbose=self.verbose,
                            data_fun=xyz_func):
                        yield (xyzfun.XYZPoint().from_list(
                            [float(x) for x in xyz.split()]))

                else:
                    for xyz in _ds.yield_xyz():
                        yield (xyz)

        elif dt == 'raster':
            if f_utils.Fetch(entry[0],
                             callback=self.callback,
                             verbose=self.verbose).fetch_file(src_dc) == 0:
                _ds = datasets.RasterFile(fn=src_dc,
                                          data_format=200,
                                          dst_srs=self.dst_srs,
                                          src_srs=None,
                                          name=src_dc,
                                          src_region=self.region,
                                          verbose=self.verbose)
                for xyz in _ds.block_xyz(
                        inc=self.inc, want_gmt=True
                ) if self.inc is not None else _ds.yield_xyz():
                    yield (xyz)
                utils.remove_glob(src_dc)
Code Example #19
    def update(self):
        """Update or create the reference vector file"""

        self.FRED._open_ds(1)
        for vd in self._vdatums:
            surveys = []

            if vd == 'TIDAL' or vd == 'IGLD85':
                ## ==============================================
                ## All tidal inf data is in each one, so we only
                ## have to download one of the tidal zips to process
                ## them all; lets use the smallest one
                ## Keep this link up-to-date!
                ## ==============================================
                if vd == 'TIDAL':
                    vd_ = 'DEVAemb12_8301'
                else:
                    vd_ = vd

                vd_zip_url = '{}{}.zip'.format(self._vdatum_data_url, vd_)
                v_inf = 'tidal_area.inf'
            elif vd == 'VERTCON':
                vd_zip_url = '{}vdatum_{}.zip'.format(self._vdatum_data_url,
                                                      vd)
                v_inf = 'vcn.inf'
            else:
                vd_zip_url = '{}vdatum_{}.zip'.format(self._vdatum_data_url,
                                                      vd)
                v_inf = '{}.inf'.format(vd.lower())

            if f_utils.Fetch(vd_zip_url, verbose=True).fetch_file(
                    '{}.zip'.format(vd)) == 0:
                v_infs = utils.p_unzip('{}.zip'.format(vd), ['inf'])
                v_dict = proc_vdatum_inf(
                    v_inf,
                    name=vd if vd != 'TIDAL' else None
                )  #, loff=-360 if vd == 'TIDAL' else -180

                for key in v_dict.keys():
                    v_dict[key]['vdatum'] = vd
                    v_dict[key]['remote'] = vd_zip_url

                ## tidal datums:
                if vd == 'TIDAL':
                    v_dict_ = {}
                    for tidal_key in v_dict.keys():
                        for t in self._tidal_datums:
                            key_ = '{}_{}'.format(t, tidal_key)
                            v_dict_[key_] = {}
                            v_dict_[key_]['region'] = v_dict[tidal_key][
                                'region']
                            v_dict_[key_]['vdatum'] = t
                            v_dict_[key_]['grid'] = '{}.gtx'.format(t)
                            v_dict_[key_]['remote'] = '{}{}.zip'.format(
                                self._vdatum_data_url, tidal_key)

                    v_dict = v_dict_

                print(v_dict)

                for key in v_dict.keys():
                    self.FRED._attribute_filter(["ID = '{}'".format(key)])
                    if self.FRED.layer is None or len(self.FRED.layer) == 0:
                        geom = regions.Region().from_list(
                            v_dict[key]['region']).export_as_geom()
                        if geom is not None:
                            surveys.append({
                                'Name': v_dict[key]['grid'],
                                'ID': key,
                                'Agency': 'NOAA',
                                'Date': utils.this_date(),
                                'MetadataLink': "",
                                'MetadataDate': utils.this_date(),
                                'DataLink': v_dict[key]['remote'],
                                'Link': self._vdatum_data_url,
                                'DataType': v_dict[key]['vdatum'],
                                'DataSource': 'vdatum',
                                'HorizontalDatum': 4326,
                                'VerticalDatum': v_dict[key]['vdatum'],
                                'Info': "",
                                'geom': geom
                            })
            self.FRED._add_surveys(surveys)
            utils.remove_glob(*v_infs, '{}.zip'.format(vd))

        self.FRED._close_ds()
Code Example #20
File: spatial_metadata.py Project: ciresdem/cudem
    def dist_region(self):

        dr = regions.Region().from_region(self.region)
        return (dr.buffer((self.inc * self.extend)))
Code Example #21
File: spatial_metadata.py Project: ciresdem/cudem
        elif arg == '-r' or arg == '--grid-node':
            node = 'grid'
        elif arg == '-p' or arg == '--prefix':
            want_prefix = True
        elif arg == '-help' or arg == '--help' or arg == '-h':
            sys.stderr.write(_usage)
            sys.exit(1)
        elif arg == '-version' or arg == '--version':
            sys.stderr.write('{}\n'.format(_version))
            sys.exit(1)
        else:
            dls.append(arg)
        i = i + 1

    for i_region in i_regions:
        tmp_region = regions.Region().from_string(i_region)
        if tmp_region.valid_p():
            these_regions.append(tmp_region)
        else:
            tmp_region = regions.ogr_wkts(i_region)
            for i in tmp_region:
                if i.valid_p():
                    these_regions.append(i)

    if len(these_regions) == 0:
        these_regions = [None]
    else:
        if want_verbose:
            utils.echo_msg('parsed {} region(s)'.format(len(these_regions)))

    for rn, this_region in enumerate(these_regions):
Code Example #22
File: copernicus.py Project: ciresdem/cudem
    def update(self):
        """Crawl the COP30 database and update/generate the COPERNICUS reference vector."""

        self.FRED._open_ds(1)
        surveys = []
        page = f_utils.Fetch(self.cop_10_url, verbose=True).fetch_html()
        rows = page.xpath('//a[contains(@href, ".zip")]/@href')
        if self.verbose:
            _prog = utils.CliProgress('scanning {} tiles in {}...'.format(
                len(rows), self.cop_10_url))

        for i, row in enumerate(rows):
            sid = row.split('.')[0]
            if self.verbose:
                _prog.update_perc((i, len(rows)))

            self.FRED._attribute_filter(["ID = '{}'".format(sid)])
            if self.FRED.layer is None or len(self.FRED.layer) == 0:
                spat = row.split('.')[0].split('_')[-1]
                x = int(spat.split('x')[-1])
                y = int(spat.split('x')[0].split('y')[-1])
                this_region = regions.Region().from_list(
                    [x, x + 10, y, y + 10])
                geom = this_region.export_as_geom()
                if geom is not None:
                    surveys.append({
                        'Name': row.split('.')[0],
                        'ID': sid,
                        'Agency': 'EU',
                        'Date': utils.this_date(),
                        'MetadataLink': self.cop_10_aux_url,
                        'MetadataDate': utils.this_date(),
                        'DataLink': self.cop_10_url + row,
                        'DataType': '3',
                        'DataSource': 'copernicus',
                        'HorizontalDatum': 'epsg:4326',
                        'VerticalDatum': 'msl',
                        'Info': '',
                        'geom': geom
                    })

        if self.verbose:
            _prog.end(
                0, 'scanned {} tiles in {}.'.format(len(rows),
                                                    self.cop_10_url))

        f = f_utils.Fetch(self.cop30_vrt_url,
                          headers=self.headers,
                          verbose=True)
        page = f.fetch_xml()
        fns = page.findall('.//SourceFilename')
        if self.verbose:
            _prog = utils.CliProgress('scanning {} tiles in {}...'.format(
                len(fns), self.cop30_url))

        for i, fn in enumerate(fns):
            sid = fn.text.split('/')[-1].split('.')[0]
            if self.verbose:
                _prog.update_perc((i, len(fns)))

            self.FRED._attribute_filter(["ID = '{}'".format(sid)])
            if self.FRED.layer is None or len(self.FRED.layer) == 0:
                spat = fn.text.split('_10_')[-1].split('_DEM')[0]
                xsplit = '_E' if 'E' in spat else '_W'
                ysplit = 'S' if 'S' in spat else 'N'
                x = int(spat.split(xsplit)[-1].split('_')[0])
                y = int(spat.split(xsplit)[0].split(ysplit)[-1].split('_')[0])

                if xsplit == '_W':
                    x = x * -1
                if ysplit == 'S':
                    y = y * -1

                this_region = regions.Region().from_list([x, x + 1, y, y + 1])
                geom = this_region.export_as_geom()
                if geom is not None:
                    surveys.append({
                        'Name': fn.text.split('.')[0].split('/')[-1],
                        'ID': sid,
                        'Agency': 'EU',
                        'Date': utils.this_date(),
                        'MetadataLink': '',
                        'MetadataDate': utils.this_date(),
                        'DataLink': self.cop30_url + fn.text.split('/')[-1] + '?token=',
                        'DataType': '1',
                        'DataSource': 'copernicus',
                        'HorizontalDatum': 'epsg:4326',
                        'Etcetra': self.cop30_rurl,
                        'VerticalDatum': 'msl',
                        'Info': '',
                        'geom': geom
                    })

        if self.verbose:
            _prog.end(
                0, 'scanned {} tiles in {}.'.format(len(fns), self.cop30_url))
            utils.echo_msg('added {} COPERNICUS DEM tiles'.format(
                len(surveys)))

        self.FRED._add_surveys(surveys)
        self.FRED._close_ds()
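
As with the NASADEM example, the Copernicus tile-id parsing is self-contained; a sketch for ids like 'Copernicus_DSM_COG_10_N40_00_W106_00_DEM':

    def cop30_tile_region(name):
        # Parse e.g. 'Copernicus_DSM_COG_10_N40_00_W106_00_DEM'
        # into [xmin, xmax, ymin, ymax] for a 1-degree tile.
        spat = name.split('_10_')[-1].split('_DEM')[0]   # 'N40_00_W106_00'
        xsplit = '_E' if 'E' in spat else '_W'
        ysplit = 'S' if 'S' in spat else 'N'
        x = int(spat.split(xsplit)[-1].split('_')[0])
        y = int(spat.split(xsplit)[0].split(ysplit)[-1].split('_')[0])
        if xsplit == '_W': x = -x
        if ysplit == 'S': y = -y
        return [x, x + 1, y, y + 1]

    # cop30_tile_region('Copernicus_DSM_COG_10_N40_00_W106_00_DEM')
    # -> [-106, -105, 40, 41]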
Code Example #23
File: gdal_null.py Project: ciresdem/cudem
            verbose = True
        elif arg[0] == '-':
            sys.stderr.write(_usage)
            sys.exit(1)
        elif output is None:
            output = arg
        else:
            sys.stderr.write(_usage)
            sys.exit(1)

        i = i + 1

    if output is None:
        sys.stderr.write(_usage)
        utils.echo_error_msg('you must enter an output file name')
        sys.exit(0)

    if extent is None:
        extent = '1'
    else:
        this_region = regions.Region().from_list(extent)

    #Run the program given the user input
    if cpgrd is not None:
        createNullCopy(cpgrd, output, nodata, d_format, verbose, overwrite)
    else:
        createGrid(output, this_region, cellsize, nodata, d_format, verbose,
                   overwrite)

### END
Code Example #24
File: dlim.py Project: ciresdem/cudem
    def parse(self):
        """import a datalist entry from a string"""

        if self.verbose:
            _prog = utils.CliProgress('parsing datalist {}{}'.format(
                self.fn,
                ' @{}'.format(self.weight) if self.weight is not None else ''))

        if os.path.exists(self.fn):
            with open(self.fn, 'r') as f:
                count = sum(1 for _ in f)

            with open(self.fn, 'r') as op:
                for l, this_line in enumerate(op):
                    if self.verbose:
                        _prog.update_perc((l, count))

                    if (this_line[0] != '#' and this_line[0] != '\n'
                            and this_line[0].rstrip() != ''):
                        data_set = DatasetFactory(
                            this_line,
                            weight=self.weight,
                            parent=self,
                            src_region=self.region,
                            metadata=copy.deepcopy(self.metadata),
                            src_srs=self.src_srs,
                            dst_srs=self.dst_srs,
                            x_inc=self.x_inc,
                            y_inc=self.y_inc,
                            sample_alg=self.sample_alg,
                            verbose=self.verbose).acquire()
                        if data_set is not None and data_set.valid_p(
                                fmts=DatasetFactory.data_types[
                                    data_set.data_format]['fmts']):
                            if self.region is not None and self.region.valid_p(
                                    check_xy=True):
                                try:
                                    inf_region = regions.Region().from_list(
                                        data_set.infos['minmax'])
                                except Exception:
                                    inf_region = self.region.copy()

                                inf_region.wmin = data_set.weight
                                inf_region.wmax = data_set.weight
                                if regions.regions_intersect_p(
                                        inf_region, self.region
                                        if data_set.dst_trans is None else
                                        data_set.trans_region):
                                    for ds in data_set.parse():
                                        self.data_entries.append(ds)
                                        yield (ds)
                            else:
                                for ds in data_set.parse():
                                    self.data_entries.append(ds)
                                    yield (ds)
        elif len(self.data_entries) > 0:
            for data_set in self.data_entries:
                for ds in data_set.parse():
                    yield (ds)
        else:
            if self.verbose:
                utils.echo_warning_msg(
                    'could not open datalist/entry {}'.format(self.fn))

        if self.verbose:
            _prog.end(
                0, 'parsed datalist {}{}'.format(
                    self.fn, ' @{}'.format(self.weight)
                    if self.weight is not None else ''))
Code Example #25
def spat_meta_cli(argv = sys.argv):
    i = 1
    dls = []
    i_regions = []
    these_regions = []
    src_srs = 'epsg:4326'
    xinc = utils.str2inc('1s')
    yinc = utils.str2inc('1s')
    node = 'pixel'
    name = 'waffles_spat'
    ogr_format = 'ESRI Shapefile'
    extend = 0
    want_verbose = True
    want_prefix = False
    want_recursive = False
    prefix_args = {}
            
    argv = sys.argv
    while i < len(argv):
        arg = sys.argv[i]
        if arg == '--region' or arg == '-R':
            i_regions.append(str(argv[i + 1]))
            i = i + 1
        elif arg[:2] == '-R':
            i_regions.append(str(arg[2:]))
        elif arg == '--outname' or arg == '-O':
            name = argv[i + 1]
            i += 1
        elif arg[:2] == '-O': name = arg[2:]
        elif arg == '-s_srs' or arg == '--s_srs' or arg == '-P':
            src_srs = argv[i + 1]
            i = i + 1
        elif arg == '--increment' or arg == '-E':
            incs = argv[i + 1].split(':')
            xy_inc = incs[0].split('/')
            xinc = utils.str2inc(xy_inc[0])
            if len(xy_inc) > 1:
                yinc = utils.str2inc(xy_inc[1])
            else:
                yinc = utils.str2inc(xy_inc[0])
            i = i + 1
        elif arg[:2] == '-E':
            incs = arg[2:].split(':')
            xy_inc = incs[0].split('/')
            xinc = utils.str2inc(xy_inc[0])
            if len(xy_inc) > 1:
                yinc = utils.str2inc(xy_inc[1])
            else:
                yinc = utils.str2inc(xy_inc[0])
        elif arg == '--extend' or arg == '-X':
            exts = argv[i + 1].split(':')
            extend = utils.int_or(exts[0], 0)
            i += 1
        elif arg[:2] == '-X':
            exts = arg[2:].split(':')
            extend = utils.int_or(exts[0], 0)
        elif arg == '--format' or arg == '-F':
            ogr_format = argv[i + 1]
            i += 1
        elif arg[:2] == '-F':
            ogr_format = arg[2:]
        elif arg == '-p' or arg == '--prefix':
            want_prefix = True
            prefix_opts = argv[i + 1].split(':')
            prefix_args = utils.args2dict(prefix_opts, prefix_args)
            if len(prefix_args) > 0:
                i += 1
                
        elif arg == '-r' or arg == '--grid-node': node = 'grid'
        elif arg == '-c' or arg == '--recursive': want_recursive = True
        elif arg == '--quiet' or arg == '-q': want_verbose = False
        elif arg == '-help' or arg == '--help' or arg == '-h':
            sys.stderr.write(_usage)
            sys.exit(1)
        elif arg == '-version' or arg == '--version':
            sys.stdout.write('{}\n'.format(__version__))

            sys.exit(1)
        else: dls.append(arg)
        i = i + 1

    for i_region in i_regions:
        tmp_region = regions.Region().from_string(i_region)
        if tmp_region.valid_p(check_xy=True):
            these_regions.append(tmp_region)
        else:
            i_region_s = i_region.split(':')
            tmp_region = regions.ogr_wkts(i_region_s[0])
            for i in tmp_region:
                if i.valid_p():
                    if len(i_region_s) > 1:
                        these_regions.append(
                            regions.Region().from_string(
                                '/'.join([i.format('str'), i_region_s[1]])
                            )
                        )
                        
                    else:
                        these_regions.append(i)

    if len(these_regions) == 0:
        these_regions = [None]
        utils.echo_error_msg('Could not parse region(s): {}'.format(i_regions))
        sys.stderr.write('{}\n'.format(_usage))
        sys.exit(1)
    else:
        if want_verbose:
            utils.echo_msg(
                'parsed {} region(s)'.format(len(these_regions))
            )

    name_ = name
    for rn, this_region in enumerate(these_regions):
        utils.echo_msg('using region {}'.format(this_region.format('gmt')))
        if len(dls) == 0:
            sys.stderr.write(_usage)
            utils.echo_error_msg('you must specify some type of data')
        else:
            if want_prefix or len(these_regions) > 1:
                name_ = utils.append_fn(name, this_region, xinc, **prefix_args)
                
            if os.path.exists('{}_sm.{}'.format(name_, utils.ogr_fext(ogr_format))):
                utils.echo_msg(
                    'SPATIAL METADATA {} already exists, skipping...'.format(
                        '{}_sm.{}'.format(name_, utils.ogr_fext(ogr_format))))
            else:
                
                SpatialMetadata(
                    data=dls,
                    src_region=this_region,
                    xinc=xinc,
                    yinc=yinc,
                    extend=extend,
                    src_srs=src_srs,
                    node=node,
                    name=name_,
                    verbose=want_verbose,
                    recursive=want_recursive,
                    ogr_format=ogr_format
                ).run()
Code Example #26
File: dlim.py Project: ciresdem/cudem
def datalists_cli(argv=sys.argv):
    """run datalists from command-line

    See `datalists_cli_usage` for full cli options.
    """

    dls = []
    src_srs = None
    dst_srs = None
    i_regions = []
    these_regions = []
    xy_inc = [None, None]
    want_weights = False
    want_inf = False
    want_list = False
    want_glob = False
    want_archive = False
    want_verbose = True
    want_region = False
    want_csv = False
    want_json = False
    want_datalists = False
    want_separate = False

    ## ==============================================
    ## parse command line arguments.
    ## ==============================================
    i = 1
    while i < len(argv):
        arg = argv[i]
        if arg == '--region' or arg == '-R':
            i_regions.append(str(argv[i + 1]))
            i = i + 1
        elif arg[:2] == '-R':
            i_regions.append(str(arg[2:]))
        elif arg == '--increment' or arg == '-E':
            xy_inc = argv[i + 1].split('/')
            i = i + 1
        elif arg[:2] == '-E':
            xy_inc = arg[2:].split('/')
        elif arg == '-s_srs' or arg == '--s_srs' or arg == '-P':
            src_srs = argv[i + 1]
            i = i + 1
        elif arg == '-t_srs' or arg == '--t_srs' or arg == '-W':
            dst_srs = argv[i + 1]
            i = i + 1
        elif arg == '--separate' or arg == '-s':
            want_separate = True
        elif arg == '--archive' or arg == '-a':
            want_archive = True
        elif arg == '--weights' or arg == '-w':
            want_weights = True
        elif arg == '--info' or arg == '-i':
            want_inf = True
        elif arg == '--region_inf' or arg == '-r':
            want_region = True
        elif arg == '--list' or arg == '-l':
            want_list = True
        elif arg == '--glob' or arg == '-g':
            want_glob = True
        elif arg == '--datalists' or arg == '-d':
            want_datalists = True
        elif arg == '--csv' or arg == '-c':
            want_csv = True
        elif arg == '--json' or arg == '-j':
            want_json = True
        elif arg == '--quiet' or arg == '-q':
            want_verbose = False
        elif arg == '--help' or arg == '-h':
            print(datalists_usage)
            sys.exit(1)
        elif arg == '--version' or arg == '-v':
            print('{}, version {}'.format(os.path.basename(sys.argv[0]),
                                          cudem.__version__))
            sys.exit(1)
        elif arg[0] == '-':
            print(datalists_usage)
            sys.exit(0)
        else:
            dls.append(arg)

        i = i + 1

    if len(xy_inc) == 0:
        xy_inc = [None, None]
    elif len(xy_inc) < 2:
        xy_inc.append(xy_inc[0])

    if want_glob:
        import glob
        for key in DatasetFactory().data_types.keys():
            if key != -1:
                for f in DatasetFactory().data_types[key]['fmts']:
                    globs = glob.glob('*.{}'.format(f))
                    [
                        sys.stdout.write('{}\n'.format(' '.join(
                            [x, str(key), '1']))) for x in globs
                    ]

        sys.exit(0)

    for i_region in i_regions:
        tmp_region = regions.Region().from_string(i_region)
        if tmp_region.valid_p(check_xy=True):
            these_regions.append(tmp_region)
        else:
            i_region_s = i_region.split(':')
            tmp_region = regions.ogr_wkts(i_region_s[0])
            for i in tmp_region:
                if i.valid_p():
                    if len(i_region_s) > 1:
                        these_regions.append(regions.Region().from_string(
                            '/'.join([i.format('str'), i_region_s[1]])))
                    else:
                        these_regions.append(i)

    if len(these_regions) == 0:
        these_regions = [None]
    elif want_verbose:
        utils.echo_msg('parsed {} region(s)'.format(len(these_regions)))

    for rn, this_region in enumerate(these_regions):
        if len(dls) == 0:
            sys.stderr.write(datalists_usage)
            utils.echo_error_msg('you must specify some type of data')
            break
        else:
            this_datalist = init_data(dls, this_region, src_srs, dst_srs,
                                      xy_inc, 'bilinear', want_verbose)
            if this_datalist is not None and this_datalist.valid_p(
                    fmts=DatasetFactory.data_types[
                        this_datalist.data_format]['fmts']):
                if not want_weights:
                    this_datalist.weight = None

                if want_inf:
                    print(this_datalist.inf())
                elif want_list:
                    this_datalist.echo()
                elif want_archive:
                    # consume the archive_xyz() iterator for its side effects
                    for _ in this_datalist.archive_xyz():
                        pass
                elif want_region:
                    print(regions.Region().from_list(
                        this_datalist.inf()['minmax']).format('gmt'))
                elif want_csv:
                    this_datalist.parse_data_lists()
                    for x in this_datalist.data_lists.keys():
                        this_datalist.data_entries = this_datalist.data_lists[
                            x]['data']
                        p = this_datalist.data_lists[x]['parent']
                        print('|'.join([
                            '"{}"'.format(str(y)) for y in [
                                x, p.metadata['title'] if p.metadata['title']
                                is not None else x, p.metadata['source'],
                                p.metadata['date'], p.metadata['data_type'],
                                p.metadata['resolution'], p.metadata['hdatum'],
                                p.metadata['vdatum'], p.metadata['url']
                            ]
                        ]))
                elif want_datalists:
                    this_datalist.parse_data_lists()

                    for x in this_datalist.data_lists.keys():
                        p = this_datalist.data_lists[x]['parent']
                        print('{} ({})|{}'.format(p.metadata['title'],
                                                  p.metadata['name'],
                                                  p.weight))
                else:
                    if want_separate:
                        try:
                            for this_entry in this_datalist.parse():
                                this_entry.dump_xyz()
                        except KeyboardInterrupt:
                            utils.echo_error_msg('Killed by user')
                            break
                        except BrokenPipeError:
                            utils.echo_error_msg('Pipe Broken')
                            break
                    else:
                        try:
                            this_datalist.dump_xyz()
                        except KeyboardInterrupt:
                            utils.echo_error_msg('Killed by user')
                            break
                        except BrokenPipeError:
                            utils.echo_error_msg('Pipe Broken')
                            break
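
Both CLIs in this file accept `-R` region strings in GMT's `xmin/xmax/ymin/ymax` form (the global fallback used by `fetches_cli` below is `-R-180/180/-90/90`); anything that fails `valid_p(check_xy=True)` is retried as an OGR datasource lookup. As a rough, dependency-free stand-in for that validity check -- an assumption about the string format, not cudem's actual implementation -- consider:

def valid_region(region_str):
    """Loose stand-in for regions.Region().from_string(...).valid_p(check_xy=True)."""
    try:
        xmin, xmax, ymin, ymax = (float(v) for v in region_str.split('/')[:4])
    except ValueError:
        return False
    return xmin < xmax and ymin < ymax

assert valid_region('-180/180/-90/90')     # the global default region
assert not valid_region('my_regions.shp')  # would fall through to the OGR path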
Code example #27
def fetches_cli(argv=sys.argv):
    """run fetches from command-line

See `fetches_cli_usage` for full cli options.
    """

    i_regions = []
    these_regions = []
    mods = []
    mod_opts = {}
    want_list = False
    want_proc = False
    want_verbose = True
    stop_threads = False

    ## ==============================================
    ## parse command line arguments.
    ## ==============================================
    i = 1
    while i < len(argv):
        arg = argv[i]
        if arg == '--region' or arg == '-R':
            i_regions.append(str(argv[i + 1]))
            i = i + 1
        elif arg[:2] == '-R':
            i_regions.append(str(arg[2:]))
        elif arg == '--list' or arg == '-l':
            want_list = True
        elif arg == '--process' or arg == '-p':
            want_proc = True
        elif arg == '--quiet' or arg == '-q':
            want_verbose = False
        elif arg == '--help' or arg == '-h':
            sys.stderr.write(fetches_usage)
            sys.exit(1)
        elif arg == '--version' or arg == '-v':
            print('{}, version {}'.format(os.path.basename(sys.argv[0]),
                                          fetches.__version__))
            sys.exit(1)
        elif arg == '--modules' or arg == '-m':
            try:
                if argv[i + 1] in FetchesFactory.mods.keys():
                    sys.stderr.write(
                        _fetches_module_long_desc({
                            k: FetchesFactory.mods[k]
                            for k in (argv[i + 1], )
                        }))
                else:
                    sys.stderr.write(
                        _fetches_module_long_desc(FetchesFactory.mods))

            except IndexError:
                sys.stderr.write(_fetches_module_long_desc(
                    FetchesFactory.mods))

            sys.exit(0)
        elif arg[0] == '-':
            sys.stderr.write(fetches_usage)
            sys.exit(0)
        else:
            mods.append(arg)

        i = i + 1

    if len(mods) == 0:
        sys.stderr.write(fetches_usage)
        utils.echo_error_msg('you must select at least one fetch module')
        sys.exit(-1)

    for i_region in i_regions:
        tmp_region = regions.Region().from_string(i_region)
        if tmp_region.valid_p(check_xy=True):
            these_regions.append(tmp_region)
        else:
            i_region_s = i_region.split(':')
            tmp_region = regions.ogr_wkts(i_region_s[0])
            for i in tmp_region:
                if i.valid_p():
                    if len(i_region_s) > 1:
                        these_regions.append(regions.Region().from_string(
                            '/'.join([i.format('str'), i_region_s[1]])))
                    else:
                        these_regions.append(i)

    if not these_regions:
        these_regions = [regions.Region().from_string('-R-180/180/-90/90')]
    if want_verbose:
        utils.echo_msg('parsed {} region(s)'.format(len(these_regions)))

    for rn, this_region in enumerate(these_regions):
        if stop_threads:
            return

        x_fs = [
            FetchesFactory(mod=mod,
                           src_region=this_region,
                           verbose=want_verbose).acquire(dst_srs='epsg:4326')
            for mod in mods
        ]
        for x_f in x_fs:
            if x_f is None:
                continue

            if want_verbose:
                utils.echo_msg(
                    'running fetch module {} on region {}...'.format(
                        x_f.name, this_region.format('str')))

            x_f.run()
            if want_verbose:
                utils.echo_msg('found {} data files.'.format(len(x_f.results)))

            if len(x_f.results) == 0:
                # nothing found by this module; try the next one rather
                # than aborting the remaining modules
                continue

            if want_list:
                for result in x_f.results:
                    print(result[0])
            else:
                fr = f_utils.fetch_results(x_f, want_proc=want_proc)
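                # daemon thread: fetches can still exit if the main
                # thread is interrupted mid-download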
                fr.daemon = True
                _p = utils.CliProgress('fetching {} remote data files'.format(
                    len(x_f.results)))
                try:
                    fr.start()
                    while True:
                        time.sleep(2)
                        sys.stderr.write('\x1b[2K\r')
                        if want_verbose:
                            _p.update_perc(
                                (len(x_f.results) - fr.fetch_q.qsize(),
                                 len(x_f.results)))

                        sys.stderr.flush()
                        if not fr.is_alive():
                            break

                except (KeyboardInterrupt, SystemExit):
                    utils.echo_error_msg(
                        'user breakage...please wait while fetches exits.')
                    x_f.status = -1
                    stop_threads = True
                    while not fr.fetch_q.empty():
                        try:
                            fr.fetch_q.get(False)
                        except Empty:
                            continue

                        fr.fetch_q.task_done()
                fr.join()
                _p.end(x_f.status,
                       'fetched {} remote data files'.format(len(x_f.results)))
            if want_verbose:
                utils.echo_msg('ran fetch module {} on region {}...'.format(
                    x_f.name, this_region.format('str')))
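
When the user interrupts, the handler above empties `fr.fetch_q`, calling `task_done()` on each discarded entry, so the fetch thread winds down and `fr.join()` returns promptly instead of blocking on pending work. A self-contained sketch of that shutdown pattern, using only the standard library (the worker below is a stand-in, not cudem's `fetch_results` class):

import queue
import threading
import time

fetch_q = queue.Queue()
for n in range(100):
    fetch_q.put(n)

def worker(q):
    # stand-in fetch thread: exits once the queue is empty
    while True:
        try:
            q.get(False)
        except queue.Empty:
            break
        time.sleep(0.01)  # simulate one remote fetch
        q.task_done()

t = threading.Thread(target=worker, args=(fetch_q,), daemon=True)
t.start()
try:
    while t.is_alive():
        time.sleep(0.5)
except KeyboardInterrupt:
    # discard pending work so the worker sees an empty queue and exits
    while not fetch_q.empty():
        try:
            fetch_q.get(False)
        except queue.Empty:
            continue
        fetch_q.task_done()
t.join()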
Code example #28
    def _update_prods(self):
        """updated FRED with each product file available from TNM"""

        for dsTag in self._elev_ds:
            offset = 0
            utils.echo_msg('processing TNM dataset {}...'.format(dsTag))
            _req = f_utils.Fetch(
                self._tnm_product_url).fetch_req(params={
                    'max': 1,
                    'datasets': dsTag
                })
            try:
                _dsTag_results = _req.json()
            except ValueError:
                utils.echo_error_msg('tnm server error, try again')
                continue
            except Exception as e:
                utils.echo_error_msg('error, {}'.format(e))
                # skip this dataset; _dsTag_results would be unbound below
                continue

            total = _dsTag_results['total']
            if self.verbose:
                _prog = utils.CliProgress(
                    'gathering {} products from {}...'.format(total, dsTag))

            ds = self._datasets(dataset=dsTag)
            this_xml = f_utils.iso_xml('{}?format=iso'.format(ds['infoUrl']))
            h_epsg, v_epsg = this_xml.reference_system()

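            # page through the TNM products endpoint 100 records at a time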
            while True:
                _data = {'max': 100, 'datasets': dsTag, 'offset': offset}
                _req = f_utils.Fetch(
                    self._tnm_product_url).fetch_req(params=_data)
                try:
                    _dsTag_results = _req.json()
                except ValueError:
                    utils.echo_error_msg('tnm server error, try again')
                    break
                except Exception as e:
                    utils.echo_error_msg('error, {}'.format(e))
                    break
                if self.verbose:
                    _prog.update_perc(
                        (offset, total),
                        msg='gathering {} products from {}...'.format(
                            total, dsTag))

                for i, item in enumerate(_dsTag_results['items']):
                    if self.verbose:
                        _prog.update_perc(
                            (i + offset, total),
                            msg='gathering {} products from {}...'.format(
                                total, dsTag))
                    try:
                        self.FRED.layer.SetAttributeFilter("ID = '{}'".format(
                            item['sourceId']))
                    except Exception:
                        pass
                    if self.FRED.layer is None or len(self.FRED.layer) == 0:
                        bbox = item['boundingBox']
                        geom = regions.Region().from_list([
                            bbox['minX'], bbox['maxX'], bbox['minY'],
                            bbox['maxY']
                        ]).export_as_geom()

                        if item['format'] in ('IMG', 'GeoTIFF'):
                            tnm_ds = 'raster'
                        elif item['format'] in ('LAZ', 'LAS'):
                            tnm_ds = 'lidar'
                        else:
                            tnm_ds = 'tnm'

                        if geom is not None:
                            self.FRED._add_survey(
                                Name=item['title'],
                                ID=item['sourceId'],
                                Agency='USGS',
                                Date=item['publicationDate'],
                                MetadataLink=item['metaUrl'],
                                MetadataDate=item['dateCreated'],
                                DataLink=item['downloadURL'],
                                Link=item['sourceOriginId'],
                                Resolution=item['extent'],
                                DataType=tnm_ds,
                                DataSource='tnm',
                                HorizontalDatum=h_epsg,
                                VerticalDatum=v_epsg,
                                Etcetra=dsTag,
                                Info=item['moreInfo'],
                                geom=geom)
                offset += 100
                if offset >= total:
                    break
            if self.verbose:
                _prog.end(0,
                          'gathered {} products from {}'.format(total, dsTag))
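
The `max`/`offset` paging above fetches the product catalog 100 records at a time until `offset` passes the reported `total`. A minimal sketch of the same loop using `requests` directly; the endpoint URL is an assumption standing in for `self._tnm_product_url`, which is defined outside this excerpt:

import requests

# assumed endpoint; cudem keeps this in self._tnm_product_url
TNM_PRODUCT_URL = 'https://tnmaccess.nationalmap.gov/api/v1/products'

def tnm_products(dataset, page_size=100):
    """Yield every product record for one TNM dataset, page by page."""
    offset = 0
    while True:
        resp = requests.get(TNM_PRODUCT_URL,
                            params={'max': page_size,
                                    'datasets': dataset,
                                    'offset': offset})
        resp.raise_for_status()
        results = resp.json()
        for item in results.get('items', []):
            yield item
        offset += page_size
        if offset >= results.get('total', 0):
            break

Used as `for item in tnm_products(dsTag): ...`, this walks the full catalog without holding more than one page in memory.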