Example #1: NOS survey scan (FRED update)
    def update(self):
        """Crawl the NOS database and update/generate the NOS reference vector."""
        
        self.FRED._open_ds(1)
        for nosdir in self._nos_directories:
            if self.callback():
                break
            
            surveys = []
            xml_catalog = self._nos_xml_url(nosdir)
            page = f_utils.Fetch(xml_catalog).fetch_html()
            if page is None:
                xml_catalog = self._nos_iso_xml_url(nosdir)
                page = f_utils.Fetch(xml_catalog).fetch_html()
                
            if page is None:
                utils.echo_error_msg('failed to retrieve {}'.format(nosdir))
                break
            
            rows = page.xpath('//a[contains(@href, ".xml")]/@href')
            if self.verbose:
                _prog = utils.CliProgress('scanning {} surveys in {}...'.format(len(rows), nosdir))

            for i, survey in enumerate(rows):
                if self.callback():
                    break
                
                sid = survey[:-4]
                if self.verbose:
                    _prog.update_perc((i, len(rows)))
                    
                self.FRED._attribute_filter(["ID = '{}'".format(sid)])
                if self.FRED.layer is None or len(self.FRED.layer) == 0:
                    this_xml = f_utils.iso_xml(xml_catalog + survey)
                    h_epsg, v_epsg = this_xml.reference_system()
                    this_data = this_xml.data_links()
                    d_links = []
                    d_types = []

                    for key in this_data.keys():
                        if key in ['GEODAS_XYZ', 'BAG', 'GRID_BAG']:
                            d_links.append(this_data[key])
                            d_types.append(key)

                    geom = this_xml.bounds(geom=True)
                    if geom is not None:
                        surveys.append({
                            'Name': this_xml.title(),
                            'ID': sid,
                            'Agency': 'NOAA/NOS',
                            'Date': this_xml.date(),
                            'MetadataLink': this_xml.url,
                            'MetadataDate': this_xml.xml_date(),
                            'DataLink': ','.join([','.join(x) for x in d_links]),
                            'DataType': ','.join(list(set(d_types))),
                            'DataSource': 'nos',
                            'HorizontalDatum': h_epsg,
                            'VerticalDatum': v_epsg,
                            'Info': this_xml.abstract(),
                            'geom': geom})
                        
            if self.verbose:
                _prog.end(0, 'scanned {} surveys in {}.'.format(len(rows), nosdir))
                utils.echo_msg('added {} surveys from {}'.format(len(surveys), nosdir))
                
            self.FRED._add_surveys(surveys)
        self.FRED._close_ds()
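
The open/filter/append/close sequence above is the FRED update lifecycle used throughout these examples. A minimal sketch of just that dedup loop, assuming the same FRED methods shown here and a hypothetical candidate_surveys iterable of (sid, survey_dict) pairs:

    # Sketch of the FRED dedup pattern, under the assumptions stated
    # above; not part of the original module.
    def _update_fred(fred, candidate_surveys):
        fred._open_ds(1)  # open the reference vector for editing
        new_surveys = []
        for sid, survey in candidate_surveys:
            fred._attribute_filter(["ID = '{}'".format(sid)])
            # append only surveys whose ID is not already registered
            if fred.layer is None or len(fred.layer) == 0:
                new_surveys.append(survey)
        fred._add_surveys(new_surveys)
        fred._close_ds()
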
Example #2: lidar/raster yield_xyz dispatch
    def yield_xyz(self, entry):
        src_dc = os.path.basename(entry[1])
        src_ext = src_dc.split('.')[-1].lower()
        if src_ext in ('laz', 'las'):
            dt = 'lidar'
        elif src_ext in ('tif', 'img'):
            dt = 'raster'
        else:
            dt = None
        if dt == 'lidar':
            if f_utils.Fetch(entry[0],
                             callback=self.callback,
                             verbose=self.verbose).fetch_file(src_dc) == 0:
                _ds = datasets.LASFile(
                    fn=src_dc,
                    data_format=400,
                    dst_srs=self.dst_srs,
                    #name=src_dc,
                    src_region=self.region,
                    verbose=self.verbose,
                    remote=True)
                if self.inc is not None:
                    b_region = regions.regions_reduce(
                        self.region,
                        regions.Region().from_list(_ds.infos['minmax']))
                    xyz_func = lambda p: _ds.dump_xyz(dst_port=p, encode=True)
                    for xyz in utils.yield_cmd(
                            'gmt blockmedian -I{:.10f} {} -r -V'.format(
                                self.inc, b_region.format('gmt')),
                            verbose=self.verbose,
                            data_fun=xyz_func):
                        yield (xyzfun.XYZPoint().from_list(
                            [float(x) for x in xyz.split()]))

                else:
                    for xyz in _ds.yield_xyz():
                        yield (xyz)
                utils.remove_glob('{}*'.format(src_dc))
        elif dt == 'raster':
            if f_utils.Fetch(entry[0],
                             callback=self.callback,
                             verbose=self.verbose).fetch_file(src_dc) == 0:
                _ds = datasets.RasterFile(
                    fn=src_dc,
                    data_format=200,
                    dst_srs=self.dst_srs,
                    #src_srs=None,
                    name=src_dc,
                    src_region=self.region,
                    verbose=self.verbose)
                for xyz in _ds.block_xyz(
                        inc=self.inc, want_gmt=True
                ) if self.inc is not None else _ds.yield_xyz():
                    yield (xyz)
                utils.remove_glob('{}.*'.format(src_dc))
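
yield_xyz methods like this one are generators over xyzfun.XYZPoint objects, so a caller can stream points without loading a whole dataset. A hedged consumption sketch, assuming fetch_module is an instance of a class like the one above and entry is one [url, local_path, datatype] triple from its results list:

    # Hypothetical usage; fetch_module and entry are assumed to exist.
    below_zero = 0
    for xyz in fetch_module.yield_xyz(entry):
        if xyz.z < 0:  # XYZPoint exposes .z, as used elsewhere here
            below_zero += 1
    print('{} submerged points'.format(below_zero))
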
Example #3: TIDES run (ArcGIS REST station query)
    def run(self):
        '''Run the TIDES fetching module'''

        if self.region is None:
            return ([])

        _data = {
            'outFields': '*',
            'units': 'esriSRUnit_Meter',
            'geometry': self.region.format('bbox'),
            'inSR': 4326,
            'outSR': 4326,
            'f': 'pjson',
        }
        _req = f_utils.Fetch(self._stations_api_url,
                             verbose=self.verbose).fetch_req(params=_data)
        if _req is not None:
            self.results.append([
                _req.url,
                os.path.join(
                    self._outdir,
                    'tides_results_{}.json'.format(self.region.format('fn'))),
                'tides'
            ])

        return (self)
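
Each run method queues [remote_url, local_path, datatype] triples on self.results; the datatype field later routes each entry to the right yield_xyz branch. A sketch of draining that queue, assuming fetch_file returns 0 on success as in the examples:

    # Sketch: download every queued result; the triple order follows
    # the examples above.
    for url, local_path, datatype in fetch_module.results:
        if f_utils.Fetch(url, verbose=True).fetch_file(local_path) != 0:
            utils.echo_error_msg('failed to fetch {}'.format(url))
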
Example #4: HRDEM footprint scan (FRED update)
    def update(self):
        self.FRED._open_ds()
        v_zip = os.path.basename(self._hrdem_footprints_url)
        status = f_utils.Fetch(self._hrdem_footprints_url,
                               verbose=self.verbose).fetch_ftp_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if '.shp' in v:
                v_shp = v
        shp_regions = regions.gdal_ogr_regions(v_shp)
        shp_region = regions.Region()
        for this_region in shp_regions:
            if shp_region.valid_p(check_xy=True):
                shp_region = regions.regions_merge(shp_region, this_region)
            else:
                shp_region = this_region
        geom = shp_region.export_as_geom()

        self.FRED._attribute_filter(["ID = '{}'".format('HRDEM-1')])
        if self.FRED.layer is None or len(self.FRED.layer) == 0:
            self.FRED._add_survey(Name='High-Resolution DEM (Canada)',
                                  ID='HRDEM-1',
                                  Agency='NRCAN',
                                  Date=utils.this_year(),
                                  MetadataLink=self._hrdem_info_url,
                                  MetadataDate=utils.this_year(),
                                  DataLink=self._hrdem_footprints_url,
                                  IndexLink=self._hrdem_footprints_url,
                                  DataType='raster',
                                  DataSource='hrdem',
                                  Info='Canada Only',
                                  geom=geom)
        utils.remove_glob(v_zip, *v_shps)
        self.FRED._close_ds()
Example #5: MGDS run
    def run(self):
        '''Run the MGDS fetching module'''

        if self.region is None:
            return([])
        
        self.data = {
            'north':self.region.ymax,
            'west':self.region.xmin,
            'south':self.region.ymin,
            'east':self.region.xmax,
            'format':'summary',
            'data_type':'Bathymetry',
        }

        req = f_utils.Fetch(self._mgds_file_url).fetch_req(
            params=self.data, tries=10, timeout=2
        )
        if req is not None:
            req_xml = lxml.etree.fromstring(req.content)
            req_results = req_xml.findall('.//{https://www.marine-geo.org/services/xml/mgdsDataService}file')

            for req_result in req_results:
                name = req_result.attrib['name']
                link = req_result.attrib['download']
                self.results.append([link, os.path.join(self._outdir, name), 'mgds'])            
                
        return(self)
Example #6: NGS monuments yield_xyz
    def yield_xyz(self, entry):
        """process ngs monuments"""

        src_data = 'ngs_tmp.json'
        src_csv = 'ngs_tmp.csv'
        if f_utils.Fetch(entry[0],
                         callback=self.callback,
                         verbose=self.verbose).fetch_file(src_data) == 0:
            with open(src_data, 'r') as json_file:
                r = json.load(json_file)

            if len(r) > 0:
                for row in r:
                    z = utils.float_or(row[self.datum])
                    if z is not None:
                        xyz = xyzfun.XYZPoint(src_srs='epsg:4326').from_list(
                            [float(row['lon']),
                             float(row['lat']), z])
                        if self.dst_srs is not None:
                            xyz.warp(dst_srs=self.dst_srs)

                        yield (xyz)

        else:
            utils.echo_error_msg(
                'failed to fetch remote file, {}...'.format(src_data))

        utils.remove_glob('{}*'.format(src_data))
Example #7: mar_grav yield_xyz
    def yield_xyz(self, entry):
        src_data = 'mar_grav_tmp.xyz'
        if f_utils.Fetch(entry[0],
                         callback=self.callback,
                         verbose=self.verbose,
                         verify=False).fetch_file(src_data) == 0:
            _ds = datasets.XYZFile(
                fn=src_data,
                data_format=168,
                skip=1,
                x_offset=-360,
                src_srs='epsg:4326',
                dst_srs=self.dst_srs,
                #name=src_data,
                src_region=self.region,
                verbose=self.verbose,
                remote=True)
            for xyz in _ds.yield_xyz():
                yield (xyz)

        else:
            utils.echo_error_msg(
                'failed to fetch remote file, {}...'.format(src_data))

        utils.remove_glob('{}*'.format(src_data))
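
The x_offset=-360 above shifts longitudes published in the 0-360 convention toward -180-180 before clipping against src_region. A hedged sketch of the wrap (the exact conditional lives inside datasets.XYZFile):

    # Worked longitude wrap implied by x_offset=-360; assumed behavior,
    # not lifted from datasets.XYZFile.
    def wrap_lon(lon):
        return lon - 360.0 if lon > 180.0 else lon

    assert wrap_lon(285.5) == -74.5
    assert wrap_lon(45.0) == 45.0
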
Example #8: USIEI run
    def run(self):
        if self.region is None:
            return ([])

        _data = {
            'where': self.where,
            'outFields': '*',
            'geometry': self.region.format('bbox'),
            'inSR': 4326,
            'outSR': 4326,
            'f': 'pjson',
            'returnGeometry': 'False',
        }
        _req = f_utils.Fetch(self._usiei_query_url,
                             verbose=self.verbose).fetch_req(params=_data)
        if _req is not None:
            print(_req.text)
            features = _req.json()
            # for feature in features['features']:
            #     links = json.loads(feature['attributes']['ExternalProviderLink'])
            #     for link in links['links']:
            #         if link['serviceID'] == 46:
            #             urllist = 'urllist' + str(feature['attributes']['ID']) + '.txt'
            #             index_zipfile = 'tileindex.zip'
            #             index_zipurl = link['link'] + '/' + index_zipfile
            #             if f_utils.Fetch(link['link'] + '/' + urllist, verbose=True).fetch_file(urllist) == 0:
            #                 with open(urllist, 'r') as ul:
            #                     for line in ul:
            #                         if 'tileindex' in line:
            #                             index_zipurl = line.strip()
            #                             break

            #                 utils.remove_glob(urllist)

        return (self)
Example #9: TNM run
    def run(self):
        '''Run the TNM (National Map) fetching module.'''

        if self.region is None:
            return ([])
        _req = f_utils.Fetch(self._tnm_dataset_url).fetch_req()
        if _req is not None:
            try:
                self._datasets = _req.json()
            except Exception as e:
                utils.echo_error_msg('try again, {}'.format(e))
                self.status = -1
        else:
            self.status = -1

        if self.status == 0:
            if self.index:
                self.print_dataset_index()
                sys.exit()

            this_ds = [int(self.ds)]
            if self.sub_ds is not None: this_ds.append(int(self.sub_ds))
            self._tnm_ds = [this_ds]
            self._tnm_df = [] if self.formats is None else [
                x.strip() for x in self.formats.split(',')
            ]
            self._extents = [] if self.extent is None else [
                x.strip() for x in self.extent.split(',')
            ]
            self.filter_datasets(self.q)
        return (self)
Example #10: GLOBALELUS run
    def run(self):
        '''Run the GLOBALELUS fetching module'''

        if self.region is None:
            return ([])
        _data = {
            'bbox': self.region.format('bbox'),
            'bboxSR': 4326,
            'imageSR': 4326,
            'format': 'png',
            'layers': 'visible:4',
            'f': 'pjson',
        }
        _req = f_utils.Fetch(self._usace_gs_api_url).fetch_req(params=_data)
        if _req is not None:
            print(_req.url)
            survey_list = _req.json()
            print(survey_list)
            fetch_fn = survey_list['href']
            self.results.append([
                fetch_fn,
                os.path.join(self._outdir,
                             fetch_fn.split('/')[-1]), 'globalelus'
            ])

        return (self)
Example #11: GMRT yield_xyz
    def yield_xyz(self, entry):
        src_data = 'gmrt_tmp.tif'
        if f_utils.Fetch(entry[0],
                         callback=self.callback,
                         verbose=self.verbose).fetch_file(src_data) == 0:
            gmrt_ds = datasets.RasterFile(
                fn=src_data,
                data_format=200,
                src_srs='epsg:4326',
                dst_srs=self.dst_srs,
                weight=0.25,
                #name=src_data,
                src_region=self.region,
                verbose=self.verbose)
            if self.bathy_only:
                for xyz in gmrt_ds.yield_xyz():
                    if xyz.z < 0:
                        yield (xyz)
            else:
                for xyz in gmrt_ds.yield_xyz():
                    yield (xyz)

        else:
            utils.echo_error_msg(
                'failed to fetch remote file, {}...'.format(src_data))

        utils.remove_glob('{}*'.format(src_data))
Example #12: ArcticDEM index scan (FRED update)
    def update(self):
        self.FRED._open_ds()        
        v_zip = os.path.basename(self._arctic_dem_index_url)
        status = f_utils.Fetch(self._arctic_dem_index_url, verbose=self.verbose).fetch_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])

        v_shp = None
        for v in v_shps:
            if '.shp' in v:
                v_shp = v

        utils.run_cmd('ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(v_shp), verbose=True)
        utils.remove_glob(v_zip, *v_shps)
        v_shp = 'arctic_tmp.shp'
        v_shps = ['arctic_tmp.shp','arctic_tmp.dbf','arctic_tmp.shx','arctic_tmp.prj']
        shp_regions = regions.gdal_ogr_regions(v_shp)
        shp_region = regions.Region()
        for this_region in shp_regions:
            #this_region.src_srs = 'epsg:3413'
            #this_region.warp('epsg:4326')
            if shp_region.valid_p(check_xy=True):
                shp_region = regions.regions_merge(shp_region, this_region)
            else:
                shp_region = this_region
        geom = shp_region.export_as_geom()
        
        self.FRED._attribute_filter(["ID = '{}'".format('ARCTICDEM-1')])
        if self.FRED.layer is None or len(self.FRED.layer) == 0:
            self.FRED._add_survey(Name='ArcticDEM',
                                  ID='ARCTICDEM-1',
                                  Agency='UMN',
                                  Date=utils.this_year(),
                                  MetadataLink='https://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/',
                                  MetadataDate=utils.this_year(),
                                  DataLink=self._arctic_dem_index_url,
                                  IndexLink=self._arctic_dem_index_url,
                                  DataType='raster',
                                  DataSource='arcticdem',
                                  Info='Arctic Only',
                                  geom=geom)
        utils.remove_glob(*v_shps)
        self.FRED._close_ds()
Example #13: NCEI raster yield_xyz
    def yield_xyz(self, entry):
        src_ncei = os.path.basename(entry[1])
        f_utils.Fetch(entry[0], callback=self.callback,
                      verbose=self.verbose).fetch_file(src_ncei)
        try:
            # gdal.Open usually signals failure by returning None rather
            # than raising, so the None check below still applies
            src_ds = gdal.Open(src_ncei)
        except Exception as e:
            utils.echo_error_msg(
                'could not read ncei raster file: {}, {}'.format(entry[0], e))
            src_ds = None

        if src_ds is not None:

            _ds = datasets.RasterFile(
                fn=src_ncei,
                data_format=200,
                src_srs='epsg:4326',
                dst_srs=self.dst_srs,
                #name=src_ncei,
                src_region=self.region,
                verbose=self.verbose)
            _ds.src_ds = src_ds
            _ds.ds_open_p = True
            for xyz in _ds.yield_xyz():
                yield (xyz)

        src_ds = None
        utils.remove_glob(src_ncei)
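
Note the hand-off above: an already-open GDAL dataset is injected into datasets.RasterFile by setting src_ds and ds_open_p after construction, avoiding a second gdal.Open. The same pattern isolated, with attribute names as used above (a sketch, not the module's documented API):

    # Sketch of the handle-injection pattern; src_path is hypothetical.
    src_path = 'ncei_tile.tif'
    src_ds = gdal.Open(src_path)
    if src_ds is not None:
        _ds = datasets.RasterFile(fn=src_path, data_format=200,
                                  src_srs='epsg:4326', dst_srs=None)
        _ds.src_ds = src_ds    # hand over the open handle
        _ds.ds_open_p = True   # tell RasterFile not to re-open fn
        for xyz in _ds.yield_xyz():
            pass
    src_ds = None              # release the GDAL handle
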
Example #14: HRDEM _update_all
    def _update_all(self):
        self.FRED._open_ds(1)
        v_zip = os.path.basename(self._hrdem_footprints_url)
        status = f_utils.Fetch(self._hrdem_footprints_url,
                               verbose=self.verbose).fetch_ftp_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
        v_shp = None
        for v in v_shps:
            if '.shp' in v: v_shp = v
        try:
            v_ds = ogr.Open(v_shp)
        except Exception:
            v_ds = None
            status = -1
        if v_ds is not None:
            layer = v_ds.GetLayer()
            fcount = layer.GetFeatureCount()
            if self.verbose:
                _prog = utils.CliProgress(
                    'scanning {} datasets...'.format(fcount))
            for f in range(0, fcount):
                feature = layer[f]
                name = feature.GetField('Tile_name')
                if self.verbose:
                    _prog.update_perc((f, fcount))
                try:
                    self.FRED.layer.SetAttributeFilter(
                        "Name = '{}'".format(name))
                except Exception:
                    pass
                if self.FRED.layer is None or len(self.FRED.layer) == 0:
                    data_link = feature.GetField('Ftp_dtm')
                    if data_link is not None:
                        geom = feature.GetGeometryRef()
                        self.FRED._add_survey(
                            Name=name,
                            ID=feature.GetField('Project'),
                            Agency='NRCAN',
                            Date=utils.this_year(),
                            MetadataLink=feature.GetField('Meta_dtm'),
                            MetadataDate=utils.this_year(),
                            DataLink=data_link.replace('http', 'ftp'),
                            IndexLink=self._hrdem_footprints_url,
                            DataType='raster',
                            DataSource='hrdem',
                            HorizontalDatum=feature.GetField(
                                'Coord_Sys').split(':')[-1],
                            Info=feature.GetField('Provider'),
                            geom=geom)

            if self.verbose:
                _prog.end(0, 'scanned {} datasets.'.format(fcount))
        utils.remove_glob(v_zip, *v_shps)
        self.FRED._close_ds()
Example #15: search_proj_cdn
def search_proj_cdn(region,
                    epsg=None,
                    crs_name=None,
                    name=None,
                    verbose=True,
                    cache_dir='./'):
    """Search PROJ CDN for transformation grids:
    the PROJ CDN holds transformation grids from around the
    world, including global transformations such as EGM
    """

    _proj_vdatum_index = 'https://cdn.proj.org/files.geojson'
    cdn_index = os.path.join(cache_dir, 'proj_cdn_files.geojson')
    if f_utils.Fetch(_proj_vdatum_index,
                     verbose=verbose).fetch_file(cdn_index) == 0:
        cdn_driver = ogr.GetDriverByName('GeoJSON')
        cdn_ds = cdn_driver.Open(cdn_index, 0)
        cdn_layer = cdn_ds.GetLayer()
        _boundsGeom = region.export_as_geom()
        _results = []

        if crs_name is not None:
            cdn_layer.SetAttributeFilter(
                "type != 'HORIZONTAL_OFFSET' AND (target_crs_name LIKE '%{}%' OR source_crs_name LIKE '%{}%')"
                .format(crs_name.upper(), crs_name.upper()))
        elif epsg is not None:
            cdn_layer.SetAttributeFilter(
                "type != 'HORIZONTAL_OFFSET' AND (target_crs_code LIKE '%{}%' OR source_crs_code LIKE '%{}%')"
                .format(epsg, epsg))
        elif name is not None:
            cdn_layer.SetAttributeFilter(
                "type != 'HORIZONTAL_OFFSET' AND name LIKE '%{}%'".format(
                    name))
        else:
            cdn_layer.SetAttributeFilter("type != 'HORIZONTAL_OFFSET'")

        for feat in cdn_layer:
            if _boundsGeom is not None:
                geom = feat.GetGeometryRef()
                if geom is not None:
                    if _boundsGeom.Intersects(geom):
                        _results.append({})
                        f_j = json.loads(feat.ExportToJson())
                        for key in f_j['properties'].keys():
                            _results[-1][key] = feat.GetField(key)
            else:
                _results.append({})
                f_j = json.loads(feat.ExportToJson())
                for key in f_j['properties'].keys():
                    _results[-1][key] = feat.GetField(key)

        cdn_ds = None
        #utils.remove_glob(cdn_index)
        return (_results)
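
A hedged usage sketch for search_proj_cdn, assuming regions.Region().from_list takes [xmin, xmax, ymin, ymax] as in the other examples, and that the CDN GeoJSON properties include name and url fields:

    # Hypothetical call: vertical grids intersecting a small Gulf Coast
    # region, filtered on EPSG 5703 (NAVD88 height).
    region = regions.Region().from_list([-90.0, -89.0, 28.0, 29.0])
    grids = search_proj_cdn(region, epsg=5703, cache_dir='./cache')
    for grid in (grids or []):
        print(grid.get('name'), grid.get('url'))
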
Example #16: multibeam parse_entry_inf
    def parse_entry_inf(self, entry, keep_inf=False):
        src_data = os.path.basename(entry[1])
        src_mb = src_data[:-4]
        survey = entry[0].split('/')[7]
        src_inf = '{}.inf'.format(src_mb)
        if f_utils.Fetch('{}.inf'.format(entry[0][:-4]), callback=self.callback, verbose=False).fetch_file(src_inf) == 0:
            mb_fmt = self.mb_inf_data_format(src_inf)
            mb_date = self.mb_inf_data_date(src_inf)
            mb_perc = self.mb_inf_perc_good(src_inf)
            if not keep_inf:
                utils.remove_glob(src_inf)
            return(survey, src_data, mb_fmt, mb_perc, mb_date)
Example #17: NOS run
    def run(self):
        if self.region is None:
            return([])

        _data = {
            'where': self.where,
            'outFields': '*',
            'geometry': self.region.format('bbox'),
            'inSR': 4326,
            'outSR': 4326,
            'f': 'pjson',
            'returnGeometry': 'False',
        }
        _req = f_utils.Fetch(self._nos_query_url, verbose=self.verbose).fetch_req(params=_data)
        if _req is not None:
            features = _req.json()
            for feature in features['features']:
                if self.index:
                    print(json.dumps(feature['attributes'], indent=4))
                else:
                    ID = feature['attributes']['SURVEY_ID']
                    link = feature['attributes']['DOWNLOAD_URL']
                    nos_dir = link.split('/')[-2]
                    data_link = '{}{}/{}/'.format(self._nos_data_url, nos_dir, ID)

                    if self.datatype is None or 'bag' in self.datatype.lower():
                        if feature['attributes']['BAGS_EXIST'] == 'TRUE':
                            page = f_utils.Fetch(data_link + 'BAG').fetch_html()
                            if page is not None:
                                bags = page.xpath('//a[contains(@href, ".bag")]/@href')
                                for bag in bags:
                                    self.results.append(['{0}BAG/{1}'.format(data_link, bag), os.path.join(self._outdir, bag), 'bag'])

                    if self.datatype is None or 'xyz' in self.datatype.lower():
                        page = f_utils.Fetch(data_link).fetch_html()
                        if page is not None:
                            geodas = page.xpath('//a[contains(@href, "GEODAS")]/@href')
                            if geodas:
                                xyz_link = data_link + 'GEODAS/{0}.xyz.gz'.format(ID)
                                self.results.append([xyz_link, os.path.join(self._outdir, xyz_link.split('/')[-1]), 'xyz'])
Example #18: charts run
    def run(self):
        if self.region is None:
            return([])

        _data = {
            'where': self.where,
            'outFields': '*',
            'geometry': self.region.format('bbox'),
            'inSR': 4326,
            'outSR': 4326,
            'f': 'pjson',
            'returnGeometry': 'False',
        }
        _req = f_utils.Fetch(self._charts_query_url, verbose=self.verbose).fetch_req(params=_data)
        if _req is not None:
            print(_req.text)
Example #19: NOS yield_xyz
    def yield_xyz(self, entry):
        src_nos = os.path.basename(entry[1])
        dt = None
        if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose).fetch_file(src_nos) == 0:
            dt = self._data_type(src_nos)
            
            if dt == 'geodas_xyz':
                nos_fns = utils.p_unzip(src_nos, ['xyz', 'dat'])
                for nos_f_r in nos_fns:
                    _ds = datasets.XYZFile(
                        fn=nos_f_r,
                        data_format=168,
                        skip=1,
                        xpos=2,
                        ypos=1,
                        zpos=3,
                        z_scale=-1,
                        src_srs='epsg:4326',
                        dst_srs=self.dst_srs,
                        name=nos_f_r,
                        src_region=self.region,
                        verbose=self.verbose,
                        remote=True
                    )
                    for xyz in _ds.yield_xyz():
                        yield(xyz)
                        
                utils.remove_glob(*nos_fns, *[x+'.inf' for x in nos_fns])

            elif dt == 'grid_bag':
                src_bags = utils.p_unzip(src_nos, exts=['bag'])
                for src_bag in src_bags:

                    _ds = datasets.RasterFile(
                        fn=src_bag,
                        data_format=200,
                        dst_srs=self.dst_srs,
                        name=src_bag,
                        src_region=self.region,
                        verbose=self.verbose
                    )
                    for xyz in _ds.yield_xyz():
                        yield(xyz)
                        
                utils.remove_glob(*src_bags)
        utils.remove_glob(src_nos)
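
The GEODAS reader settings above (skip=1, xpos=2, ypos=1, zpos=3, z_scale=-1) imply a headered file with 0-indexed fields of id, latitude, longitude, positive-down depth. A hedged walk-through on one made-up record:

    # Hypothetical GEODAS-style record; field layout inferred from the
    # xpos/ypos/zpos settings above.
    row = 'H12345,29.5042,-89.9117,12.4'.split(',')
    x = float(row[2])       # xpos=2: longitude
    y = float(row[1])       # ypos=1: latitude
    z = float(row[3]) * -1  # zpos=3, z_scale=-1: depth -> elevation
    assert z == -12.4
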
Example #20: GMRT run
    def run(self):
        '''Run the GMRT fetching module'''

        if self.region is None:
            return ([])

        self.data = {
            'north': self.region.ymax,
            'west': self.region.xmin,
            'south': self.region.ymin,
            'east': self.region.xmax,
            'mformat': 'json',
            'resolution': self.res,
            'format': self.fmt,
        }

        req = f_utils.Fetch(self._gmrt_grid_urls_url).fetch_req(
            params=self.data, tries=10, timeout=2)
        if req is not None:
            try:
                gmrt_urls = req.json()
            except Exception as e:
                utils.echo_error_msg(e)
                gmrt_urls = []

            for url in gmrt_urls:
                if self.layer == 'topo-mask':
                    url = url.replace('topo', 'topo-mask')

                opts = {}
                for url_opt in url.split('?')[1].split('&'):
                    opt_kp = url_opt.split('=')
                    opts[opt_kp[0]] = opt_kp[1]

                url_region = regions.Region().from_list([
                    float(opts['west']),
                    float(opts['east']),
                    float(opts['south']),
                    float(opts['north'])
                ])
                outf = 'gmrt_{}_{}.tif'.format(opts['layer'],
                                               url_region.format('fn'))
                self.results.append(
                    [url, os.path.join(self._outdir, outf), 'gmrt'])

        return (self)
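
The manual split('?')/split('&') parsing above can also be written with the standard library; an equivalent sketch for one url from gmrt_urls:

    # Equivalent query parsing via urllib; parse_qs returns lists,
    # hence the [0].
    from urllib.parse import urlparse, parse_qs

    opts = {k: v[0] for k, v in parse_qs(urlparse(url).query).items()}
    url_region = regions.Region().from_list([
        float(opts['west']), float(opts['east']),
        float(opts['south']), float(opts['north'])])
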
Example #21: ENC yield_xyz
    def yield_xyz(self, entry):
        """ENC data comes as a .000 file in a zip.

The data is referenced to MHW and is represente as a depth.
In U.S. waters, MHW can be transformed to MSL or the local GEOID using
VDatum and/or it's associated grids (mhw.gtx or tss.gtx)
"""

        ## create the tidal transformation grid from mhw to geoid
        src_zip = os.path.basename(entry[1])
        if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose).fetch_file(src_zip) == 0:
            if entry[2].lower() == 'enc':
                src_encs = utils.p_unzip(src_zip, ['000'])
                for src_ch in src_encs:
                    dst_xyz = src_ch.split('.')[0] + '.xyz'
                    try:
                        ds_ogr = ogr.Open(src_ch)
                        layer_s = ds_ogr.GetLayerByName('SOUNDG')
                        if layer_s is not None:
                            with open(dst_xyz, 'w') as o_xyz:
                                for f in layer_s:
                                    g = json.loads(f.GetGeometryRef().ExportToJson())
                                    for xyz in g['coordinates']:
                                        xyzfun.XYZPoint().from_list([float(x) for x in xyz]).dump(dst_port=o_xyz, encode=False)
                        ds_ogr = layer_s = None
                    except Exception:
                        utils.echo_warning_msg('could not parse {}'.format(src_ch))

                    _ds = datasets.XYZFile(
                        fn=dst_xyz,
                        data_format=168,
                        z_scale=-1,
                        src_srs='epsg:4326',
                        #src_srs='+proj=longlat +datum=WGS84 +geoidgrids=./{}'.format(vdatum_grid),
                        dst_srs=self.dst_srs,
                        #name=dst_xyz,
                        src_region=self.region,
                        verbose=self.verbose,
                        remote=True
                    )
                    for xyz in _ds.yield_xyz():
                        yield(xyz)

                    # remove this chart's xyz dump here; o_xyz in the
                    # original call was a file handle, not a path
                    utils.remove_glob(dst_xyz)

                utils.remove_glob(*src_encs)
        utils.remove_glob(src_zip)
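
The commented-out src_srs line above hints at the MHW-to-geoid shift the docstring describes: point PROJ at a VDatum separation grid via +geoidgrids. A hedged sketch; the grid and file names are illustrative, not files shipped with the module:

    # Hypothetical datum-aware read of an ENC-derived xyz file;
    # './mhw.gtx' is an assumed local VDatum grid.
    mhw_srs = '+proj=longlat +datum=WGS84 +geoidgrids=./mhw.gtx'
    _ds = datasets.XYZFile(fn='enc_soundings.xyz', data_format=168,
                           z_scale=-1, src_srs=mhw_srs,
                           dst_srs='epsg:4326')
    for xyz in _ds.yield_xyz():
        pass  # points now carry datum-shifted depths
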
Example #22: EMODNET yield_xyz
    def yield_xyz(self, entry):
        src_emodnet = 'emodnet_tmp.tif'
        if f_utils.Fetch(entry[0],
                         callback=self.callback,
                         verbose=self.verbose).fetch_file(src_emodnet) == 0:
            _ds = datasets.RasterFile(fn=src_emodnet,
                                      data_format=200,
                                      src_srs='epsg:4326',
                                      dst_srs=self.dst_srs,
                                      name=src_emodnet,
                                      src_region=self.region,
                                      verbose=self.verbose)
            for xyz in _ds.yield_xyz():
                yield (xyz)
        else:
            utils.echo_error_msg(
                'failed to fetch remote file, {}...'.format(src_emodnet))
        utils.remove_glob(src_emodnet)
Example #23: NASADEM yield_xyz
    def yield_xyz(self, entry):
        """yield the xyz data from the nasadem fetch module"""

        if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose, headers=self.headers).fetch_file(entry[1]) == 0:
            _ds = datasets.RasterFile(
                fn=entry[1],
                data_format=200,
                src_srs='epsg:4326',
                dst_srs=self.dst_srs,
                #name=entry[1],
                src_region=self.region,
                verbose=self.verbose
            )
            for xyz in _ds.yield_xyz():
                if xyz.z != 0:
                    yield(xyz)

        utils.remove_glob(entry[1])
Example #24: EMODNET run
    def run(self):
        """Run the EMODNET fetching module"""

        if self.region is None:
            return ([])

        _data = {
            'request': 'DescribeCoverage',
            'version': '2.0.1',
            'CoverageID': 'emodnet:mean',
            'service': 'WCS',
        }
        _req = f_utils.Fetch(self._emodnet_grid_url).fetch_req(params=_data)
        if _req is None:
            return (self)

        _results = lxml.etree.fromstring(_req.text.encode('utf-8'))
        g_env = _results.findall(
            './/{http://www.opengis.net/gml/3.2}GridEnvelope',
            namespaces=f_utils.namespaces)[0]
        hl = [
            float(x) for x in g_env.find(
                '{http://www.opengis.net/gml/3.2}high').text.split()
        ]

        g_bbox = _results.findall(
            './/{http://www.opengis.net/gml/3.2}Envelope')[0]
        lc = [
            float(x) for x in g_bbox.find(
                '{http://www.opengis.net/gml/3.2}lowerCorner').text.split()
        ]
        uc = [
            float(x) for x in g_bbox.find(
                '{http://www.opengis.net/gml/3.2}upperCorner').text.split()
        ]

        ds_region = regions.Region().from_list([lc[1], uc[1], lc[0], uc[0]])
        resx = (uc[1] - lc[1]) / hl[0]
        resy = (uc[0] - lc[0]) / hl[1]
        if regions.regions_intersect_ogr_p(self.region, ds_region):
            emodnet_wcs = '{}service=WCS&request=GetCoverage&version=1.0.0&Identifier=emodnet:mean&coverage=emodnet:mean&format=GeoTIFF&bbox={}&resx={}&resy={}&crs=EPSG:4326'\
                                      .format(self._emodnet_grid_url, self.region.format('bbox'), resx, resy)
            outf = 'emodnet_{}.tif'.format(self.region.format('fn'))
            self.results.append(
                [emodnet_wcs,
                 os.path.join(self._outdir, outf), 'emodnet'])
        return (self)
Example #25: NASADEM tile scan (FRED update)
    def update(self):
        """Crawl the COP30 database and update/generate the NASADEM reference vector."""
        
        self.FRED._open_ds(1)
        surveys = []                    
        f = f_utils.Fetch(self.nasadem_vrt_url, headers=self.headers, verbose=True)
        page = f.fetch_xml()
        fns = page.findall('.//SourceFilename')

        if self.verbose:
            _prog = utils.CliProgress('scanning {} tiles in {}...'.format(len(fns), self.nasadem_url))
        
        for i, fn in enumerate(fns):
            sid = fn.text.split('/')[-1].split('.')[0]
            if self.verbose:
                _prog.update_perc((i, len(fns)))
                
            self.FRED._attribute_filter(["ID = '{}'".format(sid)])
            if self.FRED.layer is None or len(self.FRED.layer) == 0:
                spat = fn.text.split('_HGT_')[-1].split('.')[0]
                xsplit = 'e' if 'e' in spat else 'w'
                ysplit = 's' if 's' in spat else 'n'
                x = int(spat.split(xsplit)[-1])
                y = int(spat.split(xsplit)[0].split(ysplit)[-1])

                if xsplit == 'w':
                    x = x * -1
                if ysplit == 's':
                    y = y * -1

                this_region = regions.Region().from_list([x, x + 1, y, y + 1])
                geom = this_region.export_as_geom()
                if geom is not None:
                    surveys.append({
                        'Name': fn.text.split('.')[0].split('/')[-1],
                        'ID': sid,
                        'Agency': 'NASA',
                        'Date': utils.this_date(),
                        'MetadataLink': '',
                        'MetadataDate': utils.this_date(),
                        'DataLink': self.nasadem_url + fn.text.split('/')[-1] + '?token=',
                        'DataType': '1',
                        'DataSource': 'nasadem',
                        'HorizontalDatum': 4326,
                        'Etcetra': self.nasadem_rurl,
                        'VerticalDatum': 'msl',
                        'Info': '',
                        'geom': geom})

        if self.verbose:
            _prog.end(0, 'scanned {} tiles in {}.'.format(len(fns), self.nasadem_url))
            utils.echo_msg('added {} NASADEM DEM tiles'.format(len(surveys)))
        self.FRED._add_surveys(surveys)
        self.FRED._close_ds()
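
The spat parsing above decodes tile names like 'n43w108' into a signed south-west corner and a 1-degree cell. The same decode as a standalone helper:

    # Standalone version of the tile-name decode used above;
    # decode_tile('n43w108') covers [-108,-107] x [43,44].
    def decode_tile(spat):
        xsplit = 'e' if 'e' in spat else 'w'
        ysplit = 's' if 's' in spat else 'n'
        x = int(spat.split(xsplit)[-1])
        y = int(spat.split(xsplit)[0].split(ysplit)[-1])
        if xsplit == 'w':
            x = -x
        if ysplit == 's':
            y = -y
        return regions.Region().from_list([x, x + 1, y, y + 1])
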
Example #26: tide stations yield_xyz
    def yield_xyz(self, entry):
        """process stations"""

        src_data = 'tides_tmp.json'
        src_csv = 'tides_tmp.csv'
        ln = 0

        if f_utils.Fetch(entry[0],
                         callback=self.callback,
                         verbose=self.verbose).fetch_file(src_data) == 0:
            with open(src_data, 'r') as json_file:
                r = json.load(json_file)

            if len(r) > 0:
                for feature in r['features']:
                    if self.station_id is not None:
                        if self.station_id != feature['attributes']['id']:
                            continue
                    lon = feature['attributes']['longitude']
                    lat = feature['attributes']['latitude']
                    z = feature['attributes'][
                        self.s_datum] - feature['attributes'][self.t_datum]
                    if self.units == 'm':
                        # station datum values are served in feet;
                        # 0.3048 converts feet to meters
                        z = z * 0.3048

                    xyz = xyzfun.XYZPoint(src_srs='epsg:4326').from_list(
                        [lon, lat, z])
                    if self.dst_srs is not None:
                        xyz.warp(dst_srs=self.dst_srs)

                    ln += 1
                    yield (xyz)

        else:
            utils.echo_error_msg(
                'failed to fetch remote file, {}...'.format(src_data))

        if self.verbose:
            utils.echo_msg('parsed {} data records from {}'.format(
                ln, src_data))

        utils.remove_glob('{}*'.format(src_data))
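
The 0.3048 above is the international foot-to-meter factor, implying the station datum fields are served in feet and converted only when meters are requested. One worked value:

    # 5.25 ft -> meters
    z_ft = 5.25
    z_m = z_ft * 0.3048
    assert abs(z_m - 1.6002) < 1e-9
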
Example #27: SRTM run
    def run(self):
        '''Run the SRTM fetching module.'''

        if self.region is None:
            return ([])

        self.data = {
            'north': self.region.ymax,
            'west': self.region.xmin,
            'south': self.region.ymin,
            'east': self.region.xmax,
        }

        _req = f_utils.Fetch(self._srtm_url,
                             verify=False).fetch_req(params=self.data)
        if _req is not None:
            outf = 'srtm_{}.xyz'.format(self.region.format('fn'))
            self.results.append(
                [_req.url, os.path.join(self._outdir, outf), 'srtm'])

        return (self)
Example #28: trackline run
    def run(self):
        if self.region is None:
            return([])

        _data = {
            'where': self.where,
            'outFields': '*',
            'geometry': self.region.format('bbox'),
            'inSR': 4326,
            'outSR': 4326,
            'f': 'pjson',
            'returnGeometry': 'False',
        }
        _req = f_utils.Fetch(self._trackline_query_url, verbose=self.verbose).fetch_req(params=_data)
        if _req is not None:
            features = _req.json()
            ids = []
            for feature in features['features']:
                ids.append(feature['attributes']['SURVEY_ID'])

            print('http://www.ngdc.noaa.gov/trackline/request/?surveyIds={}'.format(','.join(ids)))
Example #29: lakes yield_xyz stub (globathy)
    def yield_xyz(self, entry):

        ## use globathy.py to get lake depths and return those
        ## (stub: fetches and unpacks the index but yields no points yet)

        if f_utils.Fetch(entry[0],
                         callback=self.callback,
                         verbose=self.verbose,
                         headers=self.headers).fetch_file(entry[1]) == 0:
            v_shp = None
            v_zip = entry[1]
            v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])
            for v in v_shps:
                if v.split('.')[-1] == 'shp':
                    v_shp = v
                    break

            if v_shp is not None:
                r_shp = self.extract_region(v_shp)

            utils.remove_glob(v_zip, *v_shps)
Example #30: TNM _datasets
    def _datasets(self, dataset=None):
        # initialize up front so a failed .json() cannot leave
        # _datasets unbound at the return below
        _datasets = None
        _req = f_utils.Fetch(self._tnm_dataset_url).fetch_req()
        if _req is not None and _req.status_code == 200:
            try:
                _datasets = _req.json()
                if dataset is not None:
                    for ds in _datasets:
                        tags = ds['tags']
                        if len(tags) > 0:
                            for t in tags:
                                if dataset == t['sbDatasetTag']:
                                    _datasets = t
                                    break
                        else:
                            if dataset == ds['sbDatasetTag']:
                                _datasets = ds
                                break
            except Exception as e:
                utils.echo_error_msg('try again, {}'.format(e))
        return (_datasets)
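
A hedged usage sketch for _datasets, assuming tnm is an instance of the class above and that the TNM catalog tags entries by sbDatasetTag as the method expects; the tag string is illustrative:

    # Hypothetical calls against the TNM datasets endpoint.
    all_ds = tnm._datasets()  # full catalog
    ned = tnm._datasets(dataset='National Elevation Dataset (NED) 1/3 arc-second')
    if ned is not None:
        print(ned.get('sbDatasetTag'))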