Ejemplo n.º 1
0
    def update(self):
        """Fetch the HRDEM footprint index and register a single HRDEM entry in FRED."""
        self.FRED._open_ds()

        # Pull down the footprint archive over FTP and unpack the shapefile parts.
        zip_name = os.path.basename(self._hrdem_footprints_url)
        status = f_utils.Fetch(self._hrdem_footprints_url,
                               verbose=self.verbose).fetch_ftp_file(zip_name)
        unzipped = utils.p_unzip(zip_name, ['shp', 'shx', 'dbf', 'prj'])

        # Locate the .shp member among the extracted files (last match wins).
        footprint_shp = None
        for member in unzipped:
            if '.shp' in member:
                footprint_shp = member

        # Merge every region found in the shapefile into one covering region.
        merged = regions.Region()
        for candidate in regions.gdal_ogr_regions(footprint_shp):
            if merged.valid_p(check_xy=True):
                merged = regions.regions_merge(merged, candidate)
            else:
                merged = candidate
        geom = merged.export_as_geom()

        # Only add the survey when FRED does not already hold an HRDEM-1 entry.
        self.FRED._attribute_filter(["ID = '{}'".format('HRDEM-1')])
        if self.FRED.layer is None or len(self.FRED.layer) == 0:
            self.FRED._add_survey(Name='High-Resolution DEM (Canada)',
                                  ID='HRDEM-1',
                                  Agency='NRCAN',
                                  Date=utils.this_year(),
                                  MetadataLink=self._hrdem_info_url,
                                  MetadataDate=utils.this_year(),
                                  DataLink=self._hrdem_footprints_url,
                                  IndexLink=self._hrdem_footprints_url,
                                  DataType='raster',
                                  DataSource='hrdem',
                                  Info='Canada Only',
                                  geom=geom)

        # Clean up the downloaded archive and its extracted members.
        utils.remove_glob(zip_name, *unzipped)
        self.FRED._close_ds()
Ejemplo n.º 2
0
    def _update_all(self):
        """Scan every HRDEM footprint tile and add missing tiles to FRED.

        Downloads the HRDEM footprint shapefile archive, iterates its
        features, and registers each tile that FRED does not already
        contain as a survey entry.
        """
        self.FRED._open_ds(1)
        v_zip = os.path.basename(self._hrdem_footprints_url)
        status = f_utils.Fetch(self._hrdem_footprints_url,
                               verbose=self.verbose).fetch_ftp_file(v_zip)
        v_shps = utils.p_unzip(v_zip, ['shp', 'shx', 'dbf', 'prj'])

        # Pick out the .shp member of the unpacked archive.
        v_shp = None
        for v in v_shps:
            if '.shp' in v:
                v_shp = v

        # ogr.Open returns None on failure; guard against exceptions too,
        # but do not swallow KeyboardInterrupt/SystemExit with a bare except.
        try:
            v_ds = ogr.Open(v_shp)
        except Exception:
            v_ds = None
            status = -1

        if v_ds is not None:
            layer = v_ds.GetLayer()
            fcount = layer.GetFeatureCount()
            if self.verbose:
                _prog = utils.CliProgress(
                    'scanning {} datasets...'.format(fcount))
            for f in range(0, fcount):
                feature = layer[f]
                name = feature.GetField('Tile_name')
                if self.verbose:
                    _prog.update_perc((f, fcount))
                # Filter FRED for this tile name; GDAL raises RuntimeError on
                # a bad filter, in which case fall through and treat the tile
                # as unseen so it gets (re-)added below.
                try:
                    self.FRED.layer.SetAttributeFilter(
                        "Name = '{}'".format(name))
                except Exception:
                    pass
                if self.FRED.layer is None or len(self.FRED.layer) == 0:
                    data_link = feature.GetField('Ftp_dtm')
                    if data_link is not None:
                        geom = feature.GetGeometryRef()
                        self.FRED._add_survey(
                            Name=name,
                            ID=feature.GetField('Project'),
                            Agency='NRCAN',
                            Date=utils.this_year(),
                            MetadataLink=feature.GetField('Meta_dtm'),
                            MetadataDate=utils.this_year(),
                            # NOTE(review): swaps the scheme prefix; assumes the
                            # field holds an http URL mirrored over ftp.
                            DataLink=data_link.replace('http', 'ftp'),
                            IndexLink=self._hrdem_footprints_url,
                            DataType='raster',
                            DataSource='hrdem',
                            HorizontalDatum=feature.GetField(
                                'Coord_Sys').split(':')[-1],
                            Info=feature.GetField('Provider'),
                            geom=geom)

            if self.verbose:
                _prog.end('scanned {} datasets.'.format(fcount))
        # Clean up the downloaded archive and its extracted members.
        utils.remove_glob(v_zip, *v_shps)
        self.FRED._close_ds()
Ejemplo n.º 3
0
def write_datalist(data_list, outname=None):
    """Write the entries of *data_list* to ``<outname>.datalist``.

    Args:
      data_list: iterable of objects exposing a ``format_entry()`` method;
        one line is written per entry.
      outname: basename (without extension) for the output file.
        NOTE(review): when omitted, this code references ``self``, which is
        undefined in a module-level function and raises NameError — the
        function was likely lifted out of a method; confirm with the caller
        before relying on the default.

    Returns:
      The path of the written datalist file ('<outname>.datalist').
    """
    if outname is None:
        outname = '{}_{}'.format(self.metadata['name'], utils.this_year())

    # Start fresh: drop any stale datalist (and its side-car files).
    if os.path.exists('{}.datalist'.format(outname)):
        utils.remove_glob('{}.datalist*'.format(outname))

    # Use a plain loop for the write side effect, not a throwaway
    # list comprehension.
    with open('{}.datalist'.format(outname), 'w') as tmp_dl:
        for entry in data_list:
            tmp_dl.write('{}\n'.format(entry.format_entry()))

    return '{}.datalist'.format(outname)
Ejemplo n.º 4
0
    def update(self):
        """Fetch the HydroLAKES polygon index and register it in FRED."""
        self.FRED._open_ds()

        # Fetch and unpack the HydroLAKES polygon archive.
        archive = os.path.basename(self._hydrolakes_poly_zip)
        status = f_utils.Fetch(self._hydrolakes_poly_zip,
                               verbose=self.verbose).fetch_file(archive)
        shp_files = utils.p_unzip(archive, ['shp', 'shx', 'dbf', 'prj'])

        # Find the .shp member (last match wins).
        lakes_shp = None
        for candidate in shp_files:
            if '.shp' in candidate:
                lakes_shp = candidate

        # Fold every region found in the shapefile into one covering region.
        merged = regions.Region()
        for candidate_region in regions.gdal_ogr_regions(lakes_shp):
            if merged.valid_p(check_xy=True):
                merged = regions.regions_merge(merged, candidate_region)
            else:
                merged = candidate_region
        geom = merged.export_as_geom()

        # Add a single HYDROLAKES survey entry unless FRED already has one.
        self.FRED._attribute_filter(["ID = '{}'".format('HYDROLAKES')])
        if self.FRED.layer is None or len(self.FRED.layer) == 0:
            self.FRED._add_survey(Name='HydorLakes',
                                  ID='HYDROLAKES',
                                  Agency='HydroSheds',
                                  Date=utils.this_year(),
                                  MetadataLink=self._hydrolakes_prods,
                                  MetadataDate=utils.this_year(),
                                  DataLink=self._hydrolakes_poly_zip,
                                  IndexLink=self._hydrolakes_poly_zip,
                                  DataType='vector',
                                  DataSource='hydrolakes',
                                  Info='World-wide lakes',
                                  geom=geom)

        # Clean up the downloaded archive and its extracted members.
        utils.remove_glob(archive, *shp_files)
        self.FRED._close_ds()
Ejemplo n.º 5
0
 def _update_dataset(self, ds, fmt, geom, h_epsg, v_epsg):
     """Register a single TNM dataset record in FRED if it isn't present.

     Args:
       ds: TNM dataset record (dict-like) with keys such as 'id',
         'sbDatasetTag', 'infoUrl', 'dataGovUrl', 'extents', 'refreshCycle'.
       fmt: dataset format string (e.g. 'IMG', 'GeoTIFF', 'LAS', 'LAZ').
       geom: OGR footprint geometry, or None to skip adding the survey.
       h_epsg: horizontal EPSG code stored on the survey entry.
       v_epsg: vertical EPSG code stored on the survey entry.
     """
     self.FRED._attribute_filter(["ID = '{}'".format(ds['id'])])
     if self.FRED.layer is None or len(self.FRED.layer) == 0:
         # Map the TNM format string to a FRED data type.
         if 'IMG' in fmt or 'TIFF' in fmt:
             datatype = 'raster'
         elif 'LAS' in fmt or 'LAZ' in fmt:
             datatype = 'lidar'
         else:
             datatype = 'tnm'
         url_enc = f_utils.urlencode({'datasets': ds['sbDatasetTag']})
         # TNM records don't always carry dates; fall back to the current
         # year. Catch KeyError specifically rather than a bare except so
         # unrelated failures still surface.
         try:
             pubDate = ds['lastPublishedDate']
         except KeyError:
             pubDate = utils.this_year()
         try:
             metadataDate = ds['lastUpdatedDate']
         except KeyError:
             metadataDate = utils.this_year()
         if geom is not None:
             # Date=pubDate[-4:] assumes the date string ends with the
             # 4-digit year — TODO confirm against the TNM date format.
             self.FRED._add_survey(Name=ds['sbDatasetTag'],
                                   ID=ds['id'],
                                   Agency='USGS',
                                   Date=pubDate[-4:],
                                   MetadataLink=ds['infoUrl'],
                                   MetadataDate=metadataDate,
                                   DataLink='{}{}'.format(
                                       self._tnm_product_url, url_enc),
                                   Link=ds['dataGovUrl'],
                                   Resolution=','.join(ds['extents']),
                                   DataType=datatype,
                                   DataSource='tnm',
                                   HorizontalDatum=h_epsg,
                                   VerticalDatum=v_epsg,
                                   Etcetra=fmt,
                                   Info=ds['refreshCycle'],
                                   geom=geom)
Ejemplo n.º 6
0
    def yield_xyz(self, entry):
        """Fetch a remote multibeam survey file and yield its XYZ points.

        Downloads the file named in *entry*, converts it to xyz text with
        the MB-System ``mblist`` command (optionally weighted by survey
        quality/date when ``self.process`` is set), optionally thins the
        result with GMT ``blockmedian``, and yields each point.

        Args:
          entry: fetch entry; entry[0] is the remote URL and entry[1] the
            local file name (as used below — confirm against the fetcher).

        Yields:
          xyzfun.XYZPoint objects in the blockmedian path; otherwise
          whatever datasets.XYZFile.yield_xyz() produces.
        """
        src_data = os.path.basename(entry[1])
        # Strip the 4-character extension to get the survey base name.
        src_mb = src_data[:-4]
        try:
            survey, src_data, mb_fmt, mb_perc, mb_date = self.parse_entry_inf(entry)
        except TypeError:
            # parse_entry_inf returned something non-unpackable (e.g. None):
            # nothing usable for this entry, so bail out quietly.
            return
        this_inf = self.parse_entry_inf(entry)

        if f_utils.Fetch(entry[0], callback=self.callback, verbose=self.verbose).fetch_file(src_data) == 0:
            src_xyz = os.path.basename(src_data) + '.xyz'
            if not self.process:
                this_weight = self.weight
                # Dump all records (or exclude flagged ones via -MX) as XYZ.
                out, status = utils.run_cmd('mblist -OXYZ -I{} -M{} > {}'.format(src_data, 'X{}'.format(self.exclude) if self.exclude is not None else 'A', src_xyz), verbose=True)
            else:
                #this_weight = (float(mb_perc) * (1 + (2*((int(mb_date)-2015)/100))))/100.
                # Weight the survey by its quality percentage scaled by age,
                # clamped to a tiny positive value so it never zeroes out.
                this_year = int(utils.this_year()) if self.min_year is None else self.min_year
                this_weight = float(mb_perc) * ((int(mb_date)-2000)/(this_year-2000))/100.
                if this_weight <= 0.: this_weight = 0.0000001
                #this_weight = (float(mb_perc) * ((int(mb_date)-2000)/int(this_year) - 2000))/100.
                out, status = utils.run_cmd('mblist -OXYZ -I{} -MX{} > {}'.format(src_data, str(100-float(mb_perc)), src_xyz), verbose=True)
            
                if status != 0:
                    # mblist failed: fetch the survey's .inf side-car to
                    # recover the data format/date and retry with an
                    # explicit -F format flag.
                    if f_utils.Fetch('{}.inf'.format(entry[0]), callback=self.callback, verbose=self.verbose).fetch_file('{}.inf'.format(src_mb)) == 0:
                        mb_fmt = self.mb_inf_data_format('{}.inf'.format(src_mb))
                        mb_date = self.mb_inf_data_date('{}.inf'.format(src_mb))
                        out, status = utils.run_cmd('mblist -F{} -OXYZ -I{} -MX{}  > {}'.format(mb_fmt, src_data, str(100-float(mb_perc)), src_xyz), verbose=True)
                    
            if status == 0:
                # Wrap the converted file; data_format 168 with EPSG:4326
                # source coordinates.
                _ds = datasets.XYZFile(
                    fn=src_xyz,
                    delim='\t',
                    data_format=168,
                    src_srs='epsg:4326',
                    dst_srs=self.dst_srs,
                    #name=os.path.basename(entry[1]),
                    src_region=self.region,
                    verbose=self.verbose,
                    weight=this_weight,
                    remote=True
                )

                if self.inc is not None:
                    # Thin points to the target grid increment with GMT
                    # blockmedian, streaming our xyz dump into its stdin.
                    xyz_func = lambda p: _ds.dump_xyz(dst_port=p, encode=True)
                    for xyz in utils.yield_cmd(
                            'gmt blockmedian -I{:.10f} {} -r -V'.format(
                                self.inc, self.region.format('gmt')
                            ),
                            verbose=self.verbose,
                            data_fun=xyz_func
                    ):
                        yield(xyzfun.XYZPoint().from_list([float(x) for x in xyz.split()]))
                else:
                    for xyz in _ds.yield_xyz():
                        yield(xyz)

                # Success: remove the fetched data and derived files.
                utils.remove_glob(src_data, '{}*'.format(src_xyz), '{}*.inf'.format(src_mb))
            else:
                utils.echo_error_msg('failed to process local file, {} [{}]...'.format(src_data, entry[0]))
                # Record the failure so later runs can identify this survey.
                with open(
                        '{}'.format(os.path.join(self._outdir, 'fetch_{}_{}.err'.format(self.name, self.region.format('fn')))),
                        'a'
                ) as mb_err:
                    mb_err.write('{}\n'.format(','.join([src_mb, entry[0]])))
                    
                # Keep the fetched file around (moved to the out dir) for
                # inspection instead of deleting it.
                os.rename(src_data, os.path.join(self._outdir, src_data))
                utils.remove_glob(src_xyz)
        else:
            utils.echo_error_msg(
                'failed to fetch remote file, {}...'.format(src_data)
            )
Ejemplo n.º 7
0
    def update(self):
        """Fetch the ArcticDEM tile index and register it with FRED."""
        self.FRED._open_ds()

        # Grab the index archive and unpack the shapefile components.
        index_zip = os.path.basename(self._arctic_dem_index_url)
        status = f_utils.Fetch(self._arctic_dem_index_url, verbose=self.verbose).fetch_file(index_zip)
        index_shps = utils.p_unzip(index_zip, ['shp', 'shx', 'dbf', 'prj'])

        # Find the .shp member (last match wins).
        index_shp = None
        for part in index_shps:
            if '.shp' in part:
                index_shp = part

        # Reproject the index to WGS84 before computing its extent, then
        # work from the reprojected temporary shapefile.
        utils.run_cmd('ogr2ogr arctic_tmp.shp {} -t_srs epsg:4326'.format(index_shp), verbose=True)
        utils.remove_glob(index_zip, *index_shps)
        index_shp = 'arctic_tmp.shp'
        index_shps = ['arctic_tmp.shp', 'arctic_tmp.dbf', 'arctic_tmp.shx', 'arctic_tmp.prj']

        # Fold all tile regions into a single covering region.
        merged = regions.Region()
        for tile_region in regions.gdal_ogr_regions(index_shp):
            if merged.valid_p(check_xy=True):
                merged = regions.regions_merge(merged, tile_region)
            else:
                merged = tile_region
        geom = merged.export_as_geom()

        # Add a single ARCTICDEM-1 survey entry unless FRED already has one.
        self.FRED._attribute_filter(["ID = '{}'".format('ARCTICDEM-1')])
        if self.FRED.layer is None or len(self.FRED.layer) == 0:
            self.FRED._add_survey(Name='ArcticDEM',
                                  ID='ARCTICDEM-1',
                                  Agency='UMN',
                                  Date=utils.this_year(),
                                  MetadataLink='https://data.pgc.umn.edu/elev/dem/setsm/ArcticDEM/',
                                  MetadataDate=utils.this_year(),
                                  DataLink=self._arctic_dem_index_url,
                                  IndexLink=self._arctic_dem_index_url,
                                  DataType='raster',
                                  DataSource='arcticdem',
                                  Info='Arctic Only',
                                  geom=geom)

        # Clean up the temporary reprojected shapefile.
        utils.remove_glob(*index_shps)
        self.FRED._close_ds()