def yield_xyz(self, entry):
    """Fetch the remote marine-gravity file for *entry* and yield its points.

    The file is fetched to a fixed temporary name, parsed as a skip-1 XYZ
    file (format 168) with a -360 x-offset, and each parsed point is
    yielded.  The temporary file (and any siblings matching its glob) is
    removed afterwards whether or not the fetch succeeded.
    """
    local_fn = 'mar_grav_tmp.xyz'
    fetcher = f_utils.Fetch(
        entry[0], callback=self.callback, verbose=self.verbose, verify=False)
    if fetcher.fetch_file(local_fn) != 0:
        utils.echo_error_msg(
            'failed to fetch remote file, {}...'.format(local_fn))
    else:
        xyz_ds = datasets.XYZFile(
            fn=local_fn,
            data_format=168,
            skip=1,
            x_offset=-360,
            src_srs='epsg:4326',
            dst_srs=self.dst_srs,
            src_region=self.region,
            verbose=self.verbose,
            remote=True)
        for point in xyz_ds.yield_xyz():
            yield point
    # clean up the temporary fetch product in either case
    utils.remove_glob('{}*'.format(local_fn))
def yield_xyz(self, entry):
    """Fetch a NOS survey file for *entry* and yield its XYZ points.

    The downloaded archive is classified via ``self._data_type``:
    'geodas_xyz' archives are unzipped to xyz/dat members and parsed as
    XYZ (format 168, y/x/z columns, depths negated); 'grid_bag' archives
    are unzipped to .bag members and parsed as rasters (format 200).
    All extracted members and the archive itself are removed afterwards.
    """
    archive_fn = os.path.basename(entry[1])
    fetcher = f_utils.Fetch(
        entry[0], callback=self.callback, verbose=self.verbose)
    if fetcher.fetch_file(archive_fn) == 0:
        data_type = self._data_type(archive_fn)
        if data_type == 'geodas_xyz':
            members = utils.p_unzip(archive_fn, ['xyz', 'dat'])
            for member in members:
                xyz_ds = datasets.XYZFile(
                    fn=member,
                    data_format=168,
                    skip=1,
                    xpos=2,
                    ypos=1,
                    zpos=3,
                    z_scale=-1,
                    src_srs='epsg:4326',
                    dst_srs=self.dst_srs,
                    name=member,
                    src_region=self.region,
                    verbose=self.verbose,
                    remote=True)
                for point in xyz_ds.yield_xyz():
                    yield point
            # remove the extracted members plus any generated .inf sidecars
            utils.remove_glob(*members, *['{}.inf'.format(m) for m in members])
        elif data_type == 'grid_bag':
            bag_members = utils.p_unzip(archive_fn, exts=['bag'])
            for bag_member in bag_members:
                raster_ds = datasets.RasterFile(
                    fn=bag_member,
                    data_format=200,
                    dst_srs=self.dst_srs,
                    name=bag_member,
                    src_region=self.region,
                    verbose=self.verbose)
                for point in raster_ds.yield_xyz():
                    yield point
            utils.remove_glob(*bag_members)
    utils.remove_glob(archive_fn)
def yield_xyz(self, entry):
    """ENC data comes as a .000 file in a zip.

    The data is referenced to MHW and is represented as a depth.
    In U.S. waters, MHW can be transformed to MSL or the local GEOID
    using VDatum and/or it's associated grids (mhw.gtx or tss.gtx)

    Each .000 chart in the fetched zip is opened with OGR, its SOUNDG
    layer dumped to a per-chart .xyz file (depths negated via
    z_scale=-1 when re-read), and the resulting points are yielded.
    All intermediate files are removed before returning.
    """
    src_zip = os.path.basename(entry[1])
    if f_utils.Fetch(
            entry[0], callback=self.callback, verbose=self.verbose
    ).fetch_file(src_zip) == 0:
        if entry[2].lower() == 'enc':
            src_encs = utils.p_unzip(src_zip, ['000'])
            # track every generated .xyz so ALL of them are cleaned up
            # (the original only removed the last one, and also passed the
            # closed file OBJECT o_xyz to remove_glob instead of a path)
            dst_xyzs = []
            for src_ch in src_encs:
                dst_xyz = src_ch.split('.')[0] + '.xyz'
                try:
                    ds_ogr = ogr.Open(src_ch)
                    layer_s = ds_ogr.GetLayerByName('SOUNDG')
                    if layer_s is not None:
                        with open(dst_xyz, 'w') as o_xyz:
                            for f in layer_s:
                                g = json.loads(
                                    f.GetGeometryRef().ExportToJson())
                                for xyz in g['coordinates']:
                                    xyzfun.XYZPoint().from_list(
                                        [float(x) for x in xyz]
                                    ).dump(dst_port=o_xyz, encode=False)
                    ds_ogr = layer_s = None
                except Exception:
                    # narrow from a bare except: so e.g. KeyboardInterrupt
                    # still propagates; a bad chart is only a warning
                    utils.echo_warning_msg(
                        'could not parse {}'.format(src_ch))
                dst_xyzs.append(dst_xyz)
                _ds = datasets.XYZFile(
                    fn=dst_xyz,
                    data_format=168,
                    z_scale=-1,
                    src_srs='epsg:4326',
                    dst_srs=self.dst_srs,
                    src_region=self.region,
                    verbose=self.verbose,
                    remote=True)
                for xyz in _ds.yield_xyz():
                    yield xyz
            utils.remove_glob(*dst_xyzs, *src_encs)
    utils.remove_glob(src_zip)
def yield_xyz(self, entry):
    """Fetch and process a multibeam survey for *entry*, yielding XYZ points.

    The survey is converted to XYZ with MB-System's ``mblist``; the data
    weight is either ``self.weight`` (when ``self.process`` is falsy) or a
    percentage/age-derived weight.  If the first ``mblist`` run fails, the
    remote .inf sidecar is fetched to recover the data format and date and
    the conversion is retried with an explicit ``-F`` format.  Points are
    optionally block-medianed through GMT when ``self.inc`` is set.
    Failed surveys are logged to a per-region .err file and moved into
    ``self._outdir``.
    """
    src_data = os.path.basename(entry[1])
    src_mb = src_data[:-4]
    try:
        survey, src_data, mb_fmt, mb_perc, mb_date = self.parse_entry_inf(entry)
    except TypeError:
        # unparsable inf entry: nothing to yield
        return
    # NOTE: removed a redundant second parse_entry_inf() call whose
    # result (this_inf) was never used.
    if f_utils.Fetch(
            entry[0], callback=self.callback, verbose=self.verbose
    ).fetch_file(src_data) == 0:
        src_xyz = os.path.basename(src_data) + '.xyz'
        if not self.process:
            this_weight = self.weight
            out, status = utils.run_cmd(
                'mblist -OXYZ -I{} -M{} > {}'.format(
                    src_data,
                    'X{}'.format(self.exclude) if self.exclude is not None else 'A',
                    src_xyz),
                verbose=True)
        else:
            # weight survey by its reported percent-good scaled by age
            this_year = int(utils.this_year()) if self.min_year is None else self.min_year
            this_weight = float(mb_perc) * ((int(mb_date)-2000)/(this_year-2000))/100.
            if this_weight <= 0.:
                this_weight = 0.0000001
            out, status = utils.run_cmd(
                'mblist -OXYZ -I{} -MX{} > {}'.format(
                    src_data, str(100-float(mb_perc)), src_xyz),
                verbose=True)
        if status != 0:
            # first pass failed; fetch the .inf sidecar to recover the
            # mb format/date and retry with an explicit -F
            if f_utils.Fetch(
                    '{}.inf'.format(entry[0]),
                    callback=self.callback,
                    verbose=self.verbose
            ).fetch_file('{}.inf'.format(src_mb)) == 0:
                mb_fmt = self.mb_inf_data_format('{}.inf'.format(src_mb))
                mb_date = self.mb_inf_data_date('{}.inf'.format(src_mb))
                out, status = utils.run_cmd(
                    'mblist -F{} -OXYZ -I{} -MX{} > {}'.format(
                        mb_fmt, src_data, str(100-float(mb_perc)), src_xyz),
                    verbose=True)
        if status == 0:
            _ds = datasets.XYZFile(
                fn=src_xyz,
                delim='\t',
                data_format=168,
                src_srs='epsg:4326',
                dst_srs=self.dst_srs,
                src_region=self.region,
                verbose=self.verbose,
                weight=this_weight,
                remote=True)
            if self.inc is not None:
                # pipe the dataset through gmt blockmedian at self.inc
                xyz_func = lambda p: _ds.dump_xyz(dst_port=p, encode=True)
                for xyz in utils.yield_cmd(
                        'gmt blockmedian -I{:.10f} {} -r -V'.format(
                            self.inc, self.region.format('gmt')),
                        verbose=self.verbose,
                        data_fun=xyz_func):
                    yield(xyzfun.XYZPoint().from_list(
                        [float(x) for x in xyz.split()]))
            else:
                for xyz in _ds.yield_xyz():
                    yield(xyz)
            utils.remove_glob(
                src_data, '{}*'.format(src_xyz), '{}*.inf'.format(src_mb))
        else:
            utils.echo_error_msg(
                'failed to process local file, {} [{}]...'.format(
                    src_data, entry[0]))
            # record the failure and keep the raw file for inspection
            with open(
                    '{}'.format(os.path.join(
                        self._outdir,
                        'fetch_{}_{}.err'.format(
                            self.name, self.region.format('fn')))),
                    'a'
            ) as mb_err:
                mb_err.write('{}\n'.format(','.join([src_mb, entry[0]])))
            os.rename(src_data, os.path.join(self._outdir, src_data))
            utils.remove_glob(src_xyz)
    else:
        utils.echo_error_msg(
            'failed to fetch remote file, {}...'.format(src_data)
        )