def inf_entry(src_entry, overwrite=False, epsg=None):
    """Read an .inf file and extract its minmax info.

    The .inf file can either be an MB-System style inf file or the result of
    `gmt gmtinfo file.xyz -C`, which is a 6 column line with minmax info, etc.

    Args:
      src_entry (list): the source datalist entry list
      overwrite (bool): overwrite the inf file
      epsg (int): EPSG code

    Returns:
      dict: the infos dictionary for the entry, including the region
        [xmin, xmax, ymin, ymax, zmin, zmax] in 'minmax'.
    """

    ei = {'name': src_entry,
          'numpts': 0,
          'minmax': [0, 0, 0, 0, 0, 0],
          'wkt': regions.region2wkt([0, 0, 0, 0, 0, 0])}
    if entry_exists_p(src_entry[0]) or src_entry[1] == -4:
        path_i = src_entry[0] + '.inf'
        if not os.path.exists(path_i) or overwrite:
            ## generate the inf via the format-specific inf function
            ei = _dl_dl_h[src_entry[1]]['inf'](src_entry, epsg)
        else:
            ei = inf_parse(path_i)
            if regions.region_is_zeros(ei['minmax']):
                ## an all-zero region means the inf file is stale; regenerate it
                ei = inf_entry(src_entry, overwrite=True, epsg=epsg)

        if not regions.region_valid_p(ei['minmax']):
            utils.echo_error_msg('invalid inf file {}'.format(path_i))
            return({})

    return(ei)
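## Usage sketch (not part of the original module): request the infos for a
## single datalist entry. The entry layout [path, format-code, weight], the
## format code 168 (xyz) and the example pathname are assumptions based on the
## surrounding code.
def _example_inf_entry(src_xyz='survey.xyz'):
    entry = [src_xyz, 168, 1]  # hypothetical xyz datalist entry
    ei = inf_entry(entry, overwrite=False, epsg=4326)
    if ei:
        utils.echo_msg('entry region: {}'.format(ei['minmax']))

    return(ei)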
def inf_parse(src_inf):
    """Parse an inf file (native, GMT or MB-System style).

    Try to parse the infos file; it may be a native waffles infos file (JSON),
    a GMT-style infos file or an MB-System .inf file.

    Args:
      src_inf (str): path to the source infos file

    Returns:
      dict: the infos dictionary, including the region
        [xmin, xmax, ymin, ymax, zmin, zmax] in 'minmax'.
    """

    xyzi = {'name': src_inf,
            'numpts': 0,
            'minmax': [0, 0, 0, 0, 0, 0],
            'wkt': regions.region2wkt([0, 0, 0, 0, 0, 0])}
    try:
        ## native waffles infos are JSON
        with open(src_inf) as iob:
            xyzi = json.load(iob)
    except:
        try:
            xyzi = gmtfun.gmt_inf_parse(src_inf)
        except:
            try:
                xyzi = mbsfun.mb_inf_parse(src_inf)
            except:
                utils.echo_error_msg('could not parse inf file {}'.format(src_inf))

    return(xyzi)
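## Usage sketch (not part of the original module): parse an existing infos file
## regardless of its flavor (native JSON, GMT or MB-System); the pathname here
## is hypothetical.
def _example_inf_parse(src_inf='survey.xyz.inf'):
    xyzi = inf_parse(src_inf)
    utils.echo_msg('{} holds {} points in region {}'.format(
        xyzi['name'], xyzi['numpts'], xyzi['minmax']))

    return(xyzi)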
def gmt_inf_parse(src_inf):
    """Parse the xyz info from a GMT-style inf file.

    Args:
      src_inf (str): the source inf file

    Returns:
      dict: the xyz info dictionary
    """

    xyzi = {'name': src_inf,
            'numpts': 0,
            'minmax': [0, 0, 0, 0, 0, 0],
            'wkt': regions.region2wkt([0, 0, 0, 0, 0, 0])}
    with open(src_inf) as iob:
        for il in iob:
            til = il.split()
            if len(til) > 1:
                ## a GMT inf is a single line of whitespace-separated minmax values
                xyzi['minmax'] = [float(x) for x in til]
                xyzi['wkt'] = regions.region2wkt(xyzi['minmax'])

    return(xyzi)
def xyz_inf(src_xyz):
    """Generate and return, or read and return, an xyz inf file.

    Args:
      src_xyz (generator): list/generator of xyz data

    Returns:
      dict: an xyz infos dictionary
    """

    pts = []
    xyzi = {}
    xyzi['name'] = src_xyz.name
    xyzi['numpts'] = 0
    xyzi['minmax'] = [0, 0, 0, 0, 0, 0]
    for i, l in enumerate(xyz_parse(src_xyz)):
        if i == 0:
            xyzi['minmax'] = [l[0], l[0], l[1], l[1], l[2], l[2]]
        else:
            try:
                ## expand the running minmax region to include this point
                if l[0] < xyzi['minmax'][0]: xyzi['minmax'][0] = l[0]
                elif l[0] > xyzi['minmax'][1]: xyzi['minmax'][1] = l[0]
                if l[1] < xyzi['minmax'][2]: xyzi['minmax'][2] = l[1]
                elif l[1] > xyzi['minmax'][3]: xyzi['minmax'][3] = l[1]
                if l[2] < xyzi['minmax'][4]: xyzi['minmax'][4] = l[2]
                elif l[2] > xyzi['minmax'][5]: xyzi['minmax'][5] = l[2]
            except: pass

        pts.append(l)
        xyzi['numpts'] = i + 1 # i is zero-based, so the point count is i + 1

    if xyzi['numpts'] > 0:
        try:
            ## use the convex hull of the points as the data footprint
            out_hull = [pts[i] for i in spatial.ConvexHull(
                pts, qhull_options='Qt').vertices]
            out_hull.append(out_hull[0])
            xyzi['wkt'] = regions.create_wkt_polygon(out_hull, xpos=0, ypos=1)
            with open('{}.inf'.format(src_xyz.name), 'w') as inf:
                inf.write(json.dumps(xyzi))
        except:
            ## fall back to the minmax bounding box
            xyzi['wkt'] = regions.region2wkt(xyzi['minmax'])

    return(xyzi)
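## Usage sketch (not part of the original module): xyz_inf() expects an object
## with a `.name` attribute, such as an open file, since it writes `<name>.inf`
## next to the data; the pathname here is hypothetical.
def _example_xyz_inf(src_xyz='survey.xyz'):
    with open(src_xyz) as xyz_f:
        xyzi = xyz_inf(xyz_f)

    return(xyzi['minmax'])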
def archive_inf(archive, inf_file=True, epsg=None, overwrite=False):
    """Return the infos of the datalist archive and generate an associated
    `.inf` file if `inf_file` is True.

    Args:
      archive (str): a datalist archive entry pathname
      inf_file (bool): generate an inf file
      epsg (int): EPSG code
      overwrite (bool): overwrite a possibly existing inf_file

    Returns:
      dict: the archive infos dictionary, including the region
        [xmin, xmax, ymin, ymax, zmin, zmax] in 'minmax'.
    """

    out_regions = []
    dl_i = {'name': archive,
            'minmax': None,
            'numpts': 0,
            'wkt': None}
    utils.echo_msg('generating inf for archive {}'.format(archive))
    entries = archive2dl(archive)
    for entry in entries:
        entry_inf = inf_entry(entry, epsg=epsg, overwrite=overwrite)
        if entry_inf is not None:
            out_regions.append(entry_inf['minmax'][:6])
            dl_i['numpts'] += entry_inf['numpts']

    ## merge the per-entry regions into a single region
    out_regions = [x for x in out_regions if x is not None]
    if len(out_regions) == 0:
        dl_i['minmax'] = None
    elif len(out_regions) == 1:
        dl_i['minmax'] = out_regions[0]
    else:
        out_region = out_regions[0]
        for x in out_regions[1:]:
            out_region = regions.regions_merge(out_region, x)
        dl_i['minmax'] = out_region

    if dl_i['minmax'] is not None:
        ## only generate the wkt and the inf file for a valid region
        dl_i['wkt'] = regions.region2wkt(dl_i['minmax'])
        if inf_file:
            with open('{}.inf'.format(archive), 'w') as inf:
                inf.write(json.dumps(dl_i))

    ## clean up the extracted entry files and their inf files
    for x in entries:
        utils.remove_glob('{}'.format(x[0]))
        utils.remove_glob('{}.inf'.format(x[0]))

    return(dl_i)
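## Usage sketch (not part of the original module): generate the infos for a
## datalist archive and write `<archive>.inf`; the archive pathname here is
## hypothetical.
def _example_archive_inf(archive='survey_archive'):
    dl_i = archive_inf(archive, inf_file=True, epsg=4326, overwrite=True)
    if dl_i['minmax'] is not None:
        utils.echo_msg('archive region: {}'.format(dl_i['minmax']))

    return(dl_i)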
def las_inf(src_las):
    """Generate an inf dictionary, and a `.inf` file, for a source las/laz file.

    Args:
      src_las (str): the source las/laz file pathname

    Returns:
      dict: the las infos dictionary
    """

    pts = []
    lasi = {}
    lasi['name'] = src_las
    lasi['numpts'] = 0
    lasi['minmax'] = [0, 0, 0, 0, 0, 0]
    utils.echo_msg('generating inf file for {}'.format(src_las))
    for i, l in enumerate(las_yield_entry([src_las, 400, None])):
        if i == 0:
            lasi['minmax'] = [l[0], l[0], l[1], l[1], l[2], l[2]]
        else:
            try:
                ## expand the running minmax region to include this point
                if l[0] < lasi['minmax'][0]: lasi['minmax'][0] = l[0]
                elif l[0] > lasi['minmax'][1]: lasi['minmax'][1] = l[0]
                if l[1] < lasi['minmax'][2]: lasi['minmax'][2] = l[1]
                elif l[1] > lasi['minmax'][3]: lasi['minmax'][3] = l[1]
                if l[2] < lasi['minmax'][4]: lasi['minmax'][4] = l[2]
                elif l[2] > lasi['minmax'][5]: lasi['minmax'][5] = l[2]
            except: pass

        pts.append(l)
        lasi['numpts'] = i + 1 # i is zero-based, so the point count is i + 1

    try:
        ## use the convex hull of the points as the data footprint
        out_hull = [pts[i] for i in spatial.ConvexHull(
            pts, qhull_options='Qt').vertices]
        out_hull.append(out_hull[0])
        lasi['wkt'] = utils.create_wkt_polygon(out_hull, xpos=0, ypos=1)
    except:
        ## fall back to the minmax bounding box
        lasi['wkt'] = regions.region2wkt(lasi['minmax'])

    if lasi['numpts'] > 0:
        with open('{}.inf'.format(src_las), 'w') as inf:
            inf.write(json.dumps(lasi))

    return(lasi)
def mb_inf_parse(src_inf):
    """Parse an MB-System .inf file.

    Args:
      src_inf (str): the source MB-System .inf file pathname

    Returns:
      dict: xyz infos dictionary
    """

    xyzi = {'name': src_inf,
            'numpts': 0,
            'minmax': [0, 0, 0, 0, 0, 0],
            'wkt': regions.region2wkt([0, 0, 0, 0, 0, 0])}
    dims = []
    this_row = 0
    with open(src_inf) as iob:
        for il in iob:
            til = il.split()
            if len(til) > 1:
                if til[0] == 'Swath':
                    if til[2] == 'File:':
                        xyzi['name'] = til[3]

                if til[0] == 'Number':
                    if til[2] == 'Records:':
                        xyzi['numpts'] = int(til[3])

                if til[0] == 'Minimum':
                    if til[1] == 'Longitude:':
                        xyzi['minmax'][0] = float(til[2])
                        xyzi['minmax'][1] = float(til[5])
                    elif til[1] == 'Latitude:':
                        xyzi['minmax'][2] = float(til[2])
                        xyzi['minmax'][3] = float(til[5])
                    elif til[1] == 'Depth:':
                        ## depths are negated to elevations, so the maximum
                        ## depth becomes zmin and the minimum depth becomes zmax
                        xyzi['minmax'][4] = float(til[5]) * -1
                        xyzi['minmax'][5] = float(til[2]) * -1

                if til[0] == 'CM':
                    if til[1] == 'dimensions:':
                        dims = [int(til[2]), int(til[3])]
                        cm_array = np.zeros((dims[0], dims[1]))

                if til[0] == 'CM:':
                    ## fill the coverage-mask array one row per 'CM:' line
                    for j in range(0, dims[0]):
                        cm_array[this_row][j] = int(til[j + 1])
                    this_row += 1

    if len(dims) == 2:
        xinc = (xyzi['minmax'][1] - xyzi['minmax'][0]) / dims[0]
        yinc = (xyzi['minmax'][2] - xyzi['minmax'][3]) / dims[1]
    else:
        ## no coverage-mask dimensions found; fall back to the minmax region
        xinc = yinc = 0

    if abs(xinc) > 0 and abs(yinc) > 0:
        ## rasterize the coverage mask and polygonize it to obtain the
        ## data footprint as wkt
        xcount, ycount, dst_gt = regions.region2gt(xyzi['minmax'], xinc, y_inc=yinc)
        ds_config = {'nx': dims[0], 'ny': dims[1], 'nb': dims[1] * dims[0],
                     'geoT': dst_gt, 'proj': utils.sr_wkt(4326),
                     'dt': gdal.GDT_Float32, 'ndv': 0, 'fmt': 'GTiff'}
        driver = gdal.GetDriverByName('MEM')
        ds = driver.Create('tmp', ds_config['nx'], ds_config['ny'], 1, ds_config['dt'])
        ds.SetGeoTransform(ds_config['geoT'])
        ds.SetProjection(ds_config['proj'])
        ds_band = ds.GetRasterBand(1)
        ds_band.SetNoDataValue(ds_config['ndv'])
        ds_band.WriteArray(cm_array)

        tmp_ds = ogr.GetDriverByName('Memory').CreateDataSource('tmp_poly')
        tmp_layer = tmp_ds.CreateLayer('tmp_poly', None, ogr.wkbMultiPolygon)
        tmp_layer.CreateField(ogr.FieldDefn('DN', ogr.OFTInteger))
        gdal.Polygonize(ds_band, ds_band, tmp_layer, 0)

        ## TODO: scan all features
        feat = tmp_layer.GetFeature(0)
        geom = feat.GetGeometryRef()
        wkt = geom.ExportToWkt()
        tmp_ds = ds = None
    else:
        wkt = regions.region2wkt(xyzi['minmax'])

    xyzi['wkt'] = wkt
    return(xyzi)
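## Usage sketch (not part of the original module): parse an MB-System .inf file
## and inspect the recovered region and footprint; the pathname here is
## hypothetical.
def _example_mb_inf_parse(src_inf='survey.mb59.inf'):
    xyzi = mb_inf_parse(src_inf)
    utils.echo_msg('{}: {} records, region {}'.format(
        xyzi['name'], xyzi['numpts'], xyzi['minmax']))

    return(xyzi)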