def xyz_block_t(src_xyz, src_region, inc, verbose=False):
    """block the src_xyz data into per-cell point-index lists

    Args:
      src_xyz (generator): list/generator of xyz data
      src_region (list): a `region` list [xmin, xmax, ymin, ymax]
      inc (float): blocking increment, in native units
      verbose (bool): increase verbosity

    Returns:
      tuple: (blkArray, xyzArray) where blkArray is a (ycount, xcount)
        object array holding, per cell, the indices into xyzArray of the
        points that fell into that cell, and xyzArray is the list of
        accepted points.
    """
    xcount, ycount, dst_gt = regions.region2gt(src_region, inc)
    blkArray = np.empty((ycount, xcount), dtype=object)
    for y in range(0, ycount):
        for x in range(0, xcount):
            blkArray[y, x] = []
    xyzArray = []
    if verbose:
        utils.echo_msg('blocking data to {}/{} grid'.format(ycount, xcount))
    it = 0
    for this_xyz in src_xyz:
        x = this_xyz[0]
        y = this_xyz[1]
        if x > src_region[0] and x < src_region[1]:
            if y > src_region[2] and y < src_region[3]:
                xpos, ypos = utils._geo2pixel(x, y, dst_gt)
                # guard negative indices as well: a negative index would
                # silently wrap to the far side of the grid
                if 0 <= xpos < xcount and 0 <= ypos < ycount:
                    xyzArray.append(this_xyz)
                    blkArray[ypos, xpos].append(it)
                    it += 1
    return (blkArray, xyzArray)
def las_inf2(src_las):
    '''scan a las file with `lasinfo` and write an associated inf file
    for the src_las file.

    returns region [xmin, xmax, ymin, ymax, zmin, zmax] of the src_las
    file, or [] if lasinfo produced no min/max lines.'''
    minmax = []
    # FIX: initialize; if `lasinfo` fails or its output lacks the
    # 'min x y z'/'max x y z' lines, the originals were referenced unbound
    # and raised NameError.
    xyz_min = xyz_max = None
    out, status = utils.run_cmd(
        'lasinfo -nc -nv -stdout -i {}'.format(src_las), verbose=False)
    for i in out.split('\n'):
        if 'min x y z' in i:
            xyz_min = [
                float(y) for y in [x.strip() for x in i.split(':')][1].split()
            ]
        if 'max x y z' in i:
            xyz_max = [
                float(y) for y in [x.strip() for x in i.split(':')][1].split()
            ]
    if xyz_min is not None and xyz_max is not None:
        # interleave to [xmin, xmax, ymin, ymax, zmin, zmax]
        minmax = [
            xyz_min[0], xyz_max[0], xyz_min[1], xyz_max[1], xyz_min[2],
            xyz_max[2]
        ]
    with open('{}.inf'.format(src_las), 'w') as inf:
        utils.echo_msg('generating inf file for {}'.format(src_las))
        inf.write('{}\n'.format(' '.join([str(x) for x in minmax])))
    return (minmax)
def gmt_yield_entry(entry, region=None, verbose=False, z_region=None):
    """yield the xyz data from the xyz datalist entry via `gmt gmtselect`

    Args:
      entry (list): a datalist entry
      region (list): a region list [xmin, xmax, ymin, ymax]
      verbose (bool): increase verbosity
      z_region (list): a z-region [zmin, zmax]

    Yields:
      list: [x, y, z, <w, ...>]
    """
    ln = 0
    if z_region is not None:
        # gmtselect -Z wants '-' for an open-ended bound
        z_region = ['-' if x is None else str(x) for x in z_region]
    # force a space column separator so downstream parsing is stable
    out, status = utils.run_cmd('gmt gmtset IO_COL_SEPARATOR = SPACE',
                                verbose=False)
    for line in utils.yield_cmd(
            'gmt gmtselect {} {} {}'.format(
                entry[0],
                '' if region is None else regions.region_format(region, 'gmt'),
                '' if z_region is None else '-Z{}'.format('/'.join(z_region))),
            data_fun=None, verbose=False):
        ln += 1
        yield (line)
    if verbose:
        utils.echo_msg('read {} data points from {}'.format(ln, entry[0]))
def regions_sort(trainers, t_num=25, verbose=False):
    """sort regions by distance; regions is a list of regions [xmin, xmax, ymin, ymax].

    For each trainer group, greedily pulls up to t_num+1 tiles: after
    taking a tile, the remainder is shuffled and then stably sorted so
    that tiles farther than the median distance from the taken tile come
    first (the shuffle randomizes tie order before the stable sort).

    returns the sorted region-list
    """
    train_sorted = []
    # NOTE(review): `z` and `train_total` are assigned but never used below.
    for z, train in enumerate(trainers):
        train_d = []
        np.random.shuffle(train)
        train_total = len(train)
        while True:
            if verbose:
                utils.echo_msg_inline('sorting training tiles [{}]'.format(len(train)))
            if len(train) == 0:
                break
            # take the head tile and measure everything else against it
            this_center = region_center(train[0][0])
            train_d.append(train[0])
            train = train[1:]
            if len(train_d) > t_num or len(train) == 0:
                break
            # median distance from the just-taken tile's center
            dsts = [utils.euc_dst(this_center, region_center(x[0])) for x in train]
            min_dst = np.percentile(dsts, 50)
            d_t = lambda t: utils.euc_dst(this_center, region_center(t[0])) > min_dst
            # shuffle before the stable sort so equal-key tiles land in
            # random order; farther-than-median tiles sort to the front
            np.random.shuffle(train)
            train.sort(reverse=True, key=d_t)
        if verbose:
            utils.echo_msg(' '.join([region_format(x[0], 'gmt') for x in train_d[:t_num]]))
        train_sorted.append(train_d)
    if verbose:
        utils.echo_msg_inline('sorting training tiles [OK]\n')
    return(train_sorted)
def xyz_block(src_xyz, region, inc, weights=False, verbose=False):
    """block the src_xyz data to the mean block value

    Args:
      src_xyz (generator): list/generator of xyz data
      region (list): a `region` list [xmin, xmax, ymin, ymax]
      inc (float): blocking increment, in native units
      weights (bool): block using weights (expects a 4th `w` column)
      verbose (bool): increase verbosity

    Yields:
      list: xyz data [x, y, z] for each block with data
    """
    xcount, ycount, dst_gt = regions.region2gt(region, inc)
    sumArray = np.zeros((ycount, xcount))
    ptArray = np.zeros((ycount, xcount))
    if weights:
        wtArray = np.zeros((ycount, xcount))
    if verbose:
        utils.echo_msg('blocking data to {}/{} grid'.format(ycount, xcount))
    for this_xyz in src_xyz:
        x = this_xyz[0]
        y = this_xyz[1]
        z = this_xyz[2]
        if weights:
            # FIX: the original `z * this_xyz[3]` computed the weighted
            # value and discarded it; assign it so the weight is applied.
            z = z * this_xyz[3]
        if x > region[0] and x < region[1]:
            if y > region[2] and y < region[3]:
                xpos, ypos = utils._geo2pixel(x, y, dst_gt)
                # explicit bounds check instead of a bare `except: pass`;
                # also rejects negative indices, which numpy would wrap
                if 0 <= xpos < xcount and 0 <= ypos < ycount:
                    sumArray[ypos, xpos] += z
                    ptArray[ypos, xpos] += 1
                    if weights:
                        wtArray[ypos, xpos] += this_xyz[3]
    ptArray[ptArray == 0] = np.nan
    if weights:
        wtArray[wtArray == 0] = 1
        # NOTE(review): a conventional weighted mean is sum(z*w)/sum(w);
        # the extra division by the point count is kept from the original
        # -- confirm this is the intended statistic.
        outarray = (sumArray / wtArray) / ptArray
    else:
        outarray = sumArray / ptArray
    sumArray = ptArray = None
    if weights:
        wtArray = None
    outarray[np.isnan(outarray)] = -9999
    for y in range(0, ycount):
        for x in range(0, xcount):
            geo_x, geo_y = utils._pixel2geo(x, y, dst_gt)
            z = outarray[y, x]
            if z != -9999:
                yield ([geo_x, geo_y, z])
def archive_inf(archive, inf_file=True, epsg=None, overwrite=False):
    """return the region of the datalist and generate
    an associated `.inf` file if `inf_file` is True.

    Args:
      archive (str): a datalist archive entry pathname
      inf_file (bool): generate an inf file
      epsg (int): EPSG code
      overwrite (bool): overwrite a possibly existing inf_file

    Returns:
      dict: the inf dictionary {'name', 'minmax', 'numpts', 'wkt'}
    """
    out_regions = []
    dl_i = {'name': archive, 'minmax': None, 'numpts': 0, 'wkt': None}
    utils.echo_msg('generating inf for archive {}'.format(archive))
    entries = archive2dl(archive)
    for entry in entries:
        entry_inf = inf_entry(entry, epsg=epsg, overwrite=overwrite)
        if entry_inf is not None:
            out_regions.append(entry_inf['minmax'][:6])
            dl_i['numpts'] += entry_inf['numpts']
    out_regions = [x for x in out_regions if x is not None]
    if len(out_regions) == 0:
        dl_i['minmax'] = None
    elif len(out_regions) == 1:
        dl_i['minmax'] = out_regions[0]
    else:
        # merge all entry regions into one bounding region
        out_region = out_regions[0]
        for x in out_regions[1:]:
            out_region = regions.regions_merge(out_region, x)
        dl_i['minmax'] = out_region
    # FIX: compute the wkt for any non-None minmax; previously it was set
    # only on the multi-region merge path, leaving single-entry archives
    # with 'minmax' set but 'wkt' None.
    if dl_i['minmax'] is not None:
        dl_i['wkt'] = regions.region2wkt(dl_i['minmax'])
    if dl_i['minmax'] is not None and inf_file:
        with open('{}.inf'.format(archive), 'w') as inf:
            inf.write(json.dumps(dl_i))
    # clean up the extracted entries and their inf files
    # (plain loops; list comprehensions for side effects are an anti-idiom)
    for x in entries:
        utils.remove_glob('{}'.format(x[0]))
    for x in entries:
        utils.remove_glob('{}.inf'.format(x[0]))
    return(dl_i)
def las_yield_entry(entry, region=None, verbose=False, z_region=None):
    '''yield the xyz data from the las datalist entry via `las2txt`

    yields [x, y, z, <w, ...>]'''
    count = 0
    min_z = max_z = None
    if z_region is not None:
        min_z, max_z = z_region[0], z_region[1]
    # build the las2txt filter arguments; empty string when unused
    region_arg = '' if region is None else '-keep_xy {}'.format(
        regions.region_format(region, 'te'))
    below_arg = '' if min_z is None else '-drop_z_below {}'.format(min_z)
    above_arg = '' if max_z is None else '-drop_z_above {}'.format(max_z)
    cmd = 'las2txt -parse xyz -stdout -keep_class 2 29 -i {} {} {} {}'.format(
        entry[0], region_arg, below_arg, above_arg)
    for line in utils.yield_cmd(cmd, data_fun = None, verbose = False):
        count += 1
        pnt = [float(v) for v in line.strip().split()]
        if entry[2] is not None:
            # append the entry weight when one is set
            yield (pnt + [entry[2]])
        else:
            yield (pnt)
    if verbose:
        utils.echo_msg('read {} data points from {}'.format(count, entry[0]))
def las_inf(src_las):
    """scan a las file and generate an associated `.inf` file.

    Args:
      src_las (str): pathname to a las file

    Returns:
      dict: the inf dictionary {'name', 'numpts', 'minmax', 'wkt'}
    """
    pts = []
    lasi = {}
    lasi['name'] = src_las
    lasi['numpts'] = 0
    lasi['minmax'] = [0, 0, 0, 0, 0, 0]
    utils.echo_msg('generating inf file for {}'.format(src_las))
    for l in las_yield_entry([src_las, 400, None]):
        if not pts:
            # first point seeds the running min/max
            lasi['minmax'] = [l[0], l[0], l[1], l[1], l[2], l[2]]
        else:
            mm = lasi['minmax']
            if l[0] < mm[0]:
                mm[0] = l[0]
            elif l[0] > mm[1]:
                mm[1] = l[0]
            if l[1] < mm[2]:
                mm[2] = l[1]
            elif l[1] > mm[3]:
                mm[3] = l[1]
            if l[2] < mm[4]:
                mm[4] = l[2]
            elif l[2] > mm[5]:
                mm[5] = l[2]
        pts.append(l)
    # FIX: the original set numpts to the last enumerate index `i`, which
    # is off by one (a 1-point file reported 0 points and wrote no inf)
    # and raised NameError on an empty file (`i` never bound).
    lasi['numpts'] = len(pts)
    try:
        out_hull = [
            pts[i]
            for i in spatial.ConvexHull(pts, qhull_options='Qt').vertices
        ]
        out_hull.append(out_hull[0])
        lasi['wkt'] = utils.create_wkt_polygon(out_hull, xpos=0, ypos=1)
    except Exception:
        # qhull can fail on degenerate/empty input; fall back to the bbox
        lasi['wkt'] = regions.region2wkt(lasi['minmax'])
    if lasi['numpts'] > 0:
        with open('{}.inf'.format(src_las), 'w') as inf:
            inf.write(json.dumps(lasi))
    return (lasi)
def xyz_parse(src_xyz, xyz_c=_xyz_config, region=None, verbose=False):
    """xyz file parsing generator

    Args:
      src_xyz (generator): list/generator of xyz data
      xyz_c (dict): xyz config dictionary
      region (list): a `region` list [xmin, xmax, ymin, ymax]
      verbose (bool): increase verbosity

    Yields:
      list: xyz data [x, y, z, ...]
    """
    ln = 0
    pass_d = True
    skip = int(xyz_c['skip'])
    # NOTE(review): this mutates the caller's (possibly module-default)
    # config dict in place -- confirm callers rely on that.
    if xyz_c['epsg'] == xyz_c['warp'] or xyz_c['epsg'] is None:
        xyz_c['warp'] = None
    if xyz_c['warp'] is not None:
        src_srs = osr.SpatialReference()
        src_srs.ImportFromEPSG(int(xyz_c['epsg']))
        dst_srs = osr.SpatialReference()
        dst_srs.ImportFromEPSG(int(xyz_c['warp']))
        try:
            src_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
            dst_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
        except AttributeError:
            # GDAL < 3 has no axis-mapping API; lon/lat order is already
            # the traditional GIS order there
            pass
        dst_trans = osr.CoordinateTransformation(src_srs, dst_srs)
    else:
        src_srs = dst_srs = dst_trans = None
    for xyz in src_xyz:
        pass_d = True
        if ln >= skip:
            this_xyz = xyz_parse_line(xyz, xyz_c)
            if this_xyz is not None:
                if xyz_c['warp'] is not None:
                    this_xyz = xyz_warp(this_xyz, dst_trans)
                if region is not None:
                    # FIX: the original tested the function object
                    # `regions.region_valid_p` (always truthy) instead of
                    # calling it on `region`.
                    if regions.region_valid_p(region):
                        if not xyz_in_region_p(this_xyz, region):
                            pass_d = False
                if xyz_c['upper_limit'] is not None or xyz_c[
                        'lower_limit'] is not None:
                    if not regions.z_pass(this_xyz[2],
                                          upper_limit=xyz_c['upper_limit'],
                                          lower_limit=xyz_c['lower_limit']):
                        pass_d = False
            else:
                pass_d = False
            if pass_d:
                ln += 1
                yield (this_xyz)
        else:
            # consume one line of the skip budget; ln only counts yielded
            # records, so decrementing skip here skips exactly `skip` lines
            skip -= 1
    if verbose:
        utils.echo_msg('parsed {} data records from {}'.format(
            ln, xyz_c['name']))