def refine(L, input, region, method='bilinear'):
    """Change resolution back to base resolution and resample a raster.

    Args:
        L: Generalization step number; key into the global TMP_RAST
            registry of temporary rasters to clean up later.
        input: Name of the raster map to resample.
        region: Base-resolution Region to restore before resampling.
        method: Resampling method, either 'bilinear' or 'average'.

    Returns:
        Name of the new temporary raster resampled to base resolution.

    Raises:
        ValueError: If method is not 'bilinear' or 'average'.
    """
    # Fail fast on an unsupported method. Previously an unknown method fell
    # through both if-branches and returned the name of a map that was
    # never created.
    if method not in ('bilinear', 'average'):
        raise ValueError("method must be 'bilinear' or 'average'")

    # Pad the input by 2 cells so interpolation at the edges has support;
    # random 4-char suffix avoids name collisions between steps.
    suffix = ''.join(
        random.choice(string.ascii_letters + string.digits) for _ in range(4))
    input_padded = '_'.join(['tmp', input, '_padded']) + suffix
    TMP_RAST[L].append(input_padded)
    cell_padding(input=input, output=input_padded, radius=2)

    # Restore the base-resolution computational region before resampling.
    Region.write(region)

    suffix = ''.join(
        random.choice(string.ascii_letters + string.digits) for _ in range(4))
    input = '_'.join(['tmp', input, 'refined_base_resolution']) + suffix
    TMP_RAST[L].append(input)

    if method == 'bilinear':
        r.resamp_interp(input=input_padded, output=input, method="bilinear")
    else:  # method == 'average'
        r.resamp_stats(input=input_padded, output=input,
                       method='average', flags='w')

    return input
def worker(cmd):
    """Read one window of a raster map into a numpy array.

    Args:
        cmd: Tuple ``(window, src)`` where ``window`` is an iterable of
            (key, value) pairs providing 'north', 'south', 'west', 'east'
            bounds, and ``src`` is the raster map name.

    Returns:
        numpy.ndarray with the raster values for the window, or None if
        reading failed (best-effort behavior preserved from the original).
    """
    window, src = cmd
    reg = Region()
    old_reg = deepcopy(reg)

    # Bug fix: 'arr' was previously only assigned inside the try-block, so a
    # failed read raised NameError at 'return arr' instead of returning.
    arr = None

    try:
        # update region to the requested window; convert once rather than
        # building dict(window) for every key
        bounds = dict(window)
        reg.north = bounds['north']
        reg.south = bounds['south']
        reg.west = bounds['west']
        reg.east = bounds['east']
        reg.set_current()
        reg.write()
        reg.set_raster_region()

        # read raster data
        with RasterRow(src) as rs:
            arr = np.asarray(rs)
    except Exception:
        # Narrowed from a bare 'except:' (which also swallowed
        # KeyboardInterrupt/SystemExit); failure is signalled by arr=None.
        pass
    finally:
        # reset region
        old_reg.write()
        # NOTE(review): this refreshes the raster region from 'reg', not
        # 'old_reg' — looks intentional in the original but verify.
        reg.set_raster_region()

    return arr
def cleanup():
    """Remove all registered temporary rasters and restore the saved region."""
    grass.message("Deleting intermediate files...")
    # TMP_RAST maps each generalization level to its list of temporary maps;
    # only the map lists matter here, the level keys do not.
    for rasters in TMP_RAST.values():
        for name in rasters:
            # Only attempt removal when the map actually exists.
            found = grass.find_file(name)['fullname']
            if len(found) > 0:
                grass.run_command(
                    "g.remove", type="raster", name=name, flags="f",
                    quiet=True)
    # Put the computational region back the way we found it.
    Region.write(current_region)
def cleanup():
    """Remove all registered temporary rasters and restore the saved region."""
    grass.message("Deleting intermediate files...")
    # Bug fix: dict.iteritems() exists only in Python 2; the rest of this
    # file (including the sibling cleanup()) uses Python-3 dict.items().
    for k, v in TMP_RAST.items():
        for f in v:
            # Remove only maps that actually exist in the current mapset.
            if len(grass.find_file(f)['fullname']) > 0:
                grass.run_command(
                    "g.remove", type="raster", name=f, flags="f", quiet=True)
    # Restore the computational region saved at startup.
    Region.write(current_region)
def main():
    """Compute (buffered) raster statistics per vector geometry.

    For every geometry of the requested types in the input vector map, and
    for every requested buffer distance, this builds a temporary buffered
    vector, rasterizes it into a MASK, and collects either category
    tabulations (r.stats) or univariate statistics (r.univar) for each
    input raster. Results are written back to the vector attribute table,
    to stdout ('-'), or to a text file, depending on the output option.

    Relies on module-level: options, flags, TMP_MAPS, tmp_map, and helpers
    raster_type(), unset_mask(), align_current(), is_number().
    """
    # Split off an optional @mapset qualifier from the input name.
    in_vector = options["input"].split("@")[0]
    if len(options["input"].split("@")) > 1:
        in_mapset = options["input"].split("@")[1]
    else:
        in_mapset = None
    raster_maps = options["raster"].split(
        ",")  # raster file(s) to extract from
    output = options["output"]
    methods = tuple(options["methods"].split(","))
    # NOTE(review): map() returns a one-shot iterator on Python 3; percentile
    # is iterated more than once below (column generation and stats loop), so
    # this probably ought to be list(map(...)) — verify against callers.
    percentile = (None if options["percentile"] == "" else map(
        float, options["percentile"].split(",")))
    column_prefix = tuple(options["column_prefix"].split(","))
    buffers = options["buffers"].split(",")
    types = options["type"].split(",")
    layer = options["layer"]
    sep = options["separator"]
    update = flags["u"]
    tabulate = flags["t"]
    percent = flags["p"]
    remove = flags["r"]
    use_label = flags["l"]

    empty_buffer_warning = (
        "No data in raster map {} within buffer {} around geometry {}")

    # Do checks using pygrass
    for rmap in raster_maps:
        r_map = RasterAbstractBase(rmap)
        if not r_map.exist():
            grass.fatal("Could not find raster map {}.".format(rmap))

    # Temporarily stash a pre-existing user MASK so per-geometry MASKs can
    # be combined with it later.
    user_mask = False
    m_map = RasterAbstractBase("MASK", Mapset().name)
    if m_map.exist():
        grass.warning("Current MASK is temporarily renamed.")
        user_mask = True
        unset_mask()

    invect = VectorTopo(in_vector)
    if not invect.exist():
        grass.fatal("Vector file {} does not exist".format(in_vector))

    if output:
        if output == "-":
            out = None
        else:
            out = open(output, "w")

    # Check if input map is in current mapset (and thus editable)
    # NOTE(review): 'unicode' is Python 2 only, and .format(output) is
    # applied to a message with no placeholder — both look like leftovers.
    if in_mapset and unicode(in_mapset) != unicode(Mapset()):
        grass.fatal(
            "Input vector map is not in current mapset and cannot be modified. \
Please consider copying it to current mapset.".format(
                output))

    # Re-parse buffers as numeric values (int where possible).
    buffers = []
    for buf in options["buffers"].split(","):
        try:
            b = float(buf)
            if b.is_integer():
                buffers.append(int(b))
            else:
                buffers.append(b)
        # NOTE(review): bare except with an empty fatal message; if float()
        # fails, 'b' below is also unbound — worth tightening.
        except:
            grass.fatal("")
        if b < 0:
            grass.fatal("Negative buffer distance not supported!")

    ### Define column types depenting on statistic, map type and
    ### DB backend (SQLite supports only double and not real)
    # int: statistic produces allways integer precision
    # double: statistic produces allways floating point precision
    # map_type: precision f statistic depends on map type
    # Values are (row index in r.univar -g output, column type, column name).
    int_dict = {
        "number": (0, "int", "n"),
        "number_null": (1, "int", "null_cells"),
        "minimum": (3, "map_type", "min"),
        "maximum": (4, "map_type", "max"),
        "range": (5, "map_type", "range"),
        "average": (6, "double", "mean"),
        "average_abs": (7, "double", "mean_of_abs"),
        "stddev": (8, "double", "stddev"),
        "variance": (9, "double", "variance"),
        "coeff_var": (10, "double", "coeff_var"),
        "sum": (11, "map_type", "sum"),
        "first_quartile": (12, "map_type", "first_quartile"),
        "median": (13, "map_type", "median"),
        "third_quartile": (14, "map_type", "third_quartile"),
        "percentile": (15, "map_type", "percentile"),
    }

    if len(raster_maps) != len(column_prefix):
        grass.fatal(
            "Number of maps and number of column prefixes has to be equal!")

    # Generate list of required column names and types
    col_names = []
    valid_labels = []
    col_types = []
    for p in column_prefix:
        rmaptype, val_lab, rcats = raster_type(
            raster_maps[column_prefix.index(p)], tabulate, use_label)
        valid_labels.append(val_lab)

        for b in buffers:
            b_str = str(b).replace(".", "_")
            if tabulate:
                if rmaptype == "double precision":
                    grass.fatal(
                        "{} has floating point precision. Can only tabulate integer maps"
                        .format(raster_maps[column_prefix.index(p)]))
                col_names.append("{}_{}_b{}".format(p, "ncats", b_str))
                col_types.append("int")
                col_names.append("{}_{}_b{}".format(p, "mode", b_str))
                col_types.append("int")
                col_names.append("{}_{}_b{}".format(p, "null", b_str))
                col_types.append("double precision")
                col_names.append("{}_{}_b{}".format(p, "area_tot", b_str))
                col_types.append("double precision")

                # One area column per raster category (label or cat value).
                for rcat in rcats:
                    if use_label and valid_labels:
                        rcat = rcat[0].replace(" ", "_")
                    else:
                        rcat = rcat[1]
                    col_names.append("{}_{}_b{}".format(p, rcat, b_str))
                    col_types.append("double precision")
            else:
                for m in methods:
                    col_names.append("{}_{}_b{}".format(
                        p, int_dict[m][2], b_str))
                    col_types.append(rmaptype if int_dict[m][1] ==
                                     "map_type" else int_dict[m][1])
                if percentile:
                    for perc in percentile:
                        col_names.append("{}_percentile_{}_b{}".format(
                            p,
                            int(perc) if (perc).is_integer() else perc,
                            b_str))
                        col_types.append(rmaptype if int_dict[m][1] ==
                                         "map_type" else int_dict[m][1])

    # Open input vector map
    in_vect = VectorTopo(in_vector, layer=layer)
    in_vect.open(mode="r")

    # Get name for temporary map
    # NOTE(review): tmp_map is not assigned in this function; presumably a
    # module-level temporary name — confirm.
    global TMP_MAPS
    TMP_MAPS.append(tmp_map)

    # Setup stats collectors
    if tabulate:
        # Collector for raster category statistics
        stats = Module("r.stats", run_=False, stdout_=PIPE)
        stats.inputs.sort = "desc"
        stats.inputs.null_value = "null"
        stats.flags.quiet = True
        stats.flags.l = True
        if percent:
            stats.flags.p = True
            stats.flags.n = True
        else:
            stats.flags.a = True
    else:
        # Collector for univariat statistics
        univar = Module("r.univar", run_=False, stdout_=PIPE)
        univar.inputs.separator = sep
        univar.flags.g = True
        univar.flags.quiet = True

        # Add extended statistics if requested
        if set(methods).intersection(
                set(["first_quartile", "median", "third_quartile"])):
            univar.flags.e = True

        if percentile is not None:
            univar.flags.e = True
            univar.inputs.percentile = percentile

    # Check if attribute table exists
    if not output:
        if not in_vect.table:
            grass.fatal(
                "No attribute table found for vector map {}".format(in_vect))

        # Modify table as needed
        tab = in_vect.table
        tab_name = tab.name
        tab_cols = tab.columns

        # Add required columns
        existing_cols = list(set(tab_cols.names()).intersection(col_names))
        if len(existing_cols) > 0:
            if not update:
                in_vect.close()
                grass.fatal(
                    "Column(s) {} already exist! Please use the u-flag \
if you want to update values in those columns".
                    format(",".join(existing_cols)))
            else:
                grass.warning("Column(s) {} already exist!".format(
                    ",".join(existing_cols)))
                # Drop already-present columns from the to-create lists.
                for e in existing_cols:
                    idx = col_names.index(e)
                    del col_names[idx]
                    del col_types[idx]

        tab_cols.add(col_names, col_types)

        conn = tab.conn
        cur = conn.cursor()

        sql_str_start = "UPDATE {} SET ".format(tab_name)

    elif output == "-":
        print("cat{0}raster_map{0}buffer{0}statistic{0}value".format(sep))
    else:
        out.write("cat{0}raster_map{0}buffer{0}statistic{0}value{1}".format(
            sep, os.linesep))

    # Get computational region
    grass.use_temp_region()
    r = Region()
    r.read()

    # Adjust region extent to buffer around geometry
    # reg = deepcopy(r)

    # Create iterator for geometries of all selected types
    # NOTE(review): 'geoms' is re-assigned (not chained) per type, so only
    # the last geometry type with count > 0 is actually iterated — verify.
    geoms = chain()
    geoms_n = 0
    n_geom = 1
    for geom_type in types:
        geoms_n += in_vect.number_of(geom_type)
        if in_vect.number_of(geom_type) > 0:
            geoms = chain(in_vect.viter(geom_type))

    # Loop over geometries
    for geom in geoms:
        # Get cat
        cat = geom.cat

        # Add where clause to UPDATE statement
        sql_str_end = " WHERE cat = {};".format(cat)

        # Loop over ser provided buffer distances
        for buf in buffers:
            b_str = str(buf).replace(".", "_")
            # Buffer geometry
            if buf <= 0:
                buffer_geom = geom
            else:
                buffer_geom = geom.buffer(buf)

            # Create temporary vector map with buffered geometry
            tmp_vect = VectorTopo(tmp_map, quiet=True)
            tmp_vect.open(mode="w")
            tmp_vect.write(Boundary(points=buffer_geom[0].to_list()))
            # , c_cats=int(cat), set_cats=True
            # buffer() may return a callable centroid accessor or a point.
            if callable(buffer_geom[1]):
                tmp_vect.write(Centroid(x=buffer_geom[1]().x,
                                        y=buffer_geom[1]().y), cat=int(cat))
            else:
                tmp_vect.write(Centroid(x=buffer_geom[1].x,
                                        y=buffer_geom[1].y), cat=int(cat))

            #################################################
            # How to silence VectorTopo???
            #################################################
            # Save current stdout
            # original = sys.stdout
            # f = open(os.devnull, 'w')
            # with open('output.txt', 'w') as f:
            # sys.stdout = io.BytesIO()
            # sys.stdout.fileno() = os.devnull
            # sys.stderr = f
            # os.environ.update(dict(GRASS_VERBOSE='0'))
            tmp_vect.close(build=False)
            grass.run_command("v.build", map=tmp_map, quiet=True)
            # os.environ.update(dict(GRASS_VERBOSE='1'))

            # reg = Region()
            # reg.read()
            # r.from_vect(tmp_map)
            # Shrink region to the buffered bbox, aligned to the current grid.
            r = align_current(r, buffer_geom[0].bbox())
            r.write()

            # Check if the following is needed
            # needed specially with r.stats -p
            # grass.run_command('g.region', vector=tmp_map, flags='a')

            # Create a MASK from buffered geometry
            if user_mask:
                # Combine geometry raster with the renamed user MASK.
                grass.run_command(
                    "v.to.rast",
                    input=tmp_map,
                    output=tmp_map,
                    use="val",
                    value=int(cat),
                    quiet=True,
                )
                mc_expression = (
                    "MASK=if(!isnull({0}) && !isnull({0}_MASK), {1}, null())".
                    format(tmp_map, cat))
                grass.run_command("r.mapcalc",
                                  expression=mc_expression,
                                  quiet=True)
            else:
                grass.run_command(
                    "v.to.rast",
                    input=tmp_map,
                    output="MASK",
                    use="val",
                    value=int(cat),
                    quiet=True,
                )

            # reg.write()

            updates = []
            # Compute statistics for every raster map
            for rm, rmap in enumerate(raster_maps):
                # rmap = raster_maps[rm]
                prefix = column_prefix[rm]

                if tabulate:
                    # Get statistics on occurrence of raster categories
                    # within buffer
                    stats.inputs.input = rmap
                    stats.run()
                    # Rewrite "cat area" lines into "cat_b<buf> = area".
                    # NOTE(review): the first replace(" ", " ") is a no-op as
                    # written; it may originally have collapsed double
                    # spaces — confirm against upstream.
                    t_stats = (stats.outputs["stdout"].value.rstrip(
                        os.linesep).replace(" ", " ").replace(
                            "no data", "no_data").replace(
                                " ", "_b{} = ".format(b_str)).split(os.linesep))
                    if t_stats == [""]:
                        grass.warning(
                            empty_buffer_warning.format(rmap, buf, cat))
                        continue
                    # Mode is the first (largest-area) non-null category.
                    if (t_stats[0].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1] !=
                            "null"):
                        mode = (t_stats[0].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1])
                    elif len(t_stats) == 1:
                        mode = "NULL"
                    else:
                        mode = (t_stats[1].split(
                            "_b{} = ".format(b_str))[0].split("_")[-1])

                    if not output:
                        updates.append("\t{}_{}_b{} = {}".format(
                            prefix, "ncats", b_str, len(t_stats)))
                        updates.append("\t{}_{}_b{} = {}".format(
                            prefix, "mode", b_str, mode))

                        area_tot = 0
                        for l in t_stats:
                            # check if raster maps has category or not
                            if len(l.split("=")) == 2:
                                updates.append("\t{}_{}".format(
                                    prefix, l.rstrip("%")))
                            elif not l.startswith("null"):
                                vals = l.split("=")
                                updates.append("\t{}_{} = {}".format(
                                    prefix,
                                    vals[-2].strip()
                                    if valid_labels[rm] else vals[0].strip(),
                                    vals[-1].strip().rstrip("%"),
                                ))
                            if not l.startswith("null"):
                                area_tot += float(
                                    l.rstrip("%").split("= ")[-1])
                        if not percent:
                            updates.append("\t{}_{}_b{} = {}".format(
                                prefix, "area_tot", b_str, area_tot))
                    else:
                        out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                            sep, cat, prefix, buf, "ncats", len(t_stats),
                            os.linesep)
                        out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                            sep, cat, prefix, buf, "mode", mode, os.linesep)
                        area_tot = 0
                        for l in t_stats:
                            rcat = (l.split("= ")[1].rstrip(
                                "_b{} = ".format(b_str))
                                if valid_labels[rm] else l.split("_")[0])
                            area = l.split("= ")[-1]
                            out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                "area {}".format(rcat),
                                area,
                                os.linesep,
                            )
                            if rcat != "null":
                                area_tot = area_tot + float(
                                    l.rstrip("%").split("= ")[-1])
                        if not percent:
                            out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                "area total",
                                area_tot,
                                os.linesep,
                            )
                        if output == "-":
                            print(out_str.rstrip(os.linesep))
                        else:
                            out.write(out_str)

                else:
                    # Get univariate statistics within buffer
                    univar.inputs.map = rmap
                    univar.run()
                    u_stats = (univar.outputs["stdout"].value.rstrip(
                        os.linesep).replace(
                            "=", "_b{} = ".format(b_str)).split(os.linesep))

                    # Test if u_stats is empty and give warning
                    # Needs to be adjusted to number of requested stats?
                    if ((percentile and len(u_stats) < 14)
                            or (univar.flags.e and len(u_stats) < 13)
                            or len(u_stats) < 12):
                        grass.warning(
                            empty_buffer_warning.format(rmap, buf, cat))
                        break

                    # Extract statistics for selected methods
                    for m in methods:
                        if not output:
                            # Add to list of UPDATE statements; non-numeric
                            # values (e.g. nan) are stored as SQL NULL.
                            updates.append("\t{}_{}".format(
                                prefix,
                                u_stats[int_dict[m][0]] if is_number(
                                    u_stats[int_dict[m][0]].split(" = ")[1])
                                else " = ".join([
                                    u_stats[int_dict[m][0]].split(" = ")[0],
                                    "NULL",
                                ]),
                            ))
                        else:
                            out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format(
                                sep,
                                cat,
                                prefix,
                                buf,
                                m,
                                u_stats[int_dict[m][0]].split("= ")[1],
                            )
                            if output == "-":
                                print(out_str)
                            else:
                                out.write("{}{}".format(out_str, os.linesep))

                    if percentile:
                        perc_count = 0
                        for perc in percentile:
                            if not output:
                                # NOTE(review): 'p' is the leftover loop
                                # variable from column generation; 'prefix'
                                # was probably intended here — confirm.
                                updates.append(
                                    "{}_percentile_{}_b{} = {}".format(
                                        p,
                                        int(perc) if
                                        (perc).is_integer() else perc,
                                        b_str,
                                        u_stats[15 +
                                                perc_count].split("= ")[1],
                                    ))
                            else:
                                out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format(
                                    sep,
                                    cat,
                                    prefix,
                                    buf,
                                    "percentile_{}".format(
                                        int(perc) if (
                                            perc).is_integer() else perc),
                                    u_stats[15 + perc_count].split("= ")[1],
                                )
                                if output == "-":
                                    print(out_str)
                                else:
                                    out.write(out_str)
                            perc_count = perc_count + 1

            if not output and len(updates) > 0:
                cur.execute("{}{}{}".format(sql_str_start,
                                            ",\n".join(updates),
                                            sql_str_end))

            # Remove temporary maps
            # , stderr=os.devnull, stdout_=os.devnull)
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name="MASK",
                              quiet=True)
            grass.run_command("g.remove",
                              flags="f",
                              type="vector",
                              name=tmp_map,
                              quiet=True)

        # Give progress information
        grass.percent(n_geom, geoms_n, 1)
        n_geom = n_geom + 1

        if not output:
            conn.commit()

    # Close cursor and DB connection
    if not output and not output == "-":
        cur.close()
        conn.close()
        # Update history
        grass.vector.vector_history(in_vector)
    elif output != "-":
        # write results to file
        out.close()

    # Optionally drop columns that ended up entirely empty.
    if remove and not output:
        dropcols = []
        selectnum = "select count({}) from {}"
        for i in col_names:
            thisrow = grass.read_command("db.select",
                                         flags="c",
                                         sql=selectnum.format(i, in_vector))
            if int(thisrow) == 0:
                dropcols.append(i)
        grass.debug("Columns to delete: {}".format(", ".join(dropcols)),
                    debug=2)
        if dropcols:
            grass.run_command("v.db.dropcolumn",
                              map=in_vector,
                              columns=dropcols)
def main():
    """Compute (buffered) raster statistics per vector geometry.

    Older (Python-2 era) variant of the buffered-statistics main(): for
    every geometry and buffer distance, rasterize the buffer into a MASK
    and collect r.stats tabulations or r.univar statistics for each input
    raster, writing results to the attribute table, stdout, or a file.

    Relies on module-level: options, flags, TMP_MAPS, tmp_map, and helpers
    raster_type(), unset_mask(), align_current().
    """
    # Split off an optional @mapset qualifier from the input name.
    in_vector = options['input'].split('@')[0]
    if len(options['input'].split('@')) > 1:
        in_mapset = options['input'].split('@')[1]
    else:
        in_mapset = None
    raster_maps = options['raster'].split(',')  # raster file(s) to extract from
    output = options['output']
    methods = tuple(options['methods'].split(','))
    # NOTE(review): map() is a one-shot iterator on Python 3 and percentile
    # is iterated repeatedly below; fine on Python 2 (this variant also
    # uses 'unicode'), broken on Python 3.
    percentile = None if options['percentile'] == '' else map(float, options['percentile'].split(','))
    column_prefix = tuple(options['column_prefix'].split(','))
    buffers = options['buffers'].split(',')
    types = options['type'].split(',')
    layer = options['layer']
    sep = options['separator']
    update = flags['u']
    tabulate = flags['t']
    percent = flags['p']
    remove = flags['r']
    use_lable = False

    empty_buffer_warning = 'No data in raster map {} within buffer {} around geometry {}'

    # Do checks using pygrass
    for rmap in raster_maps:
        r_map = RasterAbstractBase(rmap)
        if not r_map.exist():
            grass.fatal('Could not find raster map {}.'.format(rmap))

    # Stash a pre-existing user MASK so per-geometry MASKs can include it.
    user_mask = False
    m_map = RasterAbstractBase('MASK', Mapset().name)
    if m_map.exist():
        grass.warning("Current MASK is temporarily renamed.")
        user_mask = True
        unset_mask()

    invect = VectorTopo(in_vector)
    if not invect.exist():
        grass.fatal("Vector file {} does not exist".format(in_vector))

    if output:
        if output == '-':
            out = None
        else:
            out = open(output, 'w')

    # Check if input map is in current mapset (and thus editable)
    # NOTE(review): .format(output) is applied to a message without a
    # placeholder — a leftover.
    if in_mapset and unicode(in_mapset) != unicode(Mapset()):
        grass.fatal("Input vector map is not in current mapset and cannot be modified. \
Please consider copying it to current mapset.".format(output))

    # Re-parse buffers as numeric values (int where possible).
    buffers = []
    for buf in options['buffers'].split(','):
        try:
            b = float(buf)
            if b.is_integer():
                buffers.append(int(b))
            else:
                buffers.append(b)
        # NOTE(review): bare except and empty fatal message; 'b' may be
        # unbound in the check below when float() failed.
        except:
            grass.fatal('')
        if b < 0:
            grass.fatal("Negative buffer distance not supported!")

    ### Define column types depenting on statistic, map type and
    ### DB backend (SQLite supports only double and not real)
    # int: statistic produces allways integer precision
    # double: statistic produces allways floating point precision
    # map_type: precision f statistic depends on map type
    # Values are (row index in r.univar -g output, column type, column name).
    int_dict = {'number': (0, 'int', 'n'),
                'number_null': (1, 'int', 'null_cells'),
                'minimum': (3, 'map_type', 'min'),
                'maximum': (4, 'map_type', 'max'),
                'range': (5, 'map_type', 'range'),
                'average': (6, 'double', 'mean'),
                'average_abs': (7, 'double', 'mean_of_abs'),
                'stddev': (8, 'double', 'stddev'),
                'variance': (9, 'double', 'variance'),
                'coeff_var': (10, 'double', 'coeff_var'),
                'sum': (11, 'map_type', 'sum'),
                'first_quartile': (12, 'map_type', 'first_quartile'),
                'median': (13, 'map_type', 'median'),
                'third_quartile': (14, 'map_type', 'third_quartile'),
                'percentile': (15, 'map_type', 'percentile')}

    if len(raster_maps) != len(column_prefix):
        grass.fatal('Number of maps and number of column prefixes has to be equal!')

    # Generate list of required column names and types
    col_names = []
    col_types = []
    for p in column_prefix:
        rmaptype, rcats = raster_type(raster_maps[column_prefix.index(p)], tabulate, use_lable)
        for b in buffers:
            b_str = str(b).replace('.', '_')
            if tabulate:
                if rmaptype == 'double precision':
                    grass.fatal('{} has floating point precision. Can only tabulate integer maps'.format(raster_maps[column_prefix.index(p)]))
                col_names.append('{}_{}_b{}'.format(p, 'ncats', b_str))
                col_types.append('int')
                col_names.append('{}_{}_b{}'.format(p, 'mode', b_str))
                col_types.append('int')
                col_names.append('{}_{}_b{}'.format(p, 'null', b_str))
                col_types.append('double precision')
                col_names.append('{}_{}_b{}'.format(p, 'area_tot', b_str))
                col_types.append('double precision')
                # One area column per raster category (label or cat value).
                for rcat in rcats:
                    if use_lable:
                        rcat = rcat[1].replace(" ", "_")
                    else:
                        rcat = rcat[0]
                    col_names.append('{}_{}_b{}'.format(p, rcat, b_str))
                    col_types.append('double precision')
            else:
                for m in methods:
                    col_names.append('{}_{}_b{}'.format(p, int_dict[m][2], b_str))
                    col_types.append(rmaptype if int_dict[m][1] == 'map_type' else int_dict[m][1])
                if percentile:
                    for perc in percentile:
                        col_names.append('{}_percentile_{}_b{}'.format(p, int(perc) if (perc).is_integer() else perc, b_str))
                        col_types.append(rmaptype if int_dict[m][1] == 'map_type' else int_dict[m][1])

    # Open input vector map
    in_vect = VectorTopo(in_vector, layer=layer)
    in_vect.open(mode='r')

    # Get name for temporary map
    # NOTE(review): tmp_map is not assigned here; presumably module-level.
    TMP_MAPS.append(tmp_map)

    # Setup stats collectors
    if tabulate:
        # Collector for raster category statistics
        stats = Module('r.stats', run_=False, stdout_=PIPE)
        stats.inputs.sort = 'desc'
        stats.inputs.null_value = 'null'
        stats.flags.quiet = True
        if percent:
            stats.flags.p = True
            stats.flags.n = True
        else:
            stats.flags.a = True
    else:
        # Collector for univariat statistics
        univar = Module('r.univar', run_=False, stdout_=PIPE)
        univar.inputs.separator = sep
        univar.flags.g = True
        univar.flags.quiet = True

        # Add extended statistics if requested
        if set(methods).intersection(set(['first_quartile', 'median', 'third_quartile'])):
            univar.flags.e = True

        if percentile is not None:
            univar.flags.e = True
            univar.inputs.percentile = percentile

    # Check if attribute table exists
    if not output:
        if not in_vect.table:
            grass.fatal('No attribute table found for vector map {}'.format(in_vect))

        # Modify table as needed
        tab = in_vect.table
        tab_name = tab.name
        tab_cols = tab.columns

        # Add required columns
        existing_cols = list(set(tab_cols.names()).intersection(col_names))
        if len(existing_cols) > 0:
            if not update:
                grass.fatal('Column(s) {} already exist! Please use the u-flag \
if you want to update values in those columns'.format(','.join(existing_cols)))
            else:
                grass.warning('Column(s) {} already exist!'.format(','.join(existing_cols)))
                # Drop already-present columns from the to-create lists.
                for e in existing_cols:
                    idx = col_names.index(e)
                    del col_names[idx]
                    del col_types[idx]

        tab_cols.add(col_names, col_types)

        conn = tab.conn
        cur = conn.cursor()

        sql_str_start = 'UPDATE {} SET '.format(tab_name)

    elif output == '-':
        print('cat{0}raster_map{0}buffer{0}statistic{0}value'.format(sep))
    else:
        out.write('cat{0}raster_map{0}buffer{0}statistic{0}value{1}'.format(sep, os.linesep))

    # Get computational region
    grass.use_temp_region()
    r = Region()
    r.read()

    # Adjust region extent to buffer around geometry
    #reg = deepcopy(r)

    # Create iterator for geometries of all selected types
    # NOTE(review): 'geoms' is re-assigned per type, so only the last
    # geometry type with count > 0 is iterated — verify intent.
    geoms = chain()
    geoms_n = 0
    n_geom = 1
    for geom_type in types:
        geoms_n += in_vect.number_of(geom_type)
        if in_vect.number_of(geom_type) > 0:
            geoms = chain(in_vect.viter(geom_type))

    # Loop over geometries
    for geom in geoms:
        # Get cat
        cat = geom.cat

        # Add where clause to UPDATE statement
        sql_str_end = ' WHERE cat = {};'.format(cat)

        # Loop over ser provided buffer distances
        for buf in buffers:
            b_str = str(buf).replace('.', '_')
            # Buffer geometry
            if buf <= 0:
                buffer_geom = geom
            else:
                buffer_geom = geom.buffer(buf)

            # Create temporary vector map with buffered geometry
            tmp_vect = VectorTopo(tmp_map, quiet=True)
            tmp_vect.open(mode='w')
            #print(int(cat))
            tmp_vect.write(Boundary(points=buffer_geom[0].to_list()))
            # , c_cats=int(cat), set_cats=True
            tmp_vect.write(Centroid(x=buffer_geom[1].x, y=buffer_geom[1].y), cat=int(cat))

            #################################################
            # How to silence VectorTopo???
            #################################################
            # Save current stdout
            #original = sys.stdout
            #f = open(os.devnull, 'w')
            #with open('output.txt', 'w') as f:
            #sys.stdout = io.BytesIO()
            #sys.stdout.fileno() = os.devnull
            #sys.stderr = f
            #os.environ.update(dict(GRASS_VERBOSE='0'))
            tmp_vect.close(build=False)
            grass.run_command('v.build', map=tmp_map, quiet=True)
            #os.environ.update(dict(GRASS_VERBOSE='1'))

            #reg = Region()
            #reg.read()
            #r.from_vect(tmp_map)
            # Shrink region to the buffered bbox, aligned to the grid.
            r = align_current(r, buffer_geom[0].bbox())
            r.write()

            # Check if the following is needed
            # needed specially with r.stats -p
            #grass.run_command('g.region', vector=tmp_map, flags='a')

            # Create a MASK from buffered geometry
            if user_mask:
                # Combine geometry raster with the renamed user MASK.
                grass.run_command('v.to.rast', input=tmp_map,
                                  output=tmp_map, use='val',
                                  value=int(cat), quiet=True)
                mc_expression = "MASK=if(!isnull({0}) && !isnull({0}_MASK), {1}, null())".format(tmp_map, cat)
                grass.run_command('r.mapcalc', expression=mc_expression, quiet=True)
            else:
                grass.run_command('v.to.rast', input=tmp_map,
                                  output='MASK', use='val',
                                  value=int(cat), quiet=True)
            #reg.write()

            updates = []
            # Compute statistics for every raster map
            for rm in range(len(raster_maps)):
                rmap = raster_maps[rm]
                prefix = column_prefix[rm]

                if tabulate:
                    # Get statistics on occurrence of raster categories within buffer
                    # Rewrite "cat area" lines into "cat_b<buf> = area".
                    stats.inputs.input = rmap
                    stats.run()
                    t_stats = stats.outputs['stdout'].value.rstrip(os.linesep).replace(' ', '_b{} = '.format(b_str)).split(os.linesep)
                    # Mode is the first (largest-area) non-null category.
                    if t_stats[0].split('_b{} = '.format(b_str))[0].split('_')[-1] != 'null':
                        mode = t_stats[0].split('_b{} = '.format(b_str))[0].split('_')[-1]
                    elif len(t_stats) == 1:
                        mode = 'NULL'
                    else:
                        mode = t_stats[1].split('_b{} = '.format(b_str))[0].split('_')[-1]

                    if not output:
                        updates.append('\t{}_{}_b{} = {}'.format(prefix, 'ncats', b_str, len(t_stats)))
                        updates.append('\t{}_{}_b{} = {}'.format(prefix, 'mode', b_str, mode))
                        area_tot = 0
                        for l in t_stats:
                            updates.append('\t{}_{}'.format(prefix, l.rstrip('%')))
                            if l.split('_b{} ='.format(b_str))[0].split('_')[-1] != 'null':
                                area_tot = area_tot + float(l.rstrip('%').split('= ')[1])
                        if not percent:
                            updates.append('\t{}_{}_b{} = {}'.format(prefix, 'area_tot', b_str, area_tot))
                    else:
                        out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(sep, cat, prefix, buf, 'ncats', len(t_stats), os.linesep)
                        out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(sep, cat, prefix, buf, 'mode', mode, os.linesep)
                        area_tot = 0
                        if not t_stats[0]:
                            grass.warning(empty_buffer_warning.format(rmap, buf, cat))
                            continue
                        for l in t_stats:
                            rcat = l.split('_b{} ='.format(b_str))[0].split('_')[-1]
                            area = l.split('= ')[1]
                            out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(sep, cat, prefix, buf, 'area {}'.format(rcat), area, os.linesep)
                            if rcat != 'null':
                                area_tot = area_tot + float(l.rstrip('%').split('= ')[1])
                        out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(sep, cat, prefix, buf, 'area_tot', area_tot, os.linesep)
                        if output == '-':
                            print(out_str.rstrip(os.linesep))
                        else:
                            out.write(out_str)

                else:
                    # Get univariate statistics within buffer
                    univar.inputs.map = rmap
                    univar.run()
                    u_stats = univar.outputs['stdout'].value.rstrip(os.linesep).replace('=', '_b{} = '.format(b_str)).split(os.linesep)

                    # Test if u_stats is empty and give warning
                    # Needs to be adjusted to number of requested stats?
                    if (percentile and len(u_stats) < 14) or (univar.flags.e and len(u_stats) < 13) or len(u_stats) < 12:
                        grass.warning(empty_buffer_warning.format(rmap, buf, cat))
                        break

                    # Extract statistics for selected methods
                    for m in methods:
                        if not output:
                            # Add to list of UPDATE statements
                            updates.append('\t{}_{}'.format(prefix, u_stats[int_dict[m][0]]))
                        else:
                            out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}'.format(sep, cat, prefix, buf, m, u_stats[int_dict[m][0]].split('= ')[1])
                            if output == '-':
                                print(out_str)
                            else:
                                out.write("{}{}".format(out_str, os.linesep))

                    if percentile:
                        perc_count = 0
                        for perc in percentile:
                            if not output:
                                # NOTE(review): 'p' is the leftover loop
                                # variable from column generation; 'prefix'
                                # was probably intended — confirm.
                                updates.append('{}_percentile_{}_b{} = {}'.format(p, int(perc) if (perc).is_integer() else perc, b_str, u_stats[15+perc_count].split('= ')[1]))
                            else:
                                out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}'.format(sep, cat, prefix, buf, 'percentile_{}'.format(int(perc) if (perc).is_integer() else perc), u_stats[15+perc_count].split('= ')[1])
                                if output == '-':
                                    print(out_str)
                                else:
                                    out.write(out_str)
                            perc_count = perc_count + 1

            if not output and len(updates) > 0:
                cur.execute('{}{}{}'.format(sql_str_start, ',\n'.join(updates), sql_str_end))

            # Remove temporary maps
            #, stderr=os.devnull, stdout_=os.devnull)
            grass.run_command('g.remove', flags='f', type='raster', name='MASK', quiet=True)
            grass.run_command('g.remove', flags='f', type='vector', name=tmp_map, quiet=True)

        # Give progress information
        grass.percent(n_geom, geoms_n, 1)
        n_geom = n_geom + 1

        if not output:
            conn.commit()

    # Close cursor and DB connection
    if not output and not output == "-":
        cur.close()
        conn.close()
        # Update history
        grass.vector.vector_history(in_vector)
    elif output != "-":
        # write results to file
        out.close()

    # Optionally drop columns that ended up entirely empty.
    if remove:
        dropcols = []
        selectnum = 'select count({}) from {}'
        for i in col_names:
            thisrow = grass.read_command('db.select', flags='c', sql=selectnum.format(i, in_vector))
            if int(thisrow) == 0:
                dropcols.append(i)
        grass.debug("Columns to delete: {}".format(', '.join(dropcols)), debug=2)
        grass.run_command('v.db.dropcolumn', map=in_vector, columns=dropcols)
def main():
    """Compute a multi-scale Topographic Position Index (TPI).

    Generalizes the input DEM over a logarithmic sequence of neighborhood
    radii (coarsest first), standardizes the TPI at each scale, and
    integrates the scales by keeping, per cell, the standardized value with
    the largest absolute magnitude. The result is renamed to the requested
    output and given a diverging blue-white-red color table.
    """
    # options and flags
    options, flags = gs.parser()
    input_raster = options["input"]
    minradius = int(options["minradius"])
    maxradius = int(options["maxradius"])
    steps = int(options["steps"])
    output_raster = options["output"]

    region = Region()
    res = np.mean([region.nsres, region.ewres])

    # some checks
    if "@" in output_raster:
        output_raster = output_raster.split("@")[0]

    if maxradius <= minradius:
        gs.fatal("maxradius must be greater than minradius")

    if steps < 2:
        gs.fatal("steps must be greater than 1")

    # calculate radi for generalization (log-spaced, deduplicated)
    # fix: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int is the documented replacement
    radi = np.logspace(np.log(minradius), np.log(maxradius), steps,
                       base=np.exp(1), dtype=int)
    radi = np.unique(radi)
    sizes = radi * 2 + 1

    # multiscale calculation, coarsest radius first
    ztpi_maps = list()

    for step, (radius, size) in enumerate(zip(radi[::-1], sizes[::-1])):
        gs.message(
            "Calculating the TPI at radius {radius}".format(radius=radius))

        # generalize the dem
        step_res = res * size
        generalized_dem = gs.tempname(4)

        if size > 15:
            # large windows: average-resample to a coarse grid, then
            # interpolate back to base resolution (cheaper than r.neighbors)
            step_dem = gs.tempname(4)
            gg.region(res=str(step_res))
            gr.resamp_stats(
                input=input_raster,
                output=step_dem,
                method="average",
                flags="w",
            )
            gr.resamp_rst(
                input=step_dem,
                ew_res=res,
                ns_res=res,
                elevation=generalized_dem,
                quiet=True,
            )
            region.write()
            gg.remove(type="raster", name=step_dem, flags="f", quiet=True)
        else:
            # small windows: a moving-average filter is sufficient
            gr.neighbors(input=input_raster, output=generalized_dem,
                         size=size)

        # calculate the tpi (deviation of the DEM from its generalization)
        tpi = gs.tempname(4)
        gr.mapcalc(expression="{x} = {a} - {b}".format(
            x=tpi, a=input_raster, b=generalized_dem))
        gg.remove(type="raster", name=generalized_dem, flags="f", quiet=True)

        # standardize the tpi
        raster_stats = gr.univar(map=tpi, flags="g",
                                 stdout_=PIPE).outputs.stdout
        raster_stats = parse_key_val(raster_stats)
        tpi_mean = float(raster_stats["mean"])
        tpi_std = float(raster_stats["stddev"])
        ztpi = gs.tempname(4)
        ztpi_maps.append(ztpi)
        RAST_REMOVE.append(ztpi)

        gr.mapcalc(expression="{x} = ({a} - {mean})/{std}".format(
            x=ztpi, a=tpi, mean=tpi_mean, std=tpi_std))
        gg.remove(type="raster", name=tpi, flags="f", quiet=True)

        # integrate: keep, per cell, the standardized TPI with the largest
        # absolute value across all scales processed so far.
        # Bug fixes vs. the original:
        #   - condition was `step > 1`, which silently skipped ztpi_maps[1]
        #     in the integration;
        #   - with steps == 2 the `step > 1` branch never ran, leaving
        #     tpi_updated2 unbound and crashing the rename below.
        if step > 0:
            tpi_updated2 = gs.tempname(4)
            gr.mapcalc("{x} = if(abs({a}) > abs({b}), {a}, {b})".format(
                a=ztpi_maps[step], b=tpi_updated1, x=tpi_updated2))
            RAST_REMOVE.append(tpi_updated2)
            tpi_updated1 = tpi_updated2
        else:
            tpi_updated1 = ztpi_maps[0]

    # the last integrated map becomes the output; drop it from the removal
    # list so cleanup does not delete the renamed result
    RAST_REMOVE.pop()
    gg.rename(raster=(tpi_updated1, output_raster), quiet=True)

    # set color theme
    with RasterRow(output_raster) as src:
        color_rules = """{minv} blue
-1 0:34:198
0 255:255:255
1 255:0:0
{maxv} 110:15:0
"""
        color_rules = color_rules.format(minv=src.info.min,
                                         maxv=src.info.max)
        gr.colors(map=output_raster, rules="-", stdin_=color_rules,
                  quiet=True)
def main():
    """Compute the Multi-Resolution Valley Bottom Flatness (MRVBF) index.

    Implements the MRVBF (and optionally MRRTF, ridge-top flatness)
    algorithm of Gallant & Dowling: the DEM is progressively generalized
    (3x coarser per level); at each level slope, flatness and elevation
    percentile are combined into valley-flatness and integrated into the
    running MRVBF/MRRTF maps. Results are written to the rasters named by
    the ``mrvbf`` / ``mrrtf`` options.

    Reads all inputs from the module-level ``options``/``flags``; returns
    None. Per-level temporaries are tracked in the global TMP_RAST dict
    (keyed by level L) and removed by cleanup().
    """
    r_elevation = options["elevation"]
    mrvbf = options["mrvbf"].split("@")[0]
    mrrtf = options["mrrtf"].split("@")[0]
    t_slope = float(options["t_slope"])
    t_pctl_v = float(options["t_pctl_v"])
    t_pctl_r = float(options["t_pctl_r"])
    # BUG FIX: t_vf previously read options["t_rf"], silently ignoring the
    # user's valley-flatness threshold setting
    t_vf = float(options["t_vf"])
    t_rf = float(options["t_rf"])
    p_slope = float(options["p_slope"])
    p_pctl = float(options["p_pctl"])
    moving_window_square = flags["s"]
    min_cells = int(options["min_cells"])

    global current_region, TMP_RAST, L
    TMP_RAST = {}
    current_region = Region()

    # some checks
    if (t_slope <= 0 or t_pctl_v <= 0 or t_pctl_r <= 0 or t_vf <= 0
            or t_rf <= 0 or p_slope <= 0 or p_pctl <= 0):
        gs.fatal("Parameter values cannot be <= 0")

    if min_cells < 2:
        gs.fatal(
            "Minimum number of cells in generalized DEM cannot be less than 2")

    if min_cells > current_region.cells:
        gs.fatal(
            "Minimum number of cells in the generalized DEM cannot exceed the ungeneralized number of cells"
        )

    # calculate the number of levels: coarsen 3x per level until fewer than
    # min_cells remain, then restore the original region
    levels = 2
    remaining_cells = current_region.cells
    while remaining_cells >= min_cells:
        levels += 1
        g.region(nsres=Region().nsres * 3, ewres=Region().ewres * 3)
        remaining_cells = Region().cells
    current_region.write()

    if levels < 3:
        gs.fatal(
            "MRVBF algorithm requires a greater level of generalization. Reduce number of min_cells or use a larger computational region."
        )

    gs.message("Parameter Settings")
    gs.message("------------------")
    gs.message("min_cells = %d will result in %d generalization steps" %
               (min_cells, levels))

    # intermediate outputs, one entry per level L
    Xres_step = list()
    Yres_step = list()
    DEM = list()
    SLOPE = list()
    F = list()        # flatness
    PCTL = list()     # elevation percentile
    PVF = list()      # preliminary valley flatness
    PVF_RF = list()   # preliminary ridge flatness
    VF = list()       # valley flatness
    VF_RF = list()    # ridge flatness
    MRVBF = list()
    MRRTF = list()

    # step 1 at base resolution -------------------------------------------------------
    L = 0
    TMP_RAST[L] = list()
    Xres_step.append(current_region.ewres)
    Yres_step.append(current_region.nsres)
    DEM.append(r_elevation)
    radius = 3
    step_message(L, Xres_step[L], Yres_step[L], current_region.cells, t_slope)

    # calculation of slope (S1) and calculation of flatness (F1) (equation 2)
    SLOPE.append(calc_slope(DEM[L]))
    F.append(flatness(SLOPE[L], t_slope, p_slope))

    # calculation of elevation percentile PCTL for step 1
    PCTL.append(elevation_percentile(DEM[L], radius, moving_window_square))

    # transform elevation percentile to local lowness for step 1 (equation 3)
    PVF.append(prelim_flatness_valleys(F[L], PCTL[L], t_pctl_v, p_pctl))
    if mrrtf != "":
        PVF_RF.append(prelim_flatness_ridges(F[L], PCTL[L], t_pctl_r, p_pctl))

    # calculation of the valley flatness step 1 VF1 (equation 4)
    VF.append(valley_flatness(PVF[L], t_vf, p_slope))
    MRVBF.append(None)
    if mrrtf != "":
        VF_RF.append(valley_flatness(PVF_RF[L], t_rf, p_slope))
        MRRTF.append(None)

    # step 2 at base scale resolution -------------------------------------------------
    L = 1
    TMP_RAST[L] = list()
    Xres_step.append(current_region.ewres)
    Yres_step.append(current_region.nsres)
    DEM.append(r_elevation)
    t_slope /= 2.0  # slope threshold halves at each step
    radius = 6
    step_message(L, Xres_step[L], Yres_step[L], current_region.cells, t_slope)

    # calculation of flatness for step 2 (equation 5); slope is unchanged
    SLOPE.append(SLOPE[L - 1])
    F.append(flatness(SLOPE[L], t_slope, p_slope))

    # calculation of elevation percentile PCTL for step 2 (radius of 6 cells)
    PCTL.append(elevation_percentile(r_elevation, radius, moving_window_square))

    # PVF for step 2 (equation 6)
    PVF.append(prelim_flatness_valleys(F[L], PCTL[L], t_pctl_v, p_pctl))
    if mrrtf != "":
        PVF_RF.append(prelim_flatness_ridges(F[L], PCTL[L], t_pctl_r, p_pctl))

    # calculation of the valley flatness VF for step 2 (equation 7)
    VF.append(valley_flatness(PVF[L], t_vf, p_slope))
    if mrrtf != "":
        VF_RF.append(valley_flatness(PVF_RF[L], t_rf, p_slope))

    # calculation of MRVBF for step 2
    MRVBF.append(calc_mrvbf(VF1=VF[L - 1], VF2=VF[L], t=t_pctl_v))
    if mrrtf != "":
        MRRTF.append(calc_mrvbf(VF1=VF_RF[L - 1], VF2=VF_RF[L], t=t_pctl_r))

    # update flatness for step 2 with combined flatness from F1 and F2 (equation 10)
    F[L] = combined_flatness(F[L - 1], F[L])

    # remaining steps -----------------------------------------------------------------
    # for steps >= 2, each step uses the smoothing radius of the current step
    # but at the dem resolution of the previous step
    remaining_cells = current_region.cells

    while remaining_cells >= min_cells:
        L += 1
        TMP_RAST[L] = list()
        t_slope /= 2.0
        Xres_step.append(Xres_step[L - 1] * 3)
        Yres_step.append(Yres_step[L - 1] * 3)
        radius = 6

        # delete temporary maps from L-2 (no longer referenced)
        for tmap in TMP_RAST[L - 2]:
            if len(gs.find_file(tmap)["fullname"]) > 0:
                g.remove(type="raster", name=tmap, flags="f", quiet=True)

        # coarsen resolution to resolution of previous step (step L-1) and smooth DEM
        if L > 2:
            g.region(ewres=Xres_step[L - 1], nsres=Yres_step[L - 1])
        step_message(L, Xres_step[L], Yres_step[L], remaining_cells, t_slope)
        DEM.append(smooth_dem(DEM[L - 1]))

        # calculate slope at coarser resolution
        SLOPE.append(calc_slope(DEM[L]))

        # refine slope back to base resolution
        if L > 2:
            SLOPE[L] = refine(SLOPE[L], current_region, method="bilinear")

        # coarsen resolution to current step L and calculate PCTL
        g.region(ewres=Xres_step[L], nsres=Yres_step[L])
        remaining_cells = Region().cells
        DEM[L] = refine(DEM[L], Region(), method="average")
        PCTL.append(elevation_percentile(DEM[L], radius, moving_window_square))

        # refine PCTL to base resolution
        PCTL[L] = refine(PCTL[L], current_region, method="bilinear")

        # calculate flatness F at the base resolution
        F.append(flatness(SLOPE[L], t_slope, p_slope))

        # update flatness with combined flatness CF from the previous step
        F[L] = combined_flatness(F1=F[L - 1], F2=F[L])

        # calculate preliminary valley flatness index PVF at the base resolution
        PVF.append(prelim_flatness_valleys(F[L], PCTL[L], t_pctl_v, p_pctl))
        if mrrtf != "":
            PVF_RF.append(
                prelim_flatness_ridges(F[L], PCTL[L], t_pctl_r, p_pctl))

        # calculate valley flatness index VF
        VF.append(valley_flatness(PVF[L], t_vf, p_slope))
        if mrrtf != "":
            VF_RF.append(valley_flatness(PVF_RF[L], t_rf, p_slope))

        # calculation of MRVBF: from step 3 on, the previous MRVBF (not VF)
        # is combined with the current VF
        MRVBF.append(calc_mrvbf(VF1=MRVBF[L - 1], VF2=VF[L], t=t_pctl_v))
        if mrrtf != "":
            MRRTF.append(calc_mrvbf(VF1=MRRTF[L - 1], VF2=VF_RF[L], t=t_pctl_r))

    # output final MRVBF --------------------------------------------------------------
    current_region.write()
    gs.mapcalc("$x = $y", x=mrvbf, y=MRVBF[L])

    if mrrtf != "":
        gs.mapcalc("$x = $y", x=mrrtf, y=MRRTF[L])