def cleanup():
    """Remove the temporary vector map and its attribute table, if present."""
    if not temporary_vect:
        return
    # Only remove the map if it was actually created.
    if gscript.find_file(temporary_vect, element='vector')['name']:
        gscript.run_command('g.remove', flags='f', type_='vector',
                            name=temporary_vect, quiet=True)
    # The table can outlive the map (it is created separately); drop it too.
    if gscript.db_table_exist(temporary_vect):
        drop_sql = 'DROP TABLE %s' % temporary_vect
        gscript.run_command('db.execute', sql=drop_sql, quiet=True)
def cleanup():
    """Delete the temporary vector map and its attribute table, if any."""
    if not temporary_vect:
        return
    map_found = gscript.find_file(temporary_vect, element="vector")["name"]
    if map_found:
        gscript.run_command(
            "g.remove", flags="f", type_="vector", name=temporary_vect, quiet=True
        )
    if gscript.db_table_exist(temporary_vect):
        gscript.run_command(
            "db.execute", sql="DROP TABLE %s" % temporary_vect, quiet=True
        )
def main():
    """Drop attribute table <table>.

    Without the force flag, only reports what would be deleted and exits.
    Exits with status 0 (not an error) when the table does not exist.
    """
    table = options['table']
    force = flags['f']

    # check if DB parameters are set, and if not set them.
    if not options['driver'] or not options['database']:
        grass.run_command('db.connect', flags='c', quiet=True)

    kv = grass.db_connection()
    database = options['database'] if options['database'] else kv['database']
    driver = options['driver'] if options['driver'] else kv['driver']
    # schema needed for PG?

    if force:
        grass.message(_("Forcing ..."))

    # check if table exists
    if not grass.db_table_exist(table):
        grass.warning(
            _("Table <%s> not found in database <%s>") % (table, database))
        sys.exit(0)

    # check if table is used somewhere (connected to vector map)
    used = grass.db.db_table_in_vector(table)
    if used:
        grass.warning(
            _("Deleting table <%s> which is attached to following map(s):") % table)
        for vect in used:
            grass.warning("%s" % vect)

    # Dry run unless forced.
    if not force:
        grass.message(_("The table <%s> would be deleted.") % table)
        grass.message("")
        grass.message(
            _("You must use the force flag to actually remove it. Exiting."))
        sys.exit(0)

    proc = grass.feed_command('db.execute', input='-',
                              database=database, driver=driver)
    proc.stdin.write(encode("DROP TABLE " + table))
    proc.stdin.close()
    proc.wait()
    if proc.returncode != 0:
        grass.fatal(_("Cannot continue (problem deleting table)."))
def main():
    """Drop attribute table <table>; require the force flag to delete.

    Exits fatally when the table does not exist or db.execute fails.
    """
    table = options['table']
    force = flags['f']

    if not options['driver'] or not options['database']:
        # check if DB parameters are set, and if not set them.
        # quiet=True for consistency with the rest of the module's calls.
        grass.run_command('db.connect', flags='c', quiet=True)

    kv = grass.db_connection()
    if options['database']:
        database = options['database']
    else:
        database = kv['database']
    if options['driver']:
        driver = options['driver']
    else:
        driver = kv['driver']
    # schema needed for PG?

    if force:
        grass.message(_("Forcing ..."))

    # check if table exists
    if not grass.db_table_exist(table):
        grass.fatal(_("Table <%s> not found in database <%s>") % (table, database))

    # check if table is used somewhere (connected to vector map)
    used = grass.db.db_table_in_vector(table)
    if used:
        grass.warning(
            _("Deleting table <%s> which is attached to following map(s):") % table)
        for vect in used:
            grass.warning("%s" % vect)

    if not force:
        grass.message(_("The table <%s> would be deleted.") % table)
        grass.message("")
        grass.message(
            _("You must use the force flag to actually remove it. Exiting."))
        sys.exit(0)

    p = grass.feed_command('db.execute', input='-',
                           database=database, driver=driver)
    # BUG FIX: feed_command's stdin is a binary pipe on Python 3 -- writing a
    # plain str raises TypeError. Encode the SQL statement before writing.
    p.stdin.write(("DROP TABLE " + table).encode())
    p.stdin.close()
    p.wait()
    if p.returncode != 0:
        grass.fatal(_("Cannot continue (problem deleting table)."))
def main():
    """Drop attribute table <table> from the current mapset's database.

    Scans all vector maps for attachments of the table before deleting;
    requires the force flag to actually delete.
    """
    table = options['table']
    force = flags['f']

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags='c')
    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    # schema needed for PG?

    if force:
        grass.message(_("Forcing ..."))

    # BUG FIX: the Python-2-only builtins file() and dict.itervalues() were
    # removed in Python 3 -- use open() and values(). Also close the null
    # device handle when done instead of leaking it.
    with open(os.devnull, 'w') as nuldev:
        # check if table exists
        if not grass.db_table_exist(table, stdout=nuldev, stderr=nuldev):
            grass.fatal(_("Table <%s> not found in current mapset") % table)

        # check if table is used somewhere (connected to vector map)
        used = []
        vects = grass.list_strings('vect')
        for vect in vects:
            for f in grass.vector_db(vect, stderr=nuldev).values():
                if not f:
                    continue
                if f['table'] == table:
                    used.append(vect)
                    break

    if used:
        grass.warning(
            _("Deleting table <%s> which is attached to following map(s):") % table)
        for vect in used:
            grass.message(vect)

    if not force:
        grass.message(_("The table <%s> would be deleted.") % table)
        grass.message("")
        grass.message(
            _("You must use the force flag to actually remove it. Exiting."))
        sys.exit(0)

    p = grass.feed_command('db.execute', input='-',
                           database=database, driver=driver)
    p.stdin.write("DROP TABLE " + table)
    p.stdin.close()
    p.wait()
    if p.returncode != 0:
        grass.fatal(_("Cannot continue (problem deleting table)."))
def cleanup():
    """Best-effort removal of all temporary artifacts created by main()."""
    if temporary_vect:
        # Drop the temporary vector map if it was actually created...
        if gscript.find_file(temporary_vect, element='vector')['name']:
            gscript.run_command('g.remove', flags='f', type_='vector',
                                name=temporary_vect, quiet=True)
        # ...and its standalone attribute table, if it exists.
        if gscript.db_table_exist(temporary_vect):
            gscript.run_command('db.execute',
                                sql='DROP TABLE %s' % temporary_vect,
                                quiet=True)
    if insert_sql:
        os.remove(insert_sql)
    if stats_temp_file:
        os.remove(stats_temp_file)
    if rasters:
        # One per-raster stats file per processed raster.
        for leftover in glob.glob(stats_temp_file + ".*"):
            os.remove(leftover)
def cleanup():
    """Remove temporary map, table and scratch files left behind by main()."""
    if temporary_vect:
        present = gscript.find_file(temporary_vect, element="vector")["name"]
        if present:
            gscript.run_command(
                "g.remove", flags="f", type_="vector", name=temporary_vect, quiet=True
            )
        if gscript.db_table_exist(temporary_vect):
            gscript.run_command(
                "db.execute", sql="DROP TABLE %s" % temporary_vect, quiet=True
            )
    # Scratch files are optional; remove whichever were created.
    for path in (insert_sql, stats_temp_file):
        if path:
            os.remove(path)
    if rasters:
        for per_raster_file in glob.glob(stats_temp_file + ".*"):
            os.remove(per_raster_file)
def main():
    """Compute zonal class statistics (mode and/or class proportions).

    Cross-tabulates 'zone_map' against 'raster' with r.stats, then derives
    per-zone modal class and/or per-class proportions, written to a CSV file
    and/or attached to a vector map derived from the zones.
    """
    # Globals so that cleanup() can remove whatever was created on exit.
    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None
    global content
    content = None
    global raster
    raster = options['raster']
    global decimals
    decimals = int(options['decimals'])
    global zone_map
    zone_map = options['zone_map']

    csvfile = options['csvfile'] if options['csvfile'] else []
    separator = gscript.separator(options['separator'])
    prefix = options['prefix'] if options['prefix'] else []
    classes_list = options['classes_list'].split(
        ',') if options['classes_list'] else []
    vectormap = options['vectormap'] if options['vectormap'] else []
    prop = False if 'proportion' not in options['statistics'].split(
        ',') else True
    mode = False if 'mode' not in options['statistics'].split(',') else True

    # Check if input layer is CELL
    if gscript.parse_command('r.info', flags='g',
                             map=raster)['datatype'] != 'CELL':
        gscript.fatal(
            _("The type of the input map 'raster' is not CELL. Please use raster with integer values"
              ))
    if gscript.parse_command('r.info', flags='g',
                             map=zone_map)['datatype'] != 'CELL':
        gscript.fatal(
            _("The type of the input map 'zone_map' is not CELL. Please use raster with integer values"
              ))

    # Check if 'decimals' is + and with credible value
    if decimals <= 0:
        gscript.fatal(_("The number of decimals should be positive"))
    if decimals > 100:
        gscript.fatal(_("The number of decimals should not be more than 100"))

    # Adjust region to input map is flag active
    if flags['r']:
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=zone_map)

    # R.STATS
    tmpfile = gscript.tempfile()
    try:
        if flags['n']:
            gscript.run_command(
                'r.stats', overwrite=True, flags='c',
                input='%s,%s' % (zone_map, raster), output=tmpfile,
                separator=separator)  # Consider null values in R.STATS
        else:
            gscript.run_command(
                'r.stats', overwrite=True, flags='cn',
                input='%s,%s' % (zone_map, raster), output=tmpfile,
                separator=separator)  # Do not consider null values in R.STATS
        gscript.message(_("r.stats command finished..."))
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
        gscript.fatal(_("The execution of r.stats failed"))

    # COMPUTE STATISTICS
    # Open csv file and create a csv reader
    rstatsfile = open(tmpfile, 'r')
    reader = csv.reader(rstatsfile, delimiter=separator)
    # Total pixels per category per zone.
    # Row layout is zone_cat, raster_cat, count (r.stats -c output).
    totals_dict = {}
    for row in reader:
        if row[0] not in totals_dict:  # Will pass the condition only if the current zone ID does not exists in the dictionary
            totals_dict[row[0]] = {
            }  # Declare a new embedded dictionnary for the current zone ID
        totals_dict[row[0]][row[1]] = int(row[2])
    # Delete key '*' in 'totals_dict' that could append if there are null values on the zone raster
    if '*' in totals_dict:
        del totals_dict['*']
    # Close file
    rstatsfile.close()

    # Mode
    if mode:
        modalclass_dict = {}
        for ID in totals_dict:
            # The trick was found here : https://stackoverflow.com/a/268285/8013239
            # NOTE(review): this rebinds the boolean 'mode' flag to the modal
            # category value; works because the loop body only needs truthiness
            # afterwards, but the shadowing is fragile.
            mode = max(iter(totals_dict[ID].items()),
                       key=operator.itemgetter(1))[0]
            if mode == '*':  # If the mode is NULL values
                modalclass_dict[ID] = 'NULL'
            else:
                modalclass_dict[ID] = mode

    # Classes proportions
    if prop:
        # Get list of categories to output
        if classes_list:  #If list of classes provided by user
            class_dict = {str(a): ''
                          for a in classes_list}  #To be sure it's string format
        else:
            class_dict = {}
        # Proportion of each category per zone
        proportion_dict = {}
        for ID in totals_dict:
            proportion_dict[ID] = {}
            for cl in totals_dict[ID]:
                if flags['p']:
                    # Percentage (0-100) instead of ratio (0-1).
                    proportion_dict[ID][cl] = round(
                        float(totals_dict[ID][cl]) /
                        sum(totals_dict[ID].values()) * 100, decimals)
                else:
                    proportion_dict[ID][cl] = round(
                        float(totals_dict[ID][cl]) /
                        sum(totals_dict[ID].values()), decimals)
                if cl == '*':
                    class_dict['NULL'] = ''
                else:
                    class_dict[cl] = ''
        # Fill class not met in the raster with zero
        for ID in proportion_dict:
            for cl in class_dict:
                if cl not in proportion_dict[ID].keys():
                    proportion_dict[ID][cl] = '{:.{}f}'.format(0, decimals)
        # Get list of class sorted by value (arithmetic)
        if 'NULL' in class_dict.keys():
            class_list = [int(k) for k in class_dict.keys() if k != 'NULL']
            class_list.sort()
            class_list.append('NULL')
        else:
            class_list = [int(k) for k in class_dict.keys()]
            class_list.sort()
    gscript.verbose(_("Statistics computed..."))

    # OUTPUT CONTENT
    # Header
    header = [
        'cat',
    ]
    if mode:
        if prefix:
            header.append('%s_mode' % prefix)
        else:
            header.append('mode')
    if prop:
        if prefix:
            [header.append('%s_prop_%s' % (prefix, cl)) for cl in class_list]
        else:
            [header.append('prop_%s' % cl) for cl in class_list]
    # Values
    value_dict = {}
    for ID in totals_dict:
        value_dict[ID] = []
        if mode:
            value_dict[ID].append(modalclass_dict[ID])
        if prop:
            for cl in class_list:
                value_dict[ID].append(proportion_dict[ID]['%s' % cl])

    # WRITE OUTPUT
    if csvfile:
        outfile = open(csvfile, 'w')
        writer = csv.writer(outfile, delimiter=separator)
        writer.writerow(header)
        # Work on a deep copy so value_dict stays ID-free for the SQL path.
        csvcontent_dict = copy.deepcopy(value_dict)
        [csvcontent_dict[ID].insert(0, ID) for ID in csvcontent_dict]
        # NOTE(review): the next list comprehension is a no-op.
        [csvcontent_dict[ID] for ID in csvcontent_dict]
        writer.writerows(csvcontent_dict.values())
        outfile.close()
    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = 'rzonalclasses_tmp_vect_%d' % os.getpid()
        gscript.run_command('r.to.vect', input_=zone_map,
                            output=temporary_vect, type_='area', flags='vt',
                            overwrite=True, quiet=True)
        insert_sql = gscript.tempfile()
        fsql = open(insert_sql, 'w')
        fsql.write('BEGIN TRANSACTION;\n')
        if gscript.db_table_exist(temporary_vect):
            if gscript.overwrite():
                fsql.write('DROP TABLE %s;' % temporary_vect)
            else:
                gscript.fatal(
                    _("Table %s already exists. Use --o to overwrite") %
                    temporary_vect)
        create_statement = 'CREATE TABLE ' + temporary_vect + ' (cat int PRIMARY KEY);\n'
        fsql.write(create_statement)
        for col in header[1:]:
            if col.split('_')[-1] == 'mode':  # Mode column should be integer
                addcol_statement = 'ALTER TABLE %s ADD COLUMN %s integer;\n' % (
                    temporary_vect, col)
            else:  # Proportions column should be double precision
                addcol_statement = 'ALTER TABLE %s ADD COLUMN %s double precision;\n' % (
                    temporary_vect, col)
            fsql.write(addcol_statement)
        for key in value_dict:
            insert_statement = 'INSERT INTO %s VALUES (%s, %s);\n' % (
                temporary_vect, key, ','.join(
                    [str(x) for x in value_dict[key]]))
            fsql.write(insert_statement)
        fsql.write('END TRANSACTION;')
        fsql.close()
        gscript.run_command('db.execute', input=insert_sql, quiet=True)
        gscript.run_command('v.db.connect', map_=temporary_vect,
                            table=temporary_vect, quiet=True)
        gscript.run_command('g.copy',
                            vector='%s,%s' % (temporary_vect, vectormap),
                            quiet=True)
def main():
    """Compute per-segment statistics (geometry, raster, neighborhood).

    Gathers object geometry measures, per-raster univariate statistics
    (computed in parallel via worker()), and optional neighborhood
    mean/stddev, then writes the result to a CSV file and/or a vector map.
    """
    # Globals so that cleanup() can remove whatever was created on exit.
    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None

    segment_map = options['map']
    csvfile = options['csvfile'] if options['csvfile'] else []
    vectormap = options['vectormap'] if options['vectormap'] else []
    global rasters
    rasters = options['rasters'].split(',') if options['rasters'] else []
    area_measures = options['area_measures'].split(',') if (
        options['area_measures'] and not flags['s']) else []
    if area_measures:
        # Geometry measures need the r.object.geometry addon.
        if not gscript.find_program('r.object.geometry', '--help'):
            message = _(
                "You need to install the addon r.object.geometry to be able")
            message += _(" to calculate area measures.\n")
            message += _(
                " You can install the addon with 'g.extension r.object.geometry'"
            )
            gscript.fatal(message)
    neighborhood = True if flags['n'] else False
    if neighborhood:
        # Neighborhood statistics need the r.neighborhoodmatrix addon.
        if not gscript.find_program('r.neighborhoodmatrix', '--help'):
            message = _(
                "You need to install the addon r.neighborhoodmatrix to be able"
            )
            message += _(" to calculate area measures.\n")
            message += _(
                " You can install the addon with 'g.extension r.neighborhoodmatrix'"
            )
            gscript.fatal(message)

    raster_statistics = options['raster_statistics'].split(
        ',') if options['raster_statistics'] else []
    separator = gscript.separator(options['separator'])
    processes = int(options['processes'])
    output_header = ['cat']
    output_dict = collections.defaultdict(list)

    # Column index of each statistic in r.univar-style '|'-separated output.
    raster_stat_dict = {
        'zone': 0, 'min': 4, 'third_quart': 16, 'max': 5, 'sum': 12,
        'null_cells': 3, 'median': 15, 'label': 1, 'first_quart': 14,
        'range': 6, 'mean_of_abs': 8, 'stddev': 9, 'non_null_cells': 2,
        'coeff_var': 11, 'variance': 10, 'sum_abs': 13, 'perc_90': 17,
        'mean': 7
    }

    # Column index of each measure in r.object.geometry output.
    geometry_stat_dict = {
        'cat': 0, 'area': 1, 'perimeter': 2, 'compact_square': 3,
        'compact_circle': 4, 'fd': 5, 'xcoords': 6, 'ycoords': 7
    }

    if flags['r']:
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=segment_map)

    stats_temp_file = gscript.tempfile()
    if area_measures:
        gscript.message(_("Calculating geometry statistics..."))
        output_header += area_measures
        stat_indices = [geometry_stat_dict[x] for x in area_measures]
        gscript.run_command('r.object.geometry', input_=segment_map,
                            output=stats_temp_file, overwrite=True,
                            quiet=True)

        firstline = True
        with open(stats_temp_file, 'r') as fin:
            for line in fin:
                if firstline:
                    # Skip the header row.
                    firstline = False
                    continue
                values = line.rstrip().split('|')
                output_dict[values[0]] = [values[x] for x in stat_indices]

    if rasters:
        if not flags['c']:
            gscript.message(_("Checking usability of raster maps..."))
            rasters_to_remove = []
            for raster in rasters:
                null_values_found = False
                if not gscript.find_file(raster, element='cell')['name']:
                    gscript.message(_("Cannot find raster '%s'" % raster))
                    gscript.message(_("Removing this raster from list."))
                    rasters_to_remove.append(raster)
                    continue
                current_mapset = gscript.gisenv()['MAPSET']
                # With an active MASK, null detection goes through r.stats
                # on MASK+raster; otherwise r.univar's null_cells is used.
                if gscript.find_file('MASK', element='cell',
                                     mapset=current_mapset)['name']:

                    null_test = gscript.read_command('r.stats', flags='N',
                                                     input_=['MASK', raster],
                                                     quiet=True).splitlines()
                    if '1 *' in null_test:
                        null_values_found = True

                else:
                    raster_info = gscript.parse_command('r.univar', flags='g',
                                                        map_=raster,
                                                        quiet=True)
                    if len(raster_info) == 0 or int(
                            raster_info['null_cells']) > 0:
                        null_values_found = True

                if null_values_found:
                    message = 'Raster <%s> contains null values.\n' % raster
                    message += 'This can lead to errors in the calculations.\n'
                    message += 'Check region settings and raster extent.\n'
                    message += 'Possibly fill null values of raster.\n'
                    message += 'Removing this raster from list.'
                    gscript.warning(message)
                    rasters_to_remove.append(raster)

            for raster in rasters_to_remove:
                rasters.remove(raster)

        if len(rasters) > 0:
            gscript.message(
                _("Calculating statistics for the following raster maps:"))
            gscript.message(','.join(rasters))
            if len(rasters) < processes:
                processes = len(rasters)
                gscript.message(
                    _("Only one process per raster. Reduced number of processes to %i."
                      % processes))
            stat_indices = [raster_stat_dict[x] for x in raster_statistics]
            # One worker per raster; worker() writes stats_temp_file.<raster>.
            pool = Pool(processes)
            func = partial(worker, segment_map, stats_temp_file)
            pool.map(func, rasters)
            pool.close()
            pool.join()

            for raster in rasters:
                rastername = raster.split('@')[0]
                rastername = rastername.replace('.', '_')
                temp_file = stats_temp_file + '.' + rastername
                output_header += [
                    rastername + "_" + x for x in raster_statistics
                ]
                firstline = True
                with open(temp_file, 'r') as fin:
                    for line in fin:
                        if firstline:
                            firstline = False
                            continue
                        values = line.rstrip().split('|')
                        output_dict[values[0]] = output_dict[values[0]] + [
                            values[x] for x in stat_indices
                        ]

    # Calculating neighborhood statistics if requested
    if neighborhood:

        gscript.message(_("Calculating neighborhood statistics..."))

        # Add neighbordhood statistics to headers
        original_nb_values = len(output_header) - 1
        new_headers = ['neighbors_count']
        for i in range(1, len(output_header)):
            new_headers.append('%s_nbrmean' % output_header[i])
            new_headers.append('%s_nbrstddev' % output_header[i])

        output_header += new_headers

        # Get sorted neighborhood matrix
        # (sorted so that groupby() groups all rows of one segment together)
        nbr_matrix = sorted([
            x.split('|')
            for x in gscript.read_command('r.neighborhoodmatrix',
                                          input_=segment_map, flags='d',
                                          quiet=True).splitlines()
        ])

        # Calculate mean and stddev of neighbor values for each variable in the
        # output_dict
        for key, group in groupby(nbr_matrix, lambda x: x[0]):
            # d holds one Welford-style accumulator triple per variable;
            # update()/finalize() are defined elsewhere in this file.
            d = {}
            for i in range(original_nb_values):
                d[i] = (0, 0, 0)
            nbrlist = [str(x[1]) for x in group]
            if len(nbrlist) > 1:
                for nbr in nbrlist:
                    for i in range(original_nb_values):
                        d[i] = update(d[i], float(output_dict[nbr][i]))
                output_dict[key] = output_dict[key] + [str(len(nbrlist))]
                output_dict[key] = output_dict[key] + [
                    str(i)
                    for sub in [finalize(x) for x in d.values()] for i in sub
                ]
            else:
                # Single neighbor: mean is its value, stddev is 0.
                newvalues = ['1']
                nbr = nbrlist[0]
                for i in range(original_nb_values):
                    newvalues.append(output_dict[nbr][i])
                    newvalues.append('0')
                output_dict[key] = output_dict[key] + newvalues

    message = _("Some values could not be calculated for the objects below. ")
    message += _("These objects are thus not included in the results. ")
    message += _("HINT: Check some of the raster maps for null values ")
    message += _("and possibly fill these values with r.fillnulls.")
    error_objects = []

    if csvfile:
        with open(csvfile, 'w') as f:
            f.write(separator.join(output_header) + "\n")
            for key in output_dict:
                # Only write rows with a complete set of values.
                if len(output_dict[key]) + 1 == len(output_header):
                    f.write(key + separator +
                            separator.join(output_dict[key]) + "\n")
                else:
                    error_objects.append(key)
        # NOTE(review): redundant -- the with-statement already closed f.
        f.close()

    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = 'segmstat_tmp_vect_%d' % os.getpid()
        gscript.run_command('r.to.vect', input_=segment_map,
                            output=temporary_vect, type_='area', flags='vt',
                            overwrite=True, quiet=True)

        insert_sql = gscript.tempfile()
        fsql = open(insert_sql, 'w')
        fsql.write('BEGIN TRANSACTION;\n')
        if gscript.db_table_exist(temporary_vect):
            if gscript.overwrite():
                fsql.write('DROP TABLE %s;' % temporary_vect)
            else:
                gscript.fatal(
                    _("Table %s already exists. Use --o to overwrite" %
                      temporary_vect))
        create_statement = 'CREATE TABLE ' + temporary_vect + ' (cat int PRIMARY KEY);\n'
        fsql.write(create_statement)
        for header in output_header[1:]:
            addcol_statement = 'ALTER TABLE %s ADD COLUMN %s double precision;\n' % (
                temporary_vect, header)
            fsql.write(addcol_statement)
        for key in output_dict:
            if len(output_dict[key]) + 1 == len(output_header):
                sql = "INSERT INTO %s VALUES (%s, %s);\n" % (
                    temporary_vect, key, ",".join(output_dict[key]))
                # SQL has no inf/nan literals -- store them as NULL.
                sql = sql.replace('inf', 'NULL')
                sql = sql.replace('nan', 'NULL')
                fsql.write(sql)
            else:
                if not csvfile:
                    error_objects.append(key)
        fsql.write('END TRANSACTION;')
        fsql.close()

        gscript.run_command('db.execute', input=insert_sql, quiet=True)
        gscript.run_command('v.db.connect', map_=temporary_vect,
                            table=temporary_vect, quiet=True)
        gscript.run_command('g.copy',
                            vector="%s,%s" % (temporary_vect, vectormap),
                            quiet=True)

    if error_objects:
        object_string = ', '.join(error_objects[:100])
        message += _(
            "\n\nObjects with errors (only first 100 are shown):\n%s" %
            object_string)
        gscript.message(message)
def main():
    """Compute per-segment statistics (geometry, raster, neighborhood).

    Gathers object geometry measures, per-raster univariate statistics
    (computed in parallel via worker()), and optional neighborhood
    mean/stddev, then writes the result to a CSV file and/or a vector map.
    """
    # Globals so that cleanup() can remove whatever was created on exit.
    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None

    segment_map = options["map"]
    csvfile = options["csvfile"] if options["csvfile"] else []
    vectormap = options["vectormap"] if options["vectormap"] else []
    global rasters
    rasters = options["rasters"].split(",") if options["rasters"] else []
    # NOTE(review): unlike the r.neighborhoodmatrix check below, availability
    # of the r.object.geometry addon is not verified here -- confirm whether
    # that check was dropped deliberately.
    area_measures = (
        options["area_measures"].split(",")
        if (options["area_measures"] and not flags["s"])
        else []
    )
    neighborhood = True if flags["n"] else False
    if neighborhood:
        # Neighborhood statistics need the r.neighborhoodmatrix addon.
        if not gscript.find_program("r.neighborhoodmatrix", "--help"):
            message = _("You need to install the addon r.neighborhoodmatrix to be able")
            message += _(" to calculate area measures.\n")
            message += _(
                " You can install the addon with 'g.extension r.neighborhoodmatrix'"
            )
            gscript.fatal(message)

    raster_statistics = (
        options["raster_statistics"].split(",") if options["raster_statistics"] else []
    )
    separator = gscript.separator(options["separator"])
    processes = int(options["processes"])
    output_header = ["cat"]
    output_dict = collections.defaultdict(list)

    # Column index of each statistic in r.univar-style '|'-separated output.
    raster_stat_dict = {
        "zone": 0,
        "min": 4,
        "third_quart": 16,
        "max": 5,
        "sum": 12,
        "null_cells": 3,
        "median": 15,
        "label": 1,
        "first_quart": 14,
        "range": 6,
        "mean_of_abs": 8,
        "stddev": 9,
        "non_null_cells": 2,
        "coeff_var": 11,
        "variance": 10,
        "sum_abs": 13,
        "perc_90": 17,
        "mean": 7,
    }

    # Column index of each measure in r.object.geometry output.
    geometry_stat_dict = {
        "cat": 0,
        "area": 1,
        "perimeter": 2,
        "compact_square": 3,
        "compact_circle": 4,
        "fd": 5,
        "xcoords": 6,
        "ycoords": 7,
    }

    if flags["r"]:
        gscript.use_temp_region()
        gscript.run_command("g.region", raster=segment_map)

    stats_temp_file = gscript.tempfile()
    if area_measures:
        gscript.message(_("Calculating geometry statistics..."))
        output_header += area_measures
        stat_indices = [geometry_stat_dict[x] for x in area_measures]
        gscript.run_command(
            "r.object.geometry",
            input_=segment_map,
            output=stats_temp_file,
            overwrite=True,
            quiet=True,
        )

        firstline = True
        with open(stats_temp_file, "r") as fin:
            for line in fin:
                if firstline:
                    # Skip the header row.
                    firstline = False
                    continue
                values = line.rstrip().split("|")
                output_dict[values[0]] = [values[x] for x in stat_indices]

    if rasters:
        if not flags["c"]:
            gscript.message(_("Checking usability of raster maps..."))
            rasters_to_remove = []
            for raster in rasters:
                null_values_found = False
                if not gscript.find_file(raster, element="cell")["name"]:
                    gscript.message(_("Cannot find raster '%s'" % raster))
                    gscript.message(_("Removing this raster from list."))
                    rasters_to_remove.append(raster)
                    continue
                current_mapset = gscript.gisenv()["MAPSET"]
                # With an active MASK, null detection goes through r.stats
                # on MASK+raster; otherwise r.univar's null_cells is used.
                if gscript.find_file("MASK", element="cell", mapset=current_mapset)[
                    "name"
                ]:

                    null_test = gscript.read_command(
                        "r.stats", flags="N", input_=["MASK", raster], quiet=True
                    ).splitlines()
                    if "1 *" in null_test:
                        null_values_found = True

                else:
                    raster_info = gscript.parse_command(
                        "r.univar", flags="g", map_=raster, quiet=True
                    )
                    if len(raster_info) == 0 or int(raster_info["null_cells"]) > 0:
                        null_values_found = True

                if null_values_found:
                    message = "Raster <%s> contains null values.\n" % raster
                    message += "This can lead to errors in the calculations.\n"
                    message += "Check region settings and raster extent.\n"
                    message += "Possibly fill null values of raster.\n"
                    message += "Removing this raster from list."
                    gscript.warning(message)
                    rasters_to_remove.append(raster)

            for raster in rasters_to_remove:
                rasters.remove(raster)

        if len(rasters) > 0:
            gscript.message(_("Calculating statistics for the following raster maps:"))
            gscript.message(",".join(rasters))
            if len(rasters) < processes:
                processes = len(rasters)
                gscript.message(
                    _(
                        "Only one process per raster. Reduced number of processes to %i."
                        % processes
                    )
                )
            stat_indices = [raster_stat_dict[x] for x in raster_statistics]
            # One worker per raster; worker() writes stats_temp_file.<raster>.
            pool = Pool(processes)
            func = partial(worker, segment_map, stats_temp_file)
            pool.map(func, rasters)
            pool.close()
            pool.join()

            for raster in rasters:
                rastername = raster.split("@")[0]
                rastername = rastername.replace(".", "_")
                temp_file = stats_temp_file + "." + rastername
                output_header += [rastername + "_" + x for x in raster_statistics]
                firstline = True
                with open(temp_file, "r") as fin:
                    for line in fin:
                        if firstline:
                            firstline = False
                            continue
                        values = line.rstrip().split("|")
                        output_dict[values[0]] = output_dict[values[0]] + [
                            values[x] for x in stat_indices
                        ]

    # Calculating neighborhood statistics if requested
    if neighborhood:

        gscript.message(_("Calculating neighborhood statistics..."))

        # Add neighbordhood statistics to headers
        original_nb_values = len(output_header) - 1
        new_headers = ["neighbors_count"]
        for i in range(1, len(output_header)):
            new_headers.append("%s_nbrmean" % output_header[i])
            new_headers.append("%s_nbrstddev" % output_header[i])

        output_header += new_headers

        # Get sorted neighborhood matrix
        # (sorted so that groupby() groups all rows of one segment together)
        nbr_matrix = sorted(
            [
                x.split("|")
                for x in gscript.read_command(
                    "r.neighborhoodmatrix", input_=segment_map, flags="d", quiet=True
                ).splitlines()
            ]
        )

        # Calculate mean and stddev of neighbor values for each variable in the
        # output_dict
        for key, group in groupby(nbr_matrix, lambda x: x[0]):
            # d holds one Welford-style accumulator triple per variable;
            # update()/finalize() are defined elsewhere in this file.
            d = {}
            for i in range(original_nb_values):
                d[i] = (0, 0, 0)
            nbrlist = [str(x[1]) for x in group]
            if len(nbrlist) > 1:
                for nbr in nbrlist:
                    for i in range(original_nb_values):
                        d[i] = update(d[i], float(output_dict[nbr][i]))
                output_dict[key] = output_dict[key] + [str(len(nbrlist))]
                output_dict[key] = output_dict[key] + [
                    str(i) for sub in [finalize(x) for x in d.values()] for i in sub
                ]
            else:
                # Single neighbor: mean is its value, stddev is 0.
                newvalues = ["1"]
                nbr = nbrlist[0]
                for i in range(original_nb_values):
                    newvalues.append(output_dict[nbr][i])
                    newvalues.append("0")
                output_dict[key] = output_dict[key] + newvalues

    message = _("Some values could not be calculated for the objects below. ")
    message += _("These objects are thus not included in the results. ")
    message += _("HINT: Check some of the raster maps for null values ")
    message += _("and possibly fill these values with r.fillnulls.")
    error_objects = []

    if csvfile:
        with open(csvfile, "w") as f:
            f.write(separator.join(output_header) + "\n")
            for key in output_dict:
                # Only write rows with a complete set of values.
                if len(output_dict[key]) + 1 == len(output_header):
                    f.write(key + separator + separator.join(output_dict[key]) + "\n")
                else:
                    error_objects.append(key)
        # NOTE(review): redundant -- the with-statement already closed f.
        f.close()

    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = "segmstat_tmp_vect_%d" % os.getpid()
        gscript.run_command(
            "r.to.vect",
            input_=segment_map,
            output=temporary_vect,
            type_="area",
            flags="vt",
            overwrite=True,
            quiet=True,
        )

        insert_sql = gscript.tempfile()
        fsql = open(insert_sql, "w")
        fsql.write("BEGIN TRANSACTION;\n")
        if gscript.db_table_exist(temporary_vect):
            if gscript.overwrite():
                fsql.write("DROP TABLE %s;" % temporary_vect)
            else:
                gscript.fatal(
                    _("Table %s already exists. Use --o to overwrite" % temporary_vect)
                )
        create_statement = (
            "CREATE TABLE " + temporary_vect + " (cat int PRIMARY KEY);\n"
        )
        fsql.write(create_statement)
        for header in output_header[1:]:
            addcol_statement = "ALTER TABLE %s ADD COLUMN %s double precision;\n" % (
                temporary_vect,
                header,
            )
            fsql.write(addcol_statement)
        for key in output_dict:
            if len(output_dict[key]) + 1 == len(output_header):
                sql = "INSERT INTO %s VALUES (%s, %s);\n" % (
                    temporary_vect,
                    key,
                    ",".join(output_dict[key]),
                )
                # SQL has no inf/nan literals -- store them as NULL.
                sql = sql.replace("inf", "NULL")
                sql = sql.replace("nan", "NULL")
                fsql.write(sql)
            else:
                if not csvfile:
                    error_objects.append(key)
        fsql.write("END TRANSACTION;")
        fsql.close()

        gscript.run_command("db.execute", input=insert_sql, quiet=True)
        gscript.run_command(
            "v.db.connect", map_=temporary_vect, table=temporary_vect, quiet=True
        )
        gscript.run_command(
            "g.copy", vector="%s,%s" % (temporary_vect, vectormap), quiet=True
        )

    if error_objects:
        object_string = ", ".join(error_objects[:100])
        message += _(
            "\n\nObjects with errors (only first 100 are shown):\n%s" % object_string
        )
        gscript.message(message)
def main():
    """Compute zonal class statistics (mode and/or class proportions).

    Cross-tabulates 'zone_map' against 'raster' with r.stats, then derives
    per-zone modal class and/or per-class proportions, written to a CSV file
    and/or attached to a vector map derived from the zones.
    """
    # Globals so that cleanup() can remove whatever was created on exit.
    global insert_sql
    insert_sql = None
    global temporary_vect
    temporary_vect = None
    global stats_temp_file
    stats_temp_file = None
    global content
    content = None
    global raster
    raster = options["raster"]
    global decimals
    decimals = int(options["decimals"])
    global zone_map
    zone_map = options["zone_map"]

    csvfile = options["csvfile"] if options["csvfile"] else []
    separator = gscript.separator(options["separator"])
    prefix = options["prefix"] if options["prefix"] else []
    classes_list = options["classes_list"].split(
        ",") if options["classes_list"] else []
    vectormap = options["vectormap"] if options["vectormap"] else []
    prop = False if "proportion" not in options["statistics"].split(
        ",") else True
    mode = False if "mode" not in options["statistics"].split(",") else True

    if flags[
            "c"]:  # Check only if flag activated - Can be bottleneck in case of very large raster.
        # Check if input layer is CELL
        if gscript.parse_command("r.info", flags="g",
                                 map=raster)["datatype"] != "CELL":
            gscript.fatal(
                _("The type of the input map 'raster' is not CELL. Please use raster with integer values"
                  ))
        if (gscript.parse_command("r.info", flags="g",
                                  map=zone_map)["datatype"] != "CELL"):
            gscript.fatal(
                _("The type of the input map 'zone_map' is not CELL. Please use raster with integer values"
                  ))

    # Check if 'decimals' is + and with credible value
    if decimals <= 0:
        gscript.fatal(_("The number of decimals should be positive"))
    if decimals > 100:
        gscript.fatal(_("The number of decimals should not be more than 100"))

    # Adjust region to input map is flag active
    if flags["r"]:
        gscript.use_temp_region()
        gscript.run_command("g.region", raster=zone_map)

    # R.STATS
    tmpfile = gscript.tempfile()
    try:
        if flags["n"]:
            gscript.run_command(
                "r.stats",
                overwrite=True,
                flags="c",
                input="%s,%s" % (zone_map, raster),
                output=tmpfile,
                separator=separator,
            )  # Consider null values in R.STATS
        else:
            gscript.run_command(
                "r.stats",
                overwrite=True,
                flags="cn",
                input="%s,%s" % (zone_map, raster),
                output=tmpfile,
                separator=separator,
            )  # Do not consider null values in R.STATS
        gscript.message(_("r.stats command finished..."))
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
        gscript.fatal(_("The execution of r.stats failed"))

    # COMPUTE STATISTICS
    # Open csv file and create a csv reader
    rstatsfile = open(tmpfile, "r")
    reader = csv.reader(rstatsfile, delimiter=separator)
    # Total pixels per category per zone.
    # Row layout is zone_cat, raster_cat, count (r.stats -c output).
    totals_dict = {}
    for row in reader:
        if (
                row[0] not in totals_dict
        ):  # Will pass the condition only if the current zone ID does not exists yet in the dictionary
            totals_dict[row[0]] = {
            }  # Declare a new embedded dictionnary for the current zone ID
        # NOTE(review): both branches below are identical -- the -l filtering
        # appears to be handled later in the proportions loop; confirm whether
        # this if/else is a leftover.
        if (
                flags["l"] and row[1] in classes_list
        ):  # Will pass only if flag -l is active and if the current class is in the 'classes_list'
            totals_dict[row[0]][row[1]] = int(row[2])
        else:
            totals_dict[row[0]][row[1]] = int(row[2])
    # Delete key '*' in 'totals_dict' that could append if there are null values on the zone raster
    if "*" in totals_dict:
        del totals_dict["*"]
    # Close file
    rstatsfile.close()
    # Get list of ID
    id_list = [ID for ID in totals_dict]
    # Mode
    if mode:
        modalclass_dict = {}
        for ID in id_list:
            # The trick was found here : https://stackoverflow.com/a/268285/8013239
            # NOTE(review): this rebinds the boolean 'mode' flag to the modal
            # category value; only truthiness is needed afterwards, but the
            # shadowing is fragile.
            mode = max(iter(totals_dict[ID].items()),
                       key=operator.itemgetter(1))[0]
            if mode == "*":  # If the mode is NULL values
                modalclass_dict[ID] = "NULL"
            else:
                modalclass_dict[ID] = mode
    # Class proportions
    if prop:
        # Get list of categories to output
        if classes_list:  # If list of classes provided by user
            class_dict = {str(int(a)): ""
                          for a in classes_list
                          }  # To be sure it's string format
        else:
            class_dict = {}
        # Proportion of each category per zone
        proportion_dict = {}
        for ID in id_list:
            proportion_dict[ID] = {}
            for cl in totals_dict[ID]:
                if (
                        flags["l"] and cl not in classes_list
                ):  # with flag -l, output will contain only classes from 'classes_list'
                    continue
                if flags["p"]:
                    # Percentage (0-100) instead of ratio (0-1).
                    prop_value = (float(totals_dict[ID][cl]) /
                                  sum(totals_dict[ID].values()) * 100)
                else:
                    prop_value = float(totals_dict[ID][cl]) / sum(
                        totals_dict[ID].values())
                proportion_dict[ID][cl] = "{:.{}f}".format(
                    prop_value, decimals)
                if cl == "*":
                    class_dict["NULL"] = ""
                else:
                    class_dict[cl] = ""
        # Fill class not met in the raster with zero
        for ID in proportion_dict:
            for cl in class_dict:
                if cl not in proportion_dict[ID].keys():
                    proportion_dict[ID][cl] = "{:.{}f}".format(0, decimals)
        # Get list of class sorted by value (arithmetic ordering)
        if "NULL" in class_dict.keys():
            class_list = sorted(
                [int(k) for k in class_dict.keys() if k != "NULL"])
            class_list.append("NULL")
        else:
            class_list = sorted([int(k) for k in class_dict.keys()])
    gscript.verbose(_("Statistics computed..."))
    # Set 'totals_dict' to None to try RAM release
    totals_dict = None
    # OUTPUT CONTENT
    # Header
    header = [
        "cat",
    ]
    if mode:
        if prefix:
            header.append("%s_mode" % prefix)
        else:
            header.append("mode")
    if prop:
        if prefix:
            [header.append("%s_prop_%s" % (prefix, cl)) for cl in class_list]
        else:
            [header.append("prop_%s" % cl) for cl in class_list]
    # Values
    value_dict = {}
    for ID in id_list:
        value_dict[ID] = []
        value_dict[ID].append(ID)
        if mode:
            value_dict[ID].append(modalclass_dict[ID])
        if prop:
            for cl in class_list:
                value_dict[ID].append(proportion_dict[ID]["%s" % cl])
    # WRITE OUTPUT
    if csvfile:
        with open(csvfile, "w", newline="") as outfile:
            writer = csv.writer(outfile, delimiter=separator)
            writer.writerow(header)
            writer.writerows(value_dict.values())
    if vectormap:
        gscript.message(_("Creating output vector map..."))
        temporary_vect = "rzonalclasses_tmp_vect_%d" % os.getpid()
        gscript.run_command(
            "r.to.vect",
            input_=zone_map,
            output=temporary_vect,
            type_="area",
            flags="vt",
            overwrite=True,
            quiet=True,
        )
        insert_sql = gscript.tempfile()
        with open(insert_sql, "w", newline="") as fsql:
            fsql.write("BEGIN TRANSACTION;\n")
            if gscript.db_table_exist(temporary_vect):
                if gscript.overwrite():
                    fsql.write("DROP TABLE %s;" % temporary_vect)
                else:
                    gscript.fatal(
                        _("Table %s already exists. Use --o to overwrite") %
                        temporary_vect)
            create_statement = ("CREATE TABLE %s (cat int PRIMARY KEY);\n" %
                                temporary_vect)
            fsql.write(create_statement)
            for col in header[1:]:
                if col.split(
                        "_")[-1] == "mode":  # Mode column should be integer
                    addcol_statement = "ALTER TABLE %s ADD COLUMN %s integer;\n" % (
                        temporary_vect,
                        col,
                    )
                else:  # Proportions column should be double precision
                    addcol_statement = (
                        "ALTER TABLE %s ADD COLUMN %s double precision;\n" %
                        (temporary_vect, col))
                fsql.write(addcol_statement)
            for key in value_dict:
                insert_statement = "INSERT INTO %s VALUES (%s);\n" % (
                    temporary_vect,
                    ",".join(value_dict[key]),
                )
                fsql.write(insert_statement)
            fsql.write("END TRANSACTION;")
        gscript.run_command("db.execute", input=insert_sql, quiet=True)
        gscript.run_command("v.db.connect", map_=temporary_vect,
                            table=temporary_vect, quiet=True)
        gscript.run_command("g.copy",
                            vector="%s,%s" % (temporary_vect, vectormap),
                            quiet=True)