def conv_segpoints(seg, output):
    """Convert a map of segments into a map of intake and restitution points."""
    segments, mset = seg.split('@') if '@' in seg else (seg, '')
    new_vec = VectorTopo(output)
    # TODO: check if the vector already exists
    new_vec.layer = 1
    new_vec.open('w', tab_cols=COLS_points)
    seg = VectorTopo(segments, mapset=mset)
    seg.layer = 1
    seg.open('r')
    for pla in seg:
        # the restitution is the last vertex of the segment
        new_vec.write(pla[-1], (2, pla.attrs['plant_id'], 'restitution',
                                pla.attrs['stream_id'],
                                pla.attrs['elev_down'],
                                pla.attrs['discharge'],
                                pla.attrs['pot_power']))
        # the intake is the first vertex of the segment
        new_vec.write(pla[0], (1, pla.attrs['plant_id'], 'intake',
                               pla.attrs['stream_id'],
                               pla.attrs['elev_up'],
                               pla.attrs['discharge'],
                               pla.attrs['pot_power']))
    seg.close()
    new_vec.table.conn.commit()
    new_vec.comment = ' '.join(sys.argv)
    new_vec.write_header()
    new_vec.close()
    return new_vec
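# A minimal usage sketch for conv_segpoints (the map names are assumptions,
# not maps shipped with GRASS); COLS_points must be defined as a list of
# (name, type) tuples matching the attributes written above.
plant_points = conv_segpoints('potential_plants', 'plant_points')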
def write_points(plants, output, efficiency, min_power):
    # create the vector map of intake and restitution points
    new_vec = VectorTopo(output)
    # TODO: check if the vector already exists
    new_vec.layer = 1
    new_vec.open('w', tab_cols=COLS_points)
    for pla in plants:
        power = pla.potential_power(efficiency=efficiency)
        if power > min_power:
            new_vec.write(pla.line[-1], (pla.restitution.id, pla.id,
                                         'restitution', pla.id_stream,
                                         float(pla.restitution.elevation),
                                         float(pla.restitution.discharge),
                                         power))
            for ink in pla.intakes:
                new_vec.write(pla.line[0], (ink.id, pla.id, 'intake',
                                            pla.id_stream,
                                            float(ink.elevation),
                                            float(ink.discharge),
                                            power))
    new_vec.table.conn.commit()
    new_vec.comment = ' '.join(sys.argv)
    new_vec.write_header()
    new_vec.close()
def setUp(self):
    """Create input data"""
    self.runModule("g.region", res=1, n=90, s=0, w=0, e=90)
    self.runModule("r.mapcalc", expression="map_a = 100 + row() + col()",
                   overwrite=True)
    self.runModule("r.mapcalc", expression="zone_map = if(row() < 20, 1, 2)",
                   overwrite=True)
    self.runModule("r.mapcalc", expression="row_map = row()",
                   overwrite=True)
    self.runModule("r.to.vect", input="zone_map", output="zone_map",
                   type="area", overwrite=True)

    cols = [(u'cat', 'INTEGER PRIMARY KEY'),
            (u'name', 'VARCHAR(20)')]
    vt = VectorTopo('test_line')
    vt.open('w', tab_cols=cols)
    line1 = Line([(1, 1), (2, 1), (2, 2)])
    line2 = Line([(10, 20), (15, 22), (20, 32), (30, 40)])
    vt.write(line1, ('first',))
    vt.write(line2, ('second',))
    vt.table.conn.commit()
    vt.close()

    vt = VectorTopo('test_small_area')
    vt.open('w', tab_cols=cols)
    area1 = Boundary(points=[(0, 0), (0, 0.2), (0.2, 0.2), (0.2, 0), (0, 0)])
    area2 = Boundary(points=[(2.7, 2.7), (2.7, 2.8), (2.8, 2.8), (2.8, 2.7),
                             (2.7, 2.7)])
    cent1 = Centroid(x=0.1, y=0.1)
    cent2 = Centroid(x=2.75, y=2.75)
    vt.write(area1)
    vt.write(area2)
    vt.write(cent1, ('first',))
    vt.write(cent2, ('second',))
    vt.table.conn.commit()
    vt.close()
def rand_vect_points(name, npoints=10, overwrite=True):
    new = VectorTopo(name)
    new.open('w', overwrite=overwrite)
    for pnt in get_random_points(npoints):
        new.write(pnt)
    new.close()
    return new
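# rand_vect_points relies on a get_random_points helper that is not shown
# here; a minimal sketch (an assumption, not part of the original code)
# could draw uniform random points inside the current computational region:
import random
from grass.pygrass.gis.region import Region
from grass.pygrass.vector.geometry import Point

def get_random_points(npoints):
    reg = Region()
    for _ in range(npoints):
        yield Point(random.uniform(reg.west, reg.east),
                    random.uniform(reg.south, reg.north))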
def new_map(self, mapa, layer, tab_sufix, objs, values, tab_subname=''):
    """Write the given geometries and attribute values to the output map,
    creating the attribute table for the requested layer if needed."""
    map_out = VectorTopo(mapa)
    if objs == [] or objs is None:
        return None
    tab_sufix_out = OUT_TABLES_NAMES[tab_sufix]
    tab_name = self.road_name + tab_sufix_out + tab_subname
    columns = OUT_TABLES[tab_sufix]
    if layer == 1:
        map_out.open('w', layer=layer, with_z=True,
                     tab_name=tab_name, tab_cols=columns)
    else:
        map_out.open('rw')
        link = Link(layer, tab_name, tab_name, 'cat' + str(layer))
        map_out.dblinks.add(link)
        table = link.table()
        if not table.exist():
            table.create(columns)
        table.conn.commit()
        map_out.close()
        map_out.open('rw', layer=layer, with_z=True)
    for i, obj in enumerate(objs):
        map_out.write(obj, i + 1, values[i])
    map_out.table.conn.commit()
    map_out.close()
def sample(vect_in_name, rast_in_name):
    """sample('point00', 'field')"""
    # instantiate the object maps
    vect_in = VectorTopo(vect_in_name)
    rast_in = RasterRow(rast_in_name)
    vect_out = VectorTopo('test_' + vect_in_name)
    # define the columns of the attribute table of the new vector map
    columns = [(u'cat', 'INTEGER PRIMARY KEY'),
               (rast_in_name, 'DOUBLE')]
    # open the maps
    vect_in.open('r')
    rast_in.open('r')
    vect_out.open('w', tab_cols=columns, link_driver='sqlite')
    # get the current region
    region = Region()
    # initialize the counter
    counter = 0
    data = []
    for pnt in vect_in.viter('points'):
        counter += 1
        # transform the spatial coordinates into row and col values
        row, col = coor2pixel(pnt.coords(), region)
        value = rast_in[int(row)][int(col)]
        data.append((counter, None if np.isnan(value) else float(value)))
        # write the geometry features
        vect_out.write(pnt)
    # write the attributes
    vect_out.table.insert(data, many=True)
    vect_out.table.conn.commit()
    # close the maps
    vect_in.close()
    rast_in.close()
    vect_out.close()
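# Usage sketch (map names are assumptions): sample the raster 'elevation'
# at the points of 'random_points' and print the attribute table of the
# new 'test_random_points' map.
sample('random_points', 'elevation')
with VectorTopo('test_random_points', mode='r') as sampled:
    for record in sampled.table.execute():
        print(record)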
def write_plants(plants, output, efficiency, min_power):
    # create the vector map of plant segments
    new_vec = VectorTopo(output)
    # TODO: check if the vector already exists
    new_vec.layer = 1
    new_vec.open('w', tab_cols=COLS)
    for pla in plants:
        power = pla.potential_power(efficiency=efficiency)
        if power > min_power:
            for cat, ink in enumerate(pla.intakes):
                if version == 70:
                    new_vec.write(pla.line,
                                  (pla.id, pla.id_stream, power,
                                   float(pla.restitution.discharge),
                                   float(ink.elevation),
                                   float(pla.restitution.elevation)))
                else:
                    new_vec.write(pla.line, cat=cat,
                                  attrs=(pla.id, pla.id_stream, power,
                                         float(pla.restitution.discharge),
                                         float(ink.elevation),
                                         float(pla.restitution.elevation)))
    new_vec.table.conn.commit()
    new_vec.comment = ' '.join(sys.argv)
    new_vec.close()
def split_maps(self):
    """Split the input map into a map of points and a map of breaklines."""
    grass.message("Splitting into points and breaklines maps")
    topo = VectorTopo(self.name_map)
    topo.open('r')
    points = []
    lines = []
    # iterate over all primitives (points and lines share one id sequence)
    n_features = topo.number_of("points") + topo.number_of("lines")
    for feat_id in range(1, n_features + 1):
        feature = topo.read(feat_id)
        if isinstance(feature, Point):
            points.append(feature)
        if isinstance(feature, Line):
            lines.append(feature)
    topo.close()

    new1 = VectorTopo(self.ptosmap)
    new1.open('w', with_z=True)
    for pnt in points:
        new1.write(pnt)
    new1.close()

    new1 = VectorTopo(self.breakmap)
    new1.open('w', layer=1, with_z=True)
    for line in lines:
        new1.write(line)
    new1.close()
def get_electro_length(opts):
    # open the vector map with the plant structure
    pname = opts['struct']
    pname, vmapset = pname.split('@') if '@' in pname else (pname, '')
    with VectorTopo(pname, mapset=vmapset, layer=int(opts['struct_layer']),
                    mode='r') as vect:
        kcol = opts['struct_column_kind']
        ktype = opts['struct_kind_turbine']
        # check if electro_length is already in the table
        if 'electro_length' not in vect.table.columns:
            vect.table.columns.add('electro_length', 'double precision')
        # open the vector map with the existing electro lines
        ename = opts['electro']
        ename, emapset = ename.split('@') if '@' in ename else (ename, '')
        ltemp = []
        with VectorTopo(ename, mapset=emapset,
                        layer=int(opts['electro_layer']), mode='r') as electro:
            pid = os.getpid()
            elines = (opts['elines'] if opts['elines']
                      else ('tmprgreen_%i_elines' % pid))
            for cat, line in enumerate(vect):
                if line.attrs[kcol] == ktype:
                    # the turbine is the last point of the penstock
                    turbine = line[-1]
                    # find the closest electro line
                    eline = electro.find['by_point'].geo(turbine, maxdist=1e6)
                    dist = eline.distance(turbine)
                    line.attrs['electro_length'] = dist.dist
                    if line.attrs['side'] == 'option1':
                        ltemp.append([geo.Line([turbine, dist.point]),
                                      (line.attrs['plant_id'],
                                       line.attrs['side'])])
                else:
                    line.attrs['electro_length'] = 0.
            vect.table.conn.commit()

    new = VectorTopo(elines)  # new vector map with the electro lines
    new.layer = 1
    cols = [(u'cat', 'INTEGER PRIMARY KEY'),
            (u'plant_id', 'VARCHAR(10)'),
            (u'side', 'VARCHAR(10)')]
    new.open('w', tab_cols=cols)
    for cat, line in enumerate(ltemp):
        if version == 70:
            new.write(line[0], line[1])
        else:
            new.write(line[0], cat=cat, attrs=line[1])
    new.table.conn.commit()
    new.comment = ' '.join(sys.argv)
    new.close()
def write_objs(allrectas, radio):
    """Write the given geometries to a new vector map named after the radius."""
    new2 = VectorTopo("AACC__" + str(int(radio)))
    new2.open("w")
    for obj in allrectas:
        new2.write(obj)
    new2.close()
def run(self):
    logging.debug("Computation started")

    psc = self.input_psc.getValue()
    map_name = 'obce_psc_{}'.format(psc)

    obce = VectorTopo('obce', mapset='psc')
    obce.open('r')

    vystup = VectorTopo(map_name)
    vystup.open('w', tab_cols=[('cat', 'INTEGER PRIMARY KEY'),
                               ('nazev', 'TEXT'),
                               ('psc', 'INTEGER')])

    obec_id = None
    obce_psc = set()
    for prvek in obce.viter('areas'):
        if prvek.attrs is None:
            continue
        if prvek.attrs['psc'] == psc:
            if obec_id is None:
                obec_id = prvek.id
            for b in prvek.boundaries():
                for n in b.read_area_ids():
                    if n != -1 and n != obec_id:
                        obce_psc.add(n)
    obce_psc.add(obec_id)

    hranice = list()
    cat = 1
    for prvek in obce.viter('areas'):
        if prvek.id not in obce_psc:
            continue
        for b in prvek.boundaries():
            if b.id not in hranice:
                hranice.append(b.id)
                vystup.write(b)
        vystup.write(prvek.centroid(), cat=cat,
                     attrs=(prvek.attrs['nazev'], prvek.attrs['psc']))
        cat += 1

    vystup.table.conn.commit()
    vystup.close()
    obce.close()

    logging.debug("Computation finished")

    return map_name
def setUp(self):
    """Create input data"""
    self.runModule("g.region", res=1, n=90, s=0, w=0, e=90)
    self.runModule("r.mapcalc", expression="map_a = 100 + row() + col()",
                   overwrite=True)
    self.runModule("r.mapcalc", expression="zone_map = if(row() < 20, 1, 2)",
                   overwrite=True)
    self.runModule("r.to.vect", input="zone_map", output="zone_map",
                   type="area", overwrite=True)

    cols = [(u'cat', 'INTEGER PRIMARY KEY'),
            (u'name', 'VARCHAR(20)')]
    vt = VectorTopo('test_line')
    vt.open('w', tab_cols=cols)
    line1 = Line([(1, 1), (2, 1), (2, 2)])
    line2 = Line([(10, 20), (15, 22), (20, 32), (30, 40)])
    vt.write(line1, ('first',))
    vt.write(line2, ('second',))
    vt.table.conn.commit()
    vt.close()
def obce_psc(psc):
    obce = VectorTopo('obce')
    obce.open('r')

    vystup = VectorTopo('obce_psc_{}'.format(psc))
    vystup.open('w', tab_cols=[('cat', 'INTEGER PRIMARY KEY'),
                               ('nazev', 'TEXT'),
                               ('psc', 'INTEGER')])

    obec_id = None
    obce_psc = set()
    for prvek in obce.viter('areas'):
        if prvek.attrs is None:
            continue
        if prvek.attrs['psc'] == psc:
            if obec_id is None:
                obec_id = prvek.id
            for b in prvek.boundaries():
                for n in b.read_area_ids():
                    if n != -1 and n != obec_id:
                        obce_psc.add(n)
    obce_psc.add(obec_id)

    hranice = list()
    for prvek in obce.viter('areas'):
        if prvek.id not in obce_psc:
            continue
        for b in prvek.boundaries():
            if b.id not in hranice:
                hranice.append(b.id)
                vystup.write(b, attrs=(None, None))
        vystup.write(prvek.centroid(),
                     attrs=(prvek.attrs['nazev'], prvek.attrs['psc']))

    vystup.table.conn.commit()
    vystup.close()
    obce.close()
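# Usage sketch: collect the municipality with postal code 66433 (an assumed
# value) and all of its neighbours into a new map; requires an 'obce' vector
# map with 'nazev' and 'psc' attribute columns in the current mapset.
obce_psc(66433)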
def curved(self):
    """Compute curves from the TIN lines and build polylines from them."""
    mapset = GrassGis.G_find_vector2(self.nametin, "")
    if not mapset:
        sys.exit("Vector map <%s> not found" % self.nametin)

    # define map structure
    map_info = GrassGis.pointer(GrassVect.Map_info())
    # define open level (level 2: topology)
    GrassVect.Vect_set_open_level(2)
    # open existing vector map
    GrassVect.Vect_open_old(map_info, self.nametin, mapset)

    print("Calculating curves")
    allrectas = []
    rectas = self.get_rectas(map_info)
    for nivel in rectas:
        for recta in nivel:
            allrectas.append(recta)
    GrassVect.Vect_close(map_info)

    new = VectorTopo(self.namelines)
    new.open("w", with_z=True)
    for line in allrectas:
        new.write(Line(line))
    new.close()

    grass.run_command("v.build.polylines", input=self.namelines,
                      output=self.namecurved, overwrite=True, quiet=True)
def euclidean_distance_fields(prefix, region, overwrite=False):
    """
    Generate euclidean distance fields from map corner and centre coordinates

    Parameters
    ----------
    prefix : str
        Name to use as prefix to save distance maps

    region : grass.pygrass.gis.region.Region
        Region

    overwrite : bool
        Whether to overwrite existing maps
    """
    point_topleft = Point(region.west + region.ewres / 2,
                          region.north - region.nsres / 2)
    point_topright = Point(region.east - region.ewres / 2,
                           region.north - region.nsres / 2)
    point_lowerleft = Point(region.west + region.ewres / 2,
                            region.south + region.nsres / 2)
    point_lowerright = Point(region.east - region.ewres / 2,
                             region.south + region.nsres / 2)
    point_centre = Point(region.west + (region.east - region.west) / 2,
                         region.south + (region.north - region.south) / 2)

    points = {
        "topleft": point_topleft,
        "topright": point_topright,
        "lowerleft": point_lowerleft,
        "lowerright": point_lowerright,
        "centre": point_centre,
    }

    for name, p in points.items():
        point_name = "_".join([prefix, name])

        vect = VectorTopo(name=point_name)
        vect.open(mode="w", tab_name=point_name,
                  tab_cols=[("cat", "INTEGER PRIMARY KEY"),
                            ("name", "TEXT")])
        vect.write(p, ("point",))
        vect.table.conn.commit()
        vect.close()

        gvect.to_rast(input=point_name, type="point", use="val",
                      output=point_name, overwrite=overwrite)
        grast.grow_distance(point_name, distance="distance_to_" + point_name,
                            overwrite=overwrite)

        # remove the temporary point maps (the vector and its rasterized copy)
        g.remove(name=point_name, type="vector", flags="f")
        g.remove(name=point_name, type="raster", flags="f")
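# Usage sketch: generate the five distance rasters for the current region
# with an assumed prefix 'edf'; this creates maps such as
# 'distance_to_edf_centre'. Assumes the gvect, grast and g shortcut modules
# used above are already imported.
from grass.pygrass.gis.region import Region

euclidean_distance_fields('edf', Region(), overwrite=True)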
def main():
    in_vector = options['input'].split('@')[0]
    if len(options['input'].split('@')) > 1:
        in_mapset = options['input'].split('@')[1]
    else:
        in_mapset = None
    raster_maps = options['raster'].split(',')  # raster file(s) to extract from
    output = options['output']
    methods = tuple(options['methods'].split(','))
    percentile = (None if options['percentile'] == ''
                  else list(map(float, options['percentile'].split(','))))
    column_prefix = tuple(options['column_prefix'].split(','))
    buffers = options['buffers'].split(',')
    types = options['type'].split(',')
    layer = options['layer']
    sep = options['separator']
    update = flags['u']
    tabulate = flags['t']
    percent = flags['p']
    remove = flags['r']
    use_label = False

    empty_buffer_warning = 'No data in raster map {} within buffer {} around geometry {}'

    # Do checks using pygrass
    for rmap in raster_maps:
        r_map = RasterAbstractBase(rmap)
        if not r_map.exist():
            grass.fatal('Could not find raster map {}.'.format(rmap))

    user_mask = False
    m_map = RasterAbstractBase('MASK', Mapset().name)
    if m_map.exist():
        grass.warning("Current MASK is temporarily renamed.")
        user_mask = True
        unset_mask()

    invect = VectorTopo(in_vector)
    if not invect.exist():
        grass.fatal("Vector file {} does not exist".format(in_vector))

    if output:
        if output == '-':
            out = None
        else:
            out = open(output, 'w')

    # Check if input map is in current mapset (and thus editable)
    if in_mapset and str(in_mapset) != str(Mapset()):
        grass.fatal("Input vector map is not in current mapset and cannot be "
                    "modified. Please consider copying it to current mapset.")

    buffers = []
    for buf in options['buffers'].split(','):
        try:
            b = float(buf)
            if b.is_integer():
                buffers.append(int(b))
            else:
                buffers.append(b)
        except ValueError:
            grass.fatal('Invalid buffer distance: {}'.format(buf))
        if b < 0:
            grass.fatal("Negative buffer distance not supported!")

    ### Define column types depending on statistic, map type and
    ### DB backend (SQLite supports only double and not real)
    # int: statistic produces always integer precision
    # double: statistic produces always floating point precision
    # map_type: precision of statistic depends on map type
    int_dict = {'number': (0, 'int', 'n'),
                'number_null': (1, 'int', 'null_cells'),
                'minimum': (3, 'map_type', 'min'),
                'maximum': (4, 'map_type', 'max'),
                'range': (5, 'map_type', 'range'),
                'average': (6, 'double', 'mean'),
                'average_abs': (7, 'double', 'mean_of_abs'),
                'stddev': (8, 'double', 'stddev'),
                'variance': (9, 'double', 'variance'),
                'coeff_var': (10, 'double', 'coeff_var'),
                'sum': (11, 'map_type', 'sum'),
                'first_quartile': (12, 'map_type', 'first_quartile'),
                'median': (13, 'map_type', 'median'),
                'third_quartile': (14, 'map_type', 'third_quartile'),
                'percentile': (15, 'map_type', 'percentile')}

    if len(raster_maps) != len(column_prefix):
        grass.fatal('Number of maps and number of column prefixes has to be equal!')

    # Generate list of required column names and types
    col_names = []
    col_types = []
    for p in column_prefix:
        rmaptype, rcats = raster_type(raster_maps[column_prefix.index(p)],
                                      tabulate, use_label)
        for b in buffers:
            b_str = str(b).replace('.', '_')
            if tabulate:
                if rmaptype == 'double precision':
                    grass.fatal('{} has floating point precision. Can only '
                                'tabulate integer maps'.format(
                                    raster_maps[column_prefix.index(p)]))
                col_names.append('{}_{}_b{}'.format(p, 'ncats', b_str))
                col_types.append('int')
                col_names.append('{}_{}_b{}'.format(p, 'mode', b_str))
                col_types.append('int')
                col_names.append('{}_{}_b{}'.format(p, 'null', b_str))
                col_types.append('double precision')
                col_names.append('{}_{}_b{}'.format(p, 'area_tot', b_str))
                col_types.append('double precision')
                for rcat in rcats:
                    if use_label:
                        rcat = rcat[1].replace(" ", "_")
                    else:
                        rcat = rcat[0]
                    col_names.append('{}_{}_b{}'.format(p, rcat, b_str))
                    col_types.append('double precision')
            else:
                for m in methods:
                    col_names.append('{}_{}_b{}'.format(p, int_dict[m][2], b_str))
                    col_types.append(rmaptype if int_dict[m][1] == 'map_type'
                                     else int_dict[m][1])
                if percentile:
                    for perc in percentile:
                        col_names.append('{}_percentile_{}_b{}'.format(
                            p, int(perc) if (perc).is_integer() else perc, b_str))
                        col_types.append(rmaptype if int_dict[m][1] == 'map_type'
                                         else int_dict[m][1])

    # Open input vector map
    in_vect = VectorTopo(in_vector, layer=layer)
    in_vect.open(mode='r')

    # Get name for temporary map
    # (tmp_map is not defined in this excerpt; a process-specific name is assumed)
    tmp_map = 'tmp_bufferstats_{}'.format(os.getpid())
    TMP_MAPS.append(tmp_map)

    # Setup stats collectors
    if tabulate:
        # Collector for raster category statistics
        stats = Module('r.stats', run_=False, stdout_=PIPE)
        stats.inputs.sort = 'desc'
        stats.inputs.null_value = 'null'
        stats.flags.quiet = True
        if percent:
            stats.flags.p = True
            stats.flags.n = True
        else:
            stats.flags.a = True
    else:
        # Collector for univariate statistics
        univar = Module('r.univar', run_=False, stdout_=PIPE)
        univar.inputs.separator = sep
        univar.flags.g = True
        univar.flags.quiet = True

        # Add extended statistics if requested
        if set(methods).intersection(set(['first_quartile',
                                          'median', 'third_quartile'])):
            univar.flags.e = True

        if percentile is not None:
            univar.flags.e = True
            univar.inputs.percentile = percentile

    # Check if attribute table exists
    if not output:
        if not in_vect.table:
            grass.fatal('No attribute table found for vector map {}'.format(in_vect))

        # Modify table as needed
        tab = in_vect.table
        tab_name = tab.name
        tab_cols = tab.columns

        # Add required columns
        existing_cols = list(set(tab_cols.names()).intersection(col_names))
        if len(existing_cols) > 0:
            if not update:
                grass.fatal('Column(s) {} already exist! Please use the u-flag '
                            'if you want to update values in those '
                            'columns'.format(','.join(existing_cols)))
            else:
                grass.warning('Column(s) {} already exist!'.format(
                    ','.join(existing_cols)))
            for e in existing_cols:
                idx = col_names.index(e)
                del col_names[idx]
                del col_types[idx]
        tab_cols.add(col_names, col_types)

        conn = tab.conn
        cur = conn.cursor()

        sql_str_start = 'UPDATE {} SET '.format(tab_name)

    elif output == '-':
        print('cat{0}raster_map{0}buffer{0}statistic{0}value'.format(sep))
    else:
        out.write('cat{0}raster_map{0}buffer{0}statistic{0}value{1}'.format(
            sep, os.linesep))

    # Get computational region
    grass.use_temp_region()
    r = Region()
    r.read()

    # Adjust region extent to buffer around geometry
    # reg = deepcopy(r)

    # Create iterator for geometries of all selected types
    geoms = chain()
    geoms_n = 0
    n_geom = 1
    for geom_type in types:
        geoms_n += in_vect.number_of(geom_type)
        if in_vect.number_of(geom_type) > 0:
            # chain with the previous iterator so all types are kept
            geoms = chain(geoms, in_vect.viter(geom_type))

    # Loop over geometries
    for geom in geoms:
        # Get cat
        cat = geom.cat

        # Add where clause to UPDATE statement
        sql_str_end = ' WHERE cat = {};'.format(cat)

        # Loop over user provided buffer distances
        for buf in buffers:
            b_str = str(buf).replace('.', '_')
            # Buffer geometry
            if buf <= 0:
                buffer_geom = geom
            else:
                buffer_geom = geom.buffer(buf)
            # Create temporary vector map with buffered geometry
            tmp_vect = VectorTopo(tmp_map, quiet=True)
            tmp_vect.open(mode='w')
            tmp_vect.write(Boundary(points=buffer_geom[0].to_list()))
            # , c_cats=int(cat), set_cats=True
            tmp_vect.write(Centroid(x=buffer_geom[1].x,
                                    y=buffer_geom[1].y), cat=int(cat))

            #################################################
            # How to silence VectorTopo???
            #################################################
            tmp_vect.close(build=False)
            grass.run_command('v.build', map=tmp_map, quiet=True)

            r = align_current(r, buffer_geom[0].bbox())
            r.write()

            # Check if the following is needed
            # needed specially with r.stats -p
            # grass.run_command('g.region', vector=tmp_map, flags='a')

            # Create a MASK from buffered geometry
            if user_mask:
                grass.run_command('v.to.rast', input=tmp_map, output=tmp_map,
                                  use='val', value=int(cat), quiet=True)
                mc_expression = ("MASK=if(!isnull({0}) && !isnull({0}_MASK), "
                                 "{1}, null())").format(tmp_map, cat)
                grass.run_command('r.mapcalc', expression=mc_expression,
                                  quiet=True)
            else:
                grass.run_command('v.to.rast', input=tmp_map, output='MASK',
                                  use='val', value=int(cat), quiet=True)

            updates = []
            # Compute statistics for every raster map
            for rm in range(len(raster_maps)):
                rmap = raster_maps[rm]
                prefix = column_prefix[rm]

                if tabulate:
                    # Get statistics on occurrence of raster categories
                    # within buffer
                    stats.inputs.input = rmap
                    stats.run()
                    t_stats = stats.outputs['stdout'].value.rstrip(
                        os.linesep).replace(
                        ' ', '_b{} = '.format(b_str)).split(os.linesep)
                    if t_stats[0].split('_b{} = '.format(b_str))[0].split('_')[-1] != 'null':
                        mode = t_stats[0].split('_b{} = '.format(b_str))[0].split('_')[-1]
                    elif len(t_stats) == 1:
                        mode = 'NULL'
                    else:
                        mode = t_stats[1].split('_b{} = '.format(b_str))[0].split('_')[-1]
                    if not output:
                        updates.append('\t{}_{}_b{} = {}'.format(
                            prefix, 'ncats', b_str, len(t_stats)))
                        updates.append('\t{}_{}_b{} = {}'.format(
                            prefix, 'mode', b_str, mode))
                        area_tot = 0
                        for l in t_stats:
                            updates.append('\t{}_{}'.format(prefix, l.rstrip('%')))
                            if l.split('_b{} ='.format(b_str))[0].split('_')[-1] != 'null':
                                area_tot = area_tot + float(
                                    l.rstrip('%').split('= ')[1])
                        if not percent:
                            updates.append('\t{}_{}_b{} = {}'.format(
                                prefix, 'area_tot', b_str, area_tot))
                    else:
                        out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(
                            sep, cat, prefix, buf, 'ncats', len(t_stats),
                            os.linesep)
                        out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(
                            sep, cat, prefix, buf, 'mode', mode, os.linesep)
                        area_tot = 0
                        if not t_stats[0]:
                            grass.warning(empty_buffer_warning.format(
                                rmap, buf, cat))
                            continue
                        for l in t_stats:
                            rcat = l.split('_b{} ='.format(b_str))[0].split('_')[-1]
                            area = l.split('= ')[1]
                            out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(
                                sep, cat, prefix, buf,
                                'area {}'.format(rcat), area, os.linesep)
                            if rcat != 'null':
                                area_tot = area_tot + float(
                                    l.rstrip('%').split('= ')[1])
                        out_str += '{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}'.format(
                            sep, cat, prefix, buf, 'area_tot', area_tot,
                            os.linesep)

                        if output == '-':
                            print(out_str.rstrip(os.linesep))
                        else:
                            out.write(out_str)

                else:
                    # Get univariate statistics within buffer
                    univar.inputs.map = rmap
                    univar.run()
                    u_stats = univar.outputs['stdout'].value.rstrip(
                        os.linesep).replace(
                        '=', '_b{} = '.format(b_str)).split(os.linesep)

                    # Test if u_stats is empty and give warning
                    # Needs to be adjusted to number of requested stats?
                    if ((percentile and len(u_stats) < 14)
                            or (univar.flags.e and len(u_stats) < 13)
                            or len(u_stats) < 12):
                        grass.warning(empty_buffer_warning.format(rmap, buf, cat))
                        break

                    # Extract statistics for selected methods
                    for m in methods:
                        if not output:
                            # Add to list of UPDATE statements
                            updates.append('\t{}_{}'.format(
                                prefix, u_stats[int_dict[m][0]]))
                        else:
                            out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}'.format(
                                sep, cat, prefix, buf, m,
                                u_stats[int_dict[m][0]].split('= ')[1])
                            if output == '-':
                                print(out_str)
                            else:
                                out.write("{}{}".format(out_str, os.linesep))

                    if percentile:
                        perc_count = 0
                        for perc in percentile:
                            if not output:
                                updates.append('{}_percentile_{}_b{} = {}'.format(
                                    prefix,
                                    int(perc) if (perc).is_integer() else perc,
                                    b_str,
                                    u_stats[15 + perc_count].split('= ')[1]))
                            else:
                                out_str = '{1}{0}{2}{0}{3}{0}{4}{0}{5}'.format(
                                    sep, cat, prefix, buf,
                                    'percentile_{}'.format(
                                        int(perc) if (perc).is_integer() else perc),
                                    u_stats[15 + perc_count].split('= ')[1])
                                if output == '-':
                                    print(out_str)
                                else:
                                    out.write(out_str)
                            perc_count = perc_count + 1

            if not output and len(updates) > 0:
                cur.execute('{}{}{}'.format(sql_str_start,
                                            ',\n'.join(updates),
                                            sql_str_end))

            # Remove temporary maps
            grass.run_command('g.remove', flags='f', type='raster',
                              name='MASK', quiet=True)
            grass.run_command('g.remove', flags='f', type='vector',
                              name=tmp_map, quiet=True)

        # Give progress information
        grass.percent(n_geom, geoms_n, 1)
        n_geom = n_geom + 1

        if not output:
            conn.commit()

    # Close cursor and DB connection
    if not output and not output == "-":
        cur.close()
        conn.close()
        # Update history
        grass.vector.vector_history(in_vector)
    elif output != "-":
        # write results to file
        out.close()

    if remove:
        dropcols = []
        selectnum = 'select count({}) from {}'
        for i in col_names:
            thisrow = grass.read_command('db.select', flags='c',
                                         sql=selectnum.format(i, in_vector))
            if int(thisrow) == 0:
                dropcols.append(i)
        grass.debug("Columns to delete: {}".format(', '.join(dropcols)),
                    debug=2)
        grass.run_command('v.db.dropcolumn', map=in_vector, columns=dropcols)
def main():
    punti = options['points']
    dem = options['dem']
    TR = options['time']
    outlet = options['outlets']
    outlets = outlet.split(',')
    try:
        TR = int(TR)
    except ValueError:
        print('TR is not a number')
        sys.exit()
    grass.use_temp_region()

    # test access to the points map
    new_points = VectorTopo(punti)
    if new_points.exist():
        points_str = grass.read_command('v.to.db', map=punti, type='point',
                                        option='coor', flags='p')
        points_list = points_str.split('\n')
        points_list.remove('')
    else:
        print('File %s does not exist' % punti)
        sys.exit()

    # get the region in order to estimate the threshold from the total cells
    grass.run_command('g.region', raster=dem)
    regione = grass.region()
    threshold = float(regione['cells']) / 300

    # stream and drainage determination
    grass.run_command('r.watershed', elevation=dem, threshold=500,
                      stream='raster_streams', drainage='drainage',
                      overwrite=True, flags='s')

    # the radius is a little more than the current resolution
    radius = regione['nsres'] * 1.4

    output_points = []

    # start the cycle on each outlet in the list
    category = 1
    elev_renamed = False
    for outlet in points_list[1:]:
        outlet = outlet.split('|')[1:-1]
        print(', '.join(outlet))
        grass.run_command('g.region', raster=dem)
        grass.run_command('r.circle', output='circle',
                          coordinate=','.join(outlet), max=radius,
                          overwrite=True)

        # get the distances and take the shortest distance
        distances = grass.read_command('r.distance',
                                       map='circle,raster_streams')
        list_dist = distances.split('\n')
        list_dist.remove('')
        list_tuple = []
        for distance in list_dist:
            dist = distance.split(':')
            my_tupla = (dist[0], dist[1], float(dist[2]),
                        dist[3], dist[4], dist[5], dist[6])
            list_tuple.append(my_tupla)
        tuple_orderedByDistance = sorted(list_tuple,
                                         key=lambda distanza: distanza[2])
        del distances, list_tuple, list_dist

        # calculate the basin and read its statistics
        outlet = tuple_orderedByDistance[0][-2:]
        xoutlet = float(outlet[0])
        youtlet = float(outlet[1])
        grass.run_command('r.water.outlet', input='drainage', output='basin',
                          coordinates=str(xoutlet) + ',' + str(youtlet),
                          overwrite=True)
        statistics = grass.read_command('r.univar', map=dem, zones='basin')
        main_stat = statistics.splitlines()[-9:]

        # order the stream network
        grass.run_command('r.mask', raster='basin')
        grass.run_command('r.stream.order', stream_rast='raster_streams',
                          direction='drainage', elevation=dem,
                          horton='horton', overwrite=True)
        stream_stat = grass.read_command('r.stream.stats',
                                         stream_rast='horton',
                                         direction='drainage',
                                         elevation=dem, flags='o')
        network_statistics = stream_stat.split('\n')
        network_statistics.remove('')

        # get the max order
        network_statistics[-1].split()
        total_length = float(network_statistics[-1].split(',')[2])
        area_basin = float(network_statistics[-1].split(',')[3])
        area_basin_Ha = area_basin * 100
        mean_elev = float(main_stat[3].split(':')[-1])
        min_elev = float(main_stat[0].split(':')[-1])
        max_elev = float(main_stat[1].split(':')[-1])
        deltaH = max_elev - min_elev
        average_slope = float(network_statistics[-1].split(',')[4])
        grass.run_command('r.mask', flags='r')

        TcGiandotti = (4 * np.sqrt(area_basin) + 1.5 * total_length) \
            / (0.8 * np.sqrt(mean_elev - min_elev))
        TcKirpich = 0.945 * (total_length**3. / deltaH)**0.385

        if area_basin_Ha > 1000:  # TODO: check the references
            corrivazione = TcGiandotti
            grass.info('using Giandotti')
            grass.info(str(TcGiandotti))
            formula = 'Giandotti'
        else:
            corrivazione = TcKirpich
            formula = 'Kirpich'
            grass.info('using Kirpich')
            grass.info(str(TcKirpich))
        if corrivazione < 24:
            aPar = 'a24@PERMANENT'
            bPar = 'b24@PERMANENT'
            kPar = 'k24@PERMANENT'
        else:
            aPar = 'a15@PERMANENT'
            bPar = 'b15@PERMANENT'
            kPar = 'k15@PERMANENT'
            corrivazione = TcGiandotti
        CNmap = 'CN@PERMANENT'

        aStat = grass.read_command('r.univar', map=aPar, zones='basin')
        aMain_stat = aStat.splitlines()[12].split(':')[-1]
        aMain_stat = float(aMain_stat)
        bStat = grass.read_command('r.univar', map=bPar, zones='basin')
        bMain_stat = bStat.splitlines()[12].split(':')[-1]
        bMain_stat = float(bMain_stat)
        kStat = grass.read_command('r.univar', map=kPar, zones='basin')
        kMain_stat = kStat.splitlines()[12].split(':')[-1]
        kMain_stat = float(kMain_stat)
        CNstat = grass.read_command('r.univar', map=CNmap, zones='basin')
        CN = CNstat.splitlines()[12].split(':')[-1]
        CN = float(CN)

        g.message('area basin in km2: ')
        print(area_basin)
        print('mean elev: ')
        print(mean_elev - min_elev)
        print('delta H:')
        print(deltaH)
        print('total reach length: ')
        print(total_length)
        print('a mean:')
        print(aMain_stat)
        print('\n b mean: ')
        print(bMain_stat)
        print('\n k mean: ')
        print(kMain_stat)
        print('CN mean:')
        CN = 70.12 / 82.63 * CN
        print(CN)

        f_K_T = 1 - kMain_stat * (0.45 + 0.799 * np.log(-np.log(1 - 1. / TR)))
        print('f(k,T): ')
        print(f_K_T)
        h = f_K_T * aMain_stat * corrivazione**bMain_stat
        print('\n h main:')
        print(h)
        X1 = 100 * corrivazione / (0.236 + 0.062 * corrivazione)
        X2 = 0.003 * corrivazione + 0.0234
        Pa = 100 - area_basin_Ha / (X1 + X2 * area_basin_Ha)
        Ha = h * Pa / 100
        S1 = (1000. / CN) - 10
        Pn = (Ha - 5.08 * S1)**2 / (Ha + 20.32 * S1)
        Qc = (1 / 360.) * Pn * area_basin_Ha / corrivazione
        print('discharge: ')
        print(Qc)

        # ------------------------------
        # start calculation of local upstream slope
        # ------------------------------

        # offsets for moving windows
        offsets = [d for j in range(1, 1 + 1)
                   for i in [j, -j]
                   for d in [(i, 0), (0, i), (i, i), (i, -i)]]
        # rename the dtm as elevation for future calculations, if needed
        if not VectorTopo('elevation').exist():
            grass.run_command('g.rename', raster="%s,elevation" % dem)
            elev_renamed = True

        # define drainage directions
        drainage_incoming = [2, 4, 3, 1, 6, 8, 7, 5]
        drainage_outcoming = []
        diag_dist = (regione['nsres']**2 + regione['ewres']**2)**0.5
        # [(1, 0), (0, 1), (1, 1), (1, -1), (-1, 0), (0, -1), (-1, -1), (-1, 1)]
        cell_dists = [regione['nsres'], regione['ewres'],
                      diag_dist, diag_dist,
                      regione['nsres'], regione['ewres'],
                      diag_dist, diag_dist]
        # define the calculation terms
        terms = ["(drainage[%d,%d] == %d && not(isnull(raster_streams[0,0])) "
                 "&& not(isnull(raster_streams[%d,%d])) )"
                 % (offsets[j] + tuple([drainage_incoming[j]]) + offsets[j])
                 for j in range(len(drainage_incoming))]
        # define the operation expressions
        terms_calc = ["(elevation[%d,%d] - elevation) * %s"
                      % (offsets[j] + (terms[j],)) for j in range(len(terms))]
        terms_calc_slope = ["( (elevation[%d,%d] - elevation)/%10.4f ) * %s"
                            % (offsets[j] + (cell_dists[j],) + (terms[j],))
                            for j in range(len(terms))]
        expr = "num_cells_drain_into = (%s)" % " + ".join(terms)
        expr1 = ("elevation_percentile4 = if(isnull(raster_streams),null(),"
                 "(%s))" % " + ".join(terms))
        expr2 = "elevdiff_drain_into = %s" % " + ".join(terms_calc)
        expr3 = "slope_drain_into = %s" % " + ".join(terms_calc_slope)

        # do the r.mapcalc calculation with the moving window,
        # excluding the num_cells calculation
        # grass.mapcalc(expr)
        # grass.mapcalc(expr2, overwrite=True)
        grass.mapcalc(expr3, overwrite=True)

        # ------------------------------
        # start calculation of 2 km upstream slope
        # ------------------------------

        # create an outlet vector
        new_outlet = VectorTopo('outlet')
        COLS = [(u'cat', 'INTEGER PRIMARY KEY')]
        new_outlet.open('w', tab_name='outlet', tab_cols=COLS)
        new_outlet.write(Point(xoutlet, youtlet), cat=1)
        new_outlet.table.conn.commit()
        new_outlet.table.execute().fetchall()
        new_outlet.close()

        # find the local main channel
        horton_order = grass.raster_what('horton', [[xoutlet, youtlet]])
        horton_order = int(horton_order[0]['horton']['value'])
        print("Horton order for main channel:")
        print(horton_order)
        grass.run_command('g.region', zoom='horton')
        grass.mapcalc("main_stream = if((horton == %d),1,null())"
                      % horton_order, overwrite=True)
        grass.run_command('r.to.vect', input='main_stream',
                          output='main_stream', type='line', overwrite=True)
        grass.run_command('v.build.polylines', overwrite=True,
                          input='main_stream', output='main_stream_poly',
                          cats='first')

        # network analysis on the main channel
        grass.run_command('v.net', input='main_stream_poly', points='outlet',
                          output='main_stream_connected',
                          operation='connect', threshold=radius * 3,
                          overwrite=True)
        grass.run_command('v.net.iso', input='main_stream_connected',
                          output='main_stream_iso', center_cats=1,
                          costs='100,200,400', overwrite=True)
        report = grass.read_command('v.category', input='main_stream_iso',
                                    option='report', type='line')
        min_max = report.split('\n')[3].split()[-2:]
        min_cat = int(min_max[0])
        max_cat = int(min_max[1])
        elev_outlet = grass.raster_what('elevation', [[xoutlet, youtlet]])
        elev_outlet = float(elev_outlet[0]['elevation']['value'])
        drops = []
        for i in range(min_cat, max_cat):
            grass.run_command('v.extract', input='main_stream_iso',
                              type='line', cats=i,
                              output='main_stream_%s' % i, overwrite=True)
            grass.run_command('v.to.points', input='main_stream_%s' % i,
                              type='line', output='nodes', use='node',
                              overwrite=True)
            points = grass.read_command('v.to.db', flags='p', map='nodes',
                                        type='point', option='coor',
                                        columns='x,y', layer=2)
            points = points.split('\n')[1:]
            points.remove('')
            elevations_drops = []
            print(points)
            for point in points:
                xpoint = float(point.split('|')[1])
                ypoint = float(point.split('|')[2])
                elev = grass.raster_what('elevation', [[xpoint, ypoint]])
                elev = float(elev[0]['elevation']['value'])
                elevations_drops.append(elev - elev_outlet)
            elevations_drops.sort(reverse=True)
            drops.append(elevations_drops[0])
        print('list of drops:')
        print(drops)

        # sample the slope raster at the outlet
        slope_query = grass.raster_what('slope_drain_into',
                                        [[xoutlet, youtlet]])
        slope = slope_query[0]['slope_drain_into']['value']
        if slope == '0':
            slope = 1. / 10000
        else:
            slope = float(slope)

        dict_output = {'xoutlet': xoutlet, 'youtlet': youtlet,
                       'cat': category,
                       'attrs': (Qc, slope, 9810.0 * Qc * slope,
                                 total_length, elev_outlet,
                                 drops[0], drops[1], drops[2],
                                 drops[0] / 100., drops[1] / 200.,
                                 drops[2] / 400.)}
        output_points.append(dict_output)
        print(category)
        category += 1
        print(category)

    # cleaning part
    if elev_renamed:
        grass.run_command('g.rename', raster='elevation,%s' % dem)
    grass.del_temp_region()
    grass.run_command('g.remove', flags='f', type='raster',
                      name='main_stream,basin,circle,drainage,horton,'
                           'raster_streams,slope_drain_into')
    grass.run_command('g.remove', flags='f', type='vector',
                      name='main_stream,nodes,outlet')
    grass.run_command('g.remove', type='vector', pattern='main_stream*',
                      flags='f')

    # creation of the output data container
    print(output_points)
    new = VectorTopo('output')
    COLS = [(u'cat', 'INTEGER PRIMARY KEY'),
            (u'discharge', u'double precision'),
            (u'local_upslope', u'double precision'),
            (u'TSP_local', u'double precision'),
            (u'ch_len', u'double precision'),
            (u'elev', u'double precision'),
            (u'drop100', u'double precision'),
            (u'drop200', u'double precision'),
            (u'drop400', u'double precision'),
            (u'upslope_100', u'double precision'),
            (u'upslope_200', u'double precision'),
            (u'upslope_400', u'double precision')]
    new.open('w', tab_name='output', tab_cols=COLS)
    for elem in output_points:
        new.write(Point(elem['xoutlet'], elem['youtlet']),
                  cat=elem['cat'], attrs=elem['attrs'])
    new.table.conn.commit()
    new.table.execute().fetchall()
    new.close()
def vect(stream_in_name, stream_out_name, direction_in_name,
         accumulation_in_name, distance_in_name):
    '''Builds vector map from stream raster map.'''
    # Instantiate maps
    print("Fetching maps...")
    stream_in = RasterRowIO(stream_in_name)
    direction_in = RasterSegment(direction_in_name)
    accumulation_in = RasterSegment(accumulation_in_name)
    distance_in = RasterSegment(distance_in_name)

    # Initialize output
    stream_out = VectorTopo(stream_out_name)

    # Define the new vector map attribute table columns
    columns = [(u"cat", "INTEGER PRIMARY KEY"),
               (u"fid", "INTEGER"),
               (u"accum", "DOUBLE"),
               (u"dist", "DOUBLE"),
               (u"source_i", "INTEGER"),
               (u"source_j", "INTEGER"),
               (u"target_i", "INTEGER"),
               (u"target_j", "INTEGER")]

    print("Opening output...")
    stream_out.open('w', tab_name=stream_out_name, tab_cols=columns)

    # Open maps
    print("Loading maps...")
    stream_in.open('r')
    direction_in.open(mode='r')
    accumulation_in.open(mode='r')
    distance_in.open(mode='r')

    # Get the current region to compute coordinates
    region = Region()
    x_shift = region.ewres * .5
    y_shift = region.nsres * .5 * (-1.0)

    print("Processing...")
    # For each stream cell...
    i = 0
    for row in stream_in:
        j = 0
        for cell in row:
            if cell < 0:
                j += 1
                continue

            # Retrieve data (direction, accumulation and distance)
            direction = direction_in[i, j]
            accumulation = accumulation_in[i, j]
            distance = distance_in[i, j]

            # Get i and j shifts from direction
            (di, dj) = shift[direction]

            # Compute unit vector start and end geo coordinates
            (source_y, source_x) = pixel2coor((j, i), region)
            (target_y, target_x) = pixel2coor((j + dj, i + di), region)

            # Build unit vector
            stream_out.write(Line([(source_x + x_shift, source_y + y_shift),
                                   (target_x + x_shift, target_y + y_shift)]),
                             (cell, accumulation, distance,
                              i, j, i + di, j + dj))
            j += 1
        i += 1

    # Commit database changes
    stream_out.table.conn.commit()

    # Close maps
    stream_in.close()
    direction_in.close()
    accumulation_in.close()
    distance_in.close()
    stream_out.close()
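# Usage sketch (map names are assumptions): build the stream network vector
# from rasters produced earlier, e.g. by r.watershed; assumes the shift
# lookup dict and pixel2coor used inside vect() are defined in this module.
vect('streams', 'streams_vect', 'drainage', 'accumulation', 'distance')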
def main():
    inputraster = options['input']
    number_lines = int(options['number_lines'])
    edge_detection_algorithm = options['edge_detection']
    no_edge_friction = int(options['no_edge_friction'])
    lane_border_multiplier = int(options['lane_border_multiplier'])
    min_tile_size = None
    if options['min_tile_size']:
        min_tile_size = float(options['min_tile_size'])
    existing_cutlines = None
    if options['existing_cutlines']:
        existing_cutlines = options['existing_cutlines'].split(',')
    tiles = options['output']
    memory = int(options['memory'])
    tiled = False

    if options['tile_width']:
        tiled = True
        gscript.message(_("Using tiles processing for edge detection"))
        width = int(options['tile_width'])
        height = int(options['tile_height'])
        overlap = int(options['overlap'])

    processes = int(options['processes'])

    global temp_maps
    temp_maps = []
    r = 'raster'
    v = 'vector'

    if existing_cutlines:
        existingcutlinesmap = 'temp_icutlines_existingcutlinesmap_%i' % os.getpid()
        if len(existing_cutlines) > 1:
            gscript.run_command('v.patch',
                                input_=existing_cutlines,
                                output=existingcutlinesmap,
                                quiet=True,
                                overwrite=True)
            existing_cutlines = existingcutlinesmap

        gscript.run_command('v.to.rast',
                            input_=existing_cutlines,
                            output=existingcutlinesmap,
                            use='val',
                            type_='line,boundary',
                            overwrite=True,
                            quiet=True)

        temp_maps.append([existingcutlinesmap, r])

    temp_edge_map = "temp_icutlines_edgemap_%d" % os.getpid()
    temp_maps.append([temp_edge_map, r])

    gscript.message(_("Creating edge map using <%s> edge detection algorithm")
                    % edge_detection_algorithm)
    if edge_detection_algorithm == 'zc':
        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'width_': int(options['zc_width']),
            'threshold': float(options['zc_threshold']),
            'quiet': True
        }

        if tiled:
            grd = GridModule('i.zc',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.zc', **kwargs)

    elif edge_detection_algorithm == 'canny':
        if not gscript.find_program('i.edge', '--help'):
            message = _("You need to install the addon i.edge to use ")
            message += _("the Canny edge detector.\n")
            message += _(" You can install the addon with 'g.extension i.edge'")
            gscript.fatal(message)

        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'low_threshold': float(options['canny_low_threshold']),
            'high_threshold': float(options['canny_high_threshold']),
            'sigma': float(options['canny_sigma']),
            'quiet': True
        }

        if tiled:
            grd = GridModule('i.edge',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             flags='n',
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.edge', flags='n', **kwargs)

    else:
        gscript.fatal("Only zero-crossing and Canny available as edge "
                      "detection algorithms.")

    region = gscript.region()
    gscript.message(_("Finding cutlines in both directions"))

    nsrange = float(region.n - region.s - region.nsres)
    ewrange = float(region.e - region.w - region.ewres)

    if nsrange > ewrange:
        hnumber_lines = number_lines
        vnumber_lines = max(int(number_lines * (ewrange / nsrange)), 1)
    else:
        vnumber_lines = number_lines
        hnumber_lines = max(int(number_lines * (nsrange / ewrange)), 1)

    # Create the lines in horizontal direction
    nsstep = float(region.n - region.s - region.nsres) / hnumber_lines
    hpointsy = [((region.n - i * nsstep) - region.nsres / 2.0)
                for i in range(0, hnumber_lines + 1)]
    hlanepointsy = [y - nsstep / 2.0 for y in hpointsy]
    hstartpoints = listzip([region.w + 0.2 * region.ewres] * len(hpointsy),
                           hpointsy)
    hstoppoints = listzip([region.e - 0.2 * region.ewres] * len(hpointsy),
                          hpointsy)
    hlanestartpoints = listzip([region.w + 0.2 * region.ewres] * len(hlanepointsy),
                               hlanepointsy)
    hlanestoppoints = listzip([region.e - 0.2 * region.ewres] * len(hlanepointsy),
                              hlanepointsy)

    hlanemap = 'temp_icutlines_hlanemap_%i' % os.getpid()
    temp_maps.append([hlanemap, v])
    temp_maps.append([hlanemap, r])

    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(hlanemap)
    new.open('w')
    for line in listzip(hlanestartpoints, hlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']

    gscript.run_command('v.to.rast',
                        input_=hlanemap,
                        output=hlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)

    hbasemap = 'temp_icutlines_hbasemap_%i' % os.getpid()
    temp_maps.append([hbasemap, r])

    # Building the cost maps using the following logic
    # - Any pixel not on an edge, nor on an existing cutline gets a
    #   no_edge_friction cost, or no_edge_friction cost x 10 if there are
    #   existing cutlines
    # - Any pixel on an edge gets a cost of 1 if there are no existing
    #   cutlines, and a cost of no_edge_friction if there are
    # - A lane line gets a very high cost (lane_border_multiplier x cost of
    #   no edge pixel - the latter depending on the existence of cutlines).
    mapcalc_expression = "%s = " % hbasemap
    mapcalc_expression += "if(isnull(%s), " % hlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (
            existingcutlinesmap, no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    hcumcost = 'temp_icutlines_hcumcost_%i' % os.getpid()
    temp_maps.append([hcumcost, r])
    hdir = 'temp_icutlines_hdir_%i' % os.getpid()
    temp_maps.append([hdir, r])

    # Create the lines in vertical direction
    ewstep = float(region.e - region.w - region.ewres) / vnumber_lines
    vpointsx = [((region.e - i * ewstep) - region.ewres / 2.0)
                for i in range(0, vnumber_lines + 1)]
    vlanepointsx = [x + ewstep / 2.0 for x in vpointsx]
    vstartpoints = listzip(vpointsx,
                           [region.n - 0.2 * region.nsres] * len(vpointsx))
    vstoppoints = listzip(vpointsx,
                          [region.s + 0.2 * region.nsres] * len(vpointsx))
    vlanestartpoints = listzip(vlanepointsx,
                               [region.n - 0.2 * region.nsres] * len(vlanepointsx))
    vlanestoppoints = listzip(vlanepointsx,
                              [region.s + 0.2 * region.nsres] * len(vlanepointsx))

    vlanemap = 'temp_icutlines_vlanemap_%i' % os.getpid()
    temp_maps.append([vlanemap, v])
    temp_maps.append([vlanemap, r])

    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(vlanemap)
    new.open('w')
    for line in listzip(vlanestartpoints, vlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']

    gscript.run_command('v.to.rast',
                        input_=vlanemap,
                        output=vlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)

    vbasemap = 'temp_icutlines_vbasemap_%i' % os.getpid()
    temp_maps.append([vbasemap, r])
    mapcalc_expression = "%s = " % vbasemap
    mapcalc_expression += "if(isnull(%s), " % vlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (
            existingcutlinesmap, no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    vcumcost = 'temp_icutlines_vcumcost_%i' % os.getpid()
    temp_maps.append([vcumcost, r])
    vdir = 'temp_icutlines_vdir_%i' % os.getpid()
    temp_maps.append([vdir, r])

    if processes > 1:
        pmemory = memory / 2.0
        rcv = gscript.start_command('r.cost',
                                    input_=vbasemap,
                                    startcoordinates=vstartpoints,
                                    stopcoordinates=vstoppoints,
                                    output=vcumcost,
                                    outdir=vdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)

        rch = gscript.start_command('r.cost',
                                    input_=hbasemap,
                                    startcoordinates=hstartpoints,
                                    stopcoordinates=hstoppoints,
                                    output=hcumcost,
                                    outdir=hdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)
        rcv.wait()
        rch.wait()
    else:
        gscript.run_command('r.cost',
                            input_=vbasemap,
                            startcoordinates=vstartpoints,
                            stopcoordinates=vstoppoints,
                            output=vcumcost,
                            outdir=vdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)

        gscript.run_command('r.cost',
                            input_=hbasemap,
                            startcoordinates=hstartpoints,
                            stopcoordinates=hstoppoints,
                            output=hcumcost,
                            outdir=hdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)

    hlines = 'temp_icutlines_hlines_%i' % os.getpid()
    temp_maps.append([hlines, r])
    vlines = 'temp_icutlines_vlines_%i' % os.getpid()
    temp_maps.append([vlines, r])

    if processes > 1:
        rdh = gscript.start_command('r.drain',
                                    input_=hcumcost,
                                    direction=hdir,
                                    startcoordinates=hstoppoints,
                                    output=hlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)

        rdv = gscript.start_command('r.drain',
                                    input_=vcumcost,
                                    direction=vdir,
                                    startcoordinates=vstoppoints,
                                    output=vlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)
        rdh.wait()
        rdv.wait()
    else:
        gscript.run_command('r.drain',
                            input_=hcumcost,
                            direction=hdir,
                            startcoordinates=hstoppoints,
                            output=hlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)

        gscript.run_command('r.drain',
                            input_=vcumcost,
                            direction=vdir,
                            startcoordinates=vstoppoints,
                            output=vlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)

    # Combine horizontal and vertical lines
    temp_raster_tile_borders = 'temp_icutlines_raster_tile_borders_%i' % os.getpid()
    temp_maps.append([temp_raster_tile_borders, r])
    gscript.run_command('r.patch',
                        input_=[hlines, vlines],
                        output=temp_raster_tile_borders,
                        quiet=True,
                        overwrite=True)

    gscript.message(_("Creating vector polygons"))

    # Create vector polygons

    # First we need to shrink the region a bit to make sure that all vector
    # points / lines fall within the raster
    gscript.use_temp_region()
    gscript.run_command('g.region',
                        s=region.s + region.nsres,
                        e=region.e - region.ewres,
                        quiet=True)

    region_map = 'temp_icutlines_region_map_%i' % os.getpid()
    temp_maps.append([region_map, v])
    temp_maps.append([region_map, r])
    gscript.run_command('v.in.region',
                        output=region_map,
                        type_='line',
                        quiet=True,
                        overwrite=True)

    gscript.del_temp_region()

    gscript.run_command('v.to.rast',
                        input_=region_map,
                        output=region_map,
                        use='val',
                        type_='line',
                        quiet=True,
                        overwrite=True)

    temp_raster_polygons = 'temp_icutlines_raster_polygons_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons, r])
    gscript.run_command('r.patch',
                        input_=[temp_raster_tile_borders, region_map],
                        output=temp_raster_polygons,
                        quiet=True,
                        overwrite=True)

    temp_raster_polygons_thin = 'temp_icutlines_raster_polygons_thin_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons_thin, r])
    gscript.run_command('r.thin',
                        input_=temp_raster_polygons,
                        output=temp_raster_polygons_thin,
                        quiet=True,
                        overwrite=True)

    # Create a series of temporary map names as we have to go
    # through several steps until we reach the final map.
    temp_vector_polygons1 = 'temp_icutlines_vector_polygons1_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons1, v])
    temp_vector_polygons2 = 'temp_icutlines_vector_polygons2_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons2, v])
    temp_vector_polygons3 = 'temp_icutlines_vector_polygons3_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons3, v])
    temp_vector_polygons4 = 'temp_icutlines_vector_polygons4_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons4, v])

    gscript.run_command('r.to.vect',
                        input_=temp_raster_polygons_thin,
                        output=temp_vector_polygons1,
                        type_='line',
                        flags='t',
                        quiet=True,
                        overwrite=True)

    # Erase all category values from the lines
    gscript.run_command('v.category',
                        input_=temp_vector_polygons1,
                        op='del',
                        cat='-1',
                        output=temp_vector_polygons2,
                        quiet=True,
                        overwrite=True)

    # Transform lines to boundaries
    gscript.run_command('v.type',
                        input_=temp_vector_polygons2,
                        from_type='line',
                        to_type='boundary',
                        output=temp_vector_polygons3,
                        quiet=True,
                        overwrite=True)

    # Add centroids
    gscript.run_command('v.centroids',
                        input_=temp_vector_polygons3,
                        output=temp_vector_polygons4,
                        quiet=True,
                        overwrite=True)

    # If a threshold is given, erase polygons that are too small
    if min_tile_size:
        gscript.run_command('v.clean',
                            input_=temp_vector_polygons4,
                            tool=['rmdangle', 'rmarea'],
                            threshold=[-1, min_tile_size],
                            output=tiles,
                            quiet=True,
                            overwrite=True)
    else:
        gscript.run_command('g.copy',
                            vect=[temp_vector_polygons4, tiles],
                            quiet=True,
                            overwrite=True)

    gscript.vector_history(tiles)
def main(): in_vector = options["input"].split("@")[0] if len(options["input"].split("@")) > 1: in_mapset = options["input"].split("@")[1] else: in_mapset = None raster_maps = options["raster"].split( ",") # raster file(s) to extract from output = options["output"] methods = tuple(options["methods"].split(",")) percentile = (None if options["percentile"] == "" else map( float, options["percentile"].split(","))) column_prefix = tuple(options["column_prefix"].split(",")) buffers = options["buffers"].split(",") types = options["type"].split(",") layer = options["layer"] sep = options["separator"] update = flags["u"] tabulate = flags["t"] percent = flags["p"] remove = flags["r"] use_label = flags["l"] empty_buffer_warning = ( "No data in raster map {} within buffer {} around geometry {}") # Do checks using pygrass for rmap in raster_maps: r_map = RasterAbstractBase(rmap) if not r_map.exist(): grass.fatal("Could not find raster map {}.".format(rmap)) user_mask = False m_map = RasterAbstractBase("MASK", Mapset().name) if m_map.exist(): grass.warning("Current MASK is temporarily renamed.") user_mask = True unset_mask() invect = VectorTopo(in_vector) if not invect.exist(): grass.fatal("Vector file {} does not exist".format(in_vector)) if output: if output == "-": out = None else: out = open(output, "w") # Check if input map is in current mapset (and thus editable) if in_mapset and unicode(in_mapset) != unicode(Mapset()): grass.fatal( "Input vector map is not in current mapset and cannot be modified. \ Please consider copying it to current mapset.".format( output)) buffers = [] for buf in options["buffers"].split(","): try: b = float(buf) if b.is_integer(): buffers.append(int(b)) else: buffers.append(b) except: grass.fatal("") if b < 0: grass.fatal("Negative buffer distance not supported!") ### Define column types depenting on statistic, map type and ### DB backend (SQLite supports only double and not real) # int: statistic produces allways integer precision # double: statistic produces allways floating point precision # map_type: precision f statistic depends on map type int_dict = { "number": (0, "int", "n"), "number_null": (1, "int", "null_cells"), "minimum": (3, "map_type", "min"), "maximum": (4, "map_type", "max"), "range": (5, "map_type", "range"), "average": (6, "double", "mean"), "average_abs": (7, "double", "mean_of_abs"), "stddev": (8, "double", "stddev"), "variance": (9, "double", "variance"), "coeff_var": (10, "double", "coeff_var"), "sum": (11, "map_type", "sum"), "first_quartile": (12, "map_type", "first_quartile"), "median": (13, "map_type", "median"), "third_quartile": (14, "map_type", "third_quartile"), "percentile": (15, "map_type", "percentile"), } if len(raster_maps) != len(column_prefix): grass.fatal( "Number of maps and number of column prefixes has to be equal!") # Generate list of required column names and types col_names = [] valid_labels = [] col_types = [] for p in column_prefix: rmaptype, val_lab, rcats = raster_type( raster_maps[column_prefix.index(p)], tabulate, use_label) valid_labels.append(val_lab) for b in buffers: b_str = str(b).replace(".", "_") if tabulate: if rmaptype == "double precision": grass.fatal( "{} has floating point precision. 
Can only tabulate integer maps" .format(raster_maps[column_prefix.index(p)])) col_names.append("{}_{}_b{}".format(p, "ncats", b_str)) col_types.append("int") col_names.append("{}_{}_b{}".format(p, "mode", b_str)) col_types.append("int") col_names.append("{}_{}_b{}".format(p, "null", b_str)) col_types.append("double precision") col_names.append("{}_{}_b{}".format(p, "area_tot", b_str)) col_types.append("double precision") for rcat in rcats: if use_label and valid_labels: rcat = rcat[0].replace(" ", "_") else: rcat = rcat[1] col_names.append("{}_{}_b{}".format(p, rcat, b_str)) col_types.append("double precision") else: for m in methods: col_names.append("{}_{}_b{}".format( p, int_dict[m][2], b_str)) col_types.append(rmaptype if int_dict[m][1] == "map_type" else int_dict[m][1]) if percentile: for perc in percentile: col_names.append("{}_percentile_{}_b{}".format( p, int(perc) if (perc).is_integer() else perc, b_str)) col_types.append(rmaptype if int_dict[m][1] == "map_type" else int_dict[m][1]) # Open input vector map in_vect = VectorTopo(in_vector, layer=layer) in_vect.open(mode="r") # Get name for temporary map global TMP_MAPS TMP_MAPS.append(tmp_map) # Setup stats collectors if tabulate: # Collector for raster category statistics stats = Module("r.stats", run_=False, stdout_=PIPE) stats.inputs.sort = "desc" stats.inputs.null_value = "null" stats.flags.quiet = True stats.flags.l = True if percent: stats.flags.p = True stats.flags.n = True else: stats.flags.a = True else: # Collector for univariat statistics univar = Module("r.univar", run_=False, stdout_=PIPE) univar.inputs.separator = sep univar.flags.g = True univar.flags.quiet = True # Add extended statistics if requested if set(methods).intersection( set(["first_quartile", "median", "third_quartile"])): univar.flags.e = True if percentile is not None: univar.flags.e = True univar.inputs.percentile = percentile # Check if attribute table exists if not output: if not in_vect.table: grass.fatal( "No attribute table found for vector map {}".format(in_vect)) # Modify table as needed tab = in_vect.table tab_name = tab.name tab_cols = tab.columns # Add required columns existing_cols = list(set(tab_cols.names()).intersection(col_names)) if len(existing_cols) > 0: if not update: in_vect.close() grass.fatal( "Column(s) {} already exist! Please use the u-flag \ if you want to update values in those columns". 
format(",".join(existing_cols))) else: grass.warning("Column(s) {} already exist!".format( ",".join(existing_cols))) for e in existing_cols: idx = col_names.index(e) del col_names[idx] del col_types[idx] tab_cols.add(col_names, col_types) conn = tab.conn cur = conn.cursor() sql_str_start = "UPDATE {} SET ".format(tab_name) elif output == "-": print("cat{0}raster_map{0}buffer{0}statistic{0}value".format(sep)) else: out.write("cat{0}raster_map{0}buffer{0}statistic{0}value{1}".format( sep, os.linesep)) # Get computational region grass.use_temp_region() r = Region() r.read() # Adjust region extent to buffer around geometry # reg = deepcopy(r) # Create iterator for geometries of all selected types geoms = chain() geoms_n = 0 n_geom = 1 for geom_type in types: geoms_n += in_vect.number_of(geom_type) if in_vect.number_of(geom_type) > 0: geoms = chain(in_vect.viter(geom_type)) # Loop over geometries for geom in geoms: # Get cat cat = geom.cat # Add where clause to UPDATE statement sql_str_end = " WHERE cat = {};".format(cat) # Loop over ser provided buffer distances for buf in buffers: b_str = str(buf).replace(".", "_") # Buffer geometry if buf <= 0: buffer_geom = geom else: buffer_geom = geom.buffer(buf) # Create temporary vector map with buffered geometry tmp_vect = VectorTopo(tmp_map, quiet=True) tmp_vect.open(mode="w") tmp_vect.write(Boundary(points=buffer_geom[0].to_list())) # , c_cats=int(cat), set_cats=True if callable(buffer_geom[1]): tmp_vect.write(Centroid(x=buffer_geom[1]().x, y=buffer_geom[1]().y), cat=int(cat)) else: tmp_vect.write(Centroid(x=buffer_geom[1].x, y=buffer_geom[1].y), cat=int(cat)) ################################################# # How to silence VectorTopo??? ################################################# # Save current stdout # original = sys.stdout # f = open(os.devnull, 'w') # with open('output.txt', 'w') as f: # sys.stdout = io.BytesIO() # sys.stdout.fileno() = os.devnull # sys.stderr = f # os.environ.update(dict(GRASS_VERBOSE='0')) tmp_vect.close(build=False) grass.run_command("v.build", map=tmp_map, quiet=True) # os.environ.update(dict(GRASS_VERBOSE='1')) # reg = Region() # reg.read() # r.from_vect(tmp_map) r = align_current(r, buffer_geom[0].bbox()) r.write() # Check if the following is needed # needed specially with r.stats -p # grass.run_command('g.region', vector=tmp_map, flags='a') # Create a MASK from buffered geometry if user_mask: grass.run_command( "v.to.rast", input=tmp_map, output=tmp_map, use="val", value=int(cat), quiet=True, ) mc_expression = ( "MASK=if(!isnull({0}) && !isnull({0}_MASK), {1}, null())". 
format(tmp_map, cat)) grass.run_command("r.mapcalc", expression=mc_expression, quiet=True) else: grass.run_command( "v.to.rast", input=tmp_map, output="MASK", use="val", value=int(cat), quiet=True, ) # reg.write() updates = [] # Compute statistics for every raster map for rm, rmap in enumerate(raster_maps): # rmap = raster_maps[rm] prefix = column_prefix[rm] if tabulate: # Get statistics on occurrence of raster categories within buffer stats.inputs.input = rmap stats.run() t_stats = (stats.outputs["stdout"].value.rstrip( os.linesep).replace(" ", " ").replace( "no data", "no_data").replace( " ", "_b{} = ".format(b_str)).split(os.linesep)) if t_stats == [""]: grass.warning( empty_buffer_warning.format(rmap, buf, cat)) continue if (t_stats[0].split( "_b{} = ".format(b_str))[0].split("_")[-1] != "null"): mode = (t_stats[0].split( "_b{} = ".format(b_str))[0].split("_")[-1]) elif len(t_stats) == 1: mode = "NULL" else: mode = (t_stats[1].split( "_b{} = ".format(b_str))[0].split("_")[-1]) if not output: updates.append("\t{}_{}_b{} = {}".format( prefix, "ncats", b_str, len(t_stats))) updates.append("\t{}_{}_b{} = {}".format( prefix, "mode", b_str, mode)) area_tot = 0 for l in t_stats: # check if raster maps has category or not if len(l.split("=")) == 2: updates.append("\t{}_{}".format( prefix, l.rstrip("%"))) elif not l.startswith("null"): vals = l.split("=") updates.append("\t{}_{} = {}".format( prefix, vals[-2].strip() if valid_labels[rm] else vals[0].strip(), vals[-1].strip().rstrip("%"), )) if not l.startswith("null"): area_tot += float( l.rstrip("%").split("= ")[-1]) if not percent: updates.append("\t{}_{}_b{} = {}".format( prefix, "area_tot", b_str, area_tot)) else: out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format( sep, cat, prefix, buf, "ncats", len(t_stats), os.linesep) out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format( sep, cat, prefix, buf, "mode", mode, os.linesep) area_tot = 0 for l in t_stats: rcat = (l.split("= ")[1].rstrip( "_b{} = ".format(b_str)) if valid_labels[rm] else l.split("_")[0]) area = l.split("= ")[-1] out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format( sep, cat, prefix, buf, "area {}".format(rcat), area, os.linesep, ) if rcat != "null": area_tot = area_tot + float( l.rstrip("%").split("= ")[-1]) if not percent: out_str += "{1}{0}{2}{0}{3}{0}{4}{0}{5}{6}".format( sep, cat, prefix, buf, "area total", area_tot, os.linesep, ) if output == "-": print(out_str.rstrip(os.linesep)) else: out.write(out_str) else: # Get univariate statistics within buffer univar.inputs.map = rmap univar.run() u_stats = (univar.outputs["stdout"].value.rstrip( os.linesep).replace( "=", "_b{} = ".format(b_str)).split(os.linesep)) # Test if u_stats is empty and give warning # Needs to be adjusted to number of requested stats? 
if ((percentile and len(u_stats) < 14) or (univar.flags.e and len(u_stats) < 13) or len(u_stats) < 12): grass.warning( empty_buffer_warning.format(rmap, buf, cat)) break # Extract statistics for selected methods for m in methods: if not output: # Add to list of UPDATE statements updates.append("\t{}_{}".format( prefix, u_stats[int_dict[m][0]] if is_number( u_stats[int_dict[m][0]].split(" = ")[1]) else " = ".join([ u_stats[int_dict[m][0]].split(" = ")[0], "NULL", ]), )) else: out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format( sep, cat, prefix, buf, m, u_stats[int_dict[m][0]].split("= ")[1], ) if output == "-": print(out_str) else: out.write("{}{}".format(out_str, os.linesep)) if percentile: perc_count = 0 for perc in percentile: if not output: updates.append( "{}_percentile_{}_b{} = {}".format( p, int(perc) if (perc).is_integer() else perc, b_str, u_stats[15 + perc_count].split("= ")[1], )) else: out_str = "{1}{0}{2}{0}{3}{0}{4}{0}{5}".format( sep, cat, prefix, buf, "percentile_{}".format( int(perc) if ( perc).is_integer() else perc), u_stats[15 + perc_count].split("= ")[1], ) if output == "-": print(out_str) else: out.write(out_str) perc_count = perc_count + 1 if not output and len(updates) > 0: cur.execute("{}{}{}".format(sql_str_start, ",\n".join(updates), sql_str_end)) # Remove temporary maps # , stderr=os.devnull, stdout_=os.devnull) grass.run_command("g.remove", flags="f", type="raster", name="MASK", quiet=True) grass.run_command("g.remove", flags="f", type="vector", name=tmp_map, quiet=True) # Give progress information grass.percent(n_geom, geoms_n, 1) n_geom = n_geom + 1 if not output: conn.commit() # Close cursor and DB connection if not output and not output == "-": cur.close() conn.close() # Update history grass.vector.vector_history(in_vector) elif output != "-": # write results to file out.close() if remove and not output: dropcols = [] selectnum = "select count({}) from {}" for i in col_names: thisrow = grass.read_command("db.select", flags="c", sql=selectnum.format(i, in_vector)) if int(thisrow) == 0: dropcols.append(i) grass.debug("Columns to delete: {}".format(", ".join(dropcols)), debug=2) if dropcols: grass.run_command("v.db.dropcolumn", map=in_vector, columns=dropcols)
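# A minimal, self-contained sketch of how the UPDATE statements above are
# assembled: each buffer/statistic pair contributes one "column = value"
# fragment to `updates`, and the fragments are joined into a single statement
# per category. Table and column names here are made up for illustration.
def build_update(tab_name, updates, cat):
    """Join column fragments into one UPDATE statement for a category."""
    sql_str_start = "UPDATE {} SET ".format(tab_name)
    sql_str_end = " WHERE cat = {};".format(cat)
    return "{}{}{}".format(sql_str_start, ",\n".join(updates), sql_str_end)

# Example: two statistics for the geometry with cat=5
# print(build_update("mytable", ["\tlanduse_mean_b100 = 4.2",
#                                "\tlanduse_n_b100 = 138"], 5))
# -> UPDATE mytable SET <tab>landuse_mean_b100 = 4.2,
#    <tab>landuse_n_b100 = 138 WHERE cat = 5;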
def main():
    input = options['input']
    if options['refline']:
        refline_cat = int(options['refline'])
    else:
        refline_cat = None
    nb_vertices = int(options['vertices'])
    if options['range']:
        search_range = float(options['range'])
    else:
        search_range = None
    output = options['output']
    transversals = flags['t']
    median = flags['m']

    global tmp_points_map
    global tmp_centerpoints_map
    global tmp_line_map
    global tmp_cleaned_map
    global tmp_map
    tmp_points_map = 'points_map_tmp_%d' % os.getpid()
    tmp_centerpoints_map = 'centerpoints_map_tmp_%d' % os.getpid()
    tmp_line_map = 'line_map_tmp_%d' % os.getpid()
    tmp_cleaned_map = 'cleaned_map_tmp_%d' % os.getpid()
    tmp_map = 'generaluse_map_tmp_%d' % os.getpid()

    nb_lines = grass.vector_info_topo(input)['lines']

    # Find best reference line and max distance between centerpoints of lines
    segment_input = ''
    categories = grass.pipe_command('v.category', input=input,
                                    option='print', quiet=True)
    for category in categories.stdout:
        segment_input += 'P ' + category.strip()
        segment_input += ' ' + category.strip() + ' 50%\n'

    grass.write_command('v.segment', input=input, output=tmp_centerpoints_map,
                        rules='-', stdin=segment_input, quiet=True)

    center_distances = grass.pipe_command('v.distance',
                                          from_=tmp_centerpoints_map,
                                          to=tmp_centerpoints_map,
                                          upload='dist', flags='pa', quiet=True)
    cats = []
    mean_dists = []
    count = 0
    distmax = 0
    for center in center_distances.stdout:
        if count < 2:
            count += 1
            continue
        cat = center.strip().split('|')[0]
        distsum = 0
        for x in center.strip().split('|')[1:]:
            distsum += float(x)
        mean_dist = distsum / len(center.strip().split('|')[1:])
        cats.append(cat)
        mean_dists.append(mean_dist)

    if transversals and not search_range:
        search_range = sum(mean_dists) / len(mean_dists)
        grass.message(_("Calculated search range: %.5f." % search_range))

    if not refline_cat:
        refline_cat = sorted(zip(cats, mean_dists),
                             key=lambda tup: tup[1])[0][0]
        grass.message(_("Category number of chosen reference line: %s." % refline_cat))

    # Use transversals algorithm
    if transversals:
        # Break any intersections in the original lines so that
        # they do not interfere further on
        grass.run_command('v.clean', input=input, output=tmp_cleaned_map,
                          tool='break', quiet=True)

        xmean = []
        ymean = []
        xmedian = []
        ymedian = []
        step = 100.0 / nb_vertices

        os.environ['GRASS_VERBOSE'] = '-1'

        for vertice in range(0, nb_vertices + 1):
            # v.segment sometimes cannot find points when
            # using 0% or 100% offset
            length_offset = step * vertice
            if length_offset < 0.00001:
                length_offset = 0.00001
            if length_offset > 99.99999:
                length_offset = 99.9999
            # Create endpoints of transversal
            segment_input = 'P 1 %s %.5f%% %f\n' % (refline_cat, length_offset,
                                                    search_range)
            segment_input += 'P 2 %s %.5f%% %f\n' % (refline_cat, length_offset,
                                                     -search_range)
            grass.write_command('v.segment', input=input,
                                output=tmp_points_map, stdin=segment_input,
                                overwrite=True)

            # Create transversal
            grass.write_command('v.net', points=tmp_points_map,
                                output=tmp_line_map, operation='arcs',
                                file='-', stdin='99999 1 2', overwrite=True)

            # Patch transversal onto cleaned input lines
            maps = tmp_cleaned_map + ',' + tmp_line_map
            grass.run_command('v.patch', input=maps, out=tmp_map, overwrite=True)

            # Find intersections
            grass.run_command('v.clean', input=tmp_map, out=tmp_line_map,
                              tool='break', error=tmp_points_map,
                              overwrite=True)

            # Add categories to intersection points
            grass.run_command('v.category', input=tmp_points_map, out=tmp_map,
                              op='add', overwrite=True)

            # Get coordinates of points
            coords = grass.pipe_command('v.to.db', map=tmp_map, op='coor',
                                        flags='p')
            count = 0
            x = []
            y = []
            for coord in coords.stdout:
                x.append(float(coord.strip().split('|')[1]))
                y.append(float(coord.strip().split('|')[2]))

            # Calculate mean and median for this transversal
            if len(x) > 0:
                xmean.append(sum(x) / len(x))
                ymean.append(sum(y) / len(y))

                x.sort()
                y.sort()

                # integer division so the indices are valid in Python 3 as well
                xmedian.append((x[(len(x) - 1) // 2] + x[(len(x)) // 2]) / 2)
                ymedian.append((y[(len(y) - 1) // 2] + y[(len(y)) // 2]) / 2)

        del os.environ['GRASS_VERBOSE']

    # Use closest point algorithm
    else:
        # Get reference line and calculate its length
        grass.run_command('v.extract', input=input, output=tmp_line_map,
                          cats=refline_cat, quiet=True)

        os.environ['GRASS_VERBOSE'] = '0'
        lpipe = grass.pipe_command('v.to.db', map=tmp_line_map, op='length',
                                   flags='p')
        del os.environ['GRASS_VERBOSE']

        for l in lpipe.stdout:
            linelength = float(l.strip().split('|')[1])

        step = linelength / nb_vertices

        # Create reference points for vertice calculation
        grass.run_command('v.to.points', input=tmp_line_map,
                          output=tmp_points_map, dmax=step, quiet=True)

        nb_points = grass.vector_info_topo(tmp_points_map)['points']

        cat = []
        x = []
        y = []

        # Get coordinates of closest points on all input lines
        if search_range:
            points = grass.pipe_command('v.distance', from_=tmp_points_map,
                                        from_layer=2, to=input,
                                        upload='to_x,to_y', dmax=search_range,
                                        flags='pa', quiet=True)
        else:
            points = grass.pipe_command('v.distance', from_=tmp_points_map,
                                        from_layer=2, to=input,
                                        upload='to_x,to_y', flags='pa',
                                        quiet=True)

        firstline = True
        for point in points.stdout:
            if firstline:
                firstline = False
                continue
            cat.append((int(point.strip().split('|')[0])))
            x.append(float(point.strip().split('|')[2]))
            y.append(float(point.strip().split('|')[3]))

        # Calculate mean coordinates
        xsum = [0] * nb_points
        ysum = [0] * nb_points
        linecount = [0] * nb_points

        for i in range(len(cat)):
            index = cat[i] - 1
            linecount[index] += 1
            xsum[index] = xsum[index] + x[i]
            ysum[index] = ysum[index] + y[i]

        xmean = [0] * nb_points
        ymean = [0] * nb_points

        for c in range(0, nb_points):
            xmean[c] = xsum[c] / linecount[c]
            ymean[c] = ysum[c] / linecount[c]

        # Calculate the median
        xmedian = [0] * nb_points
        ymedian = [0] * nb_points

        for c in range(0, nb_points):
            xtemp = []
            ytemp = []
            for i in range(len(cat)):
                if cat[i] == c + 1:
                    xtemp.append(x[i])
                    ytemp.append(y[i])
            xtemp.sort()
            ytemp.sort()
            xmedian[c] = (xtemp[(len(xtemp) - 1) // 2] + xtemp[(len(xtemp)) // 2]) / 2
            ymedian[c] = (ytemp[(len(ytemp) - 1) // 2] + ytemp[(len(ytemp)) // 2]) / 2

    # Create new line and write to file
    if median and nb_lines > 2:
        line = geo.Line(list(zip(xmedian, ymedian)))
    else:
        if median and nb_lines <= 2:
            grass.message(_("More than 2 lines necessary for median, using mean."))
        line = geo.Line(list(zip(xmean, ymean)))

    new = VectorTopo(output)
    new.open('w')
    new.write(line)
    new.close()
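# The median above is computed from a sorted list as
# (a[(n - 1) // 2] + a[n // 2]) / 2, which handles both odd and even n:
# for odd n both indices point at the middle element, for even n they
# bracket it. A minimal standalone check:
def median_sorted(a):
    """Median of an already sorted, non-empty sequence."""
    n = len(a)
    return (a[(n - 1) // 2] + a[n // 2]) / 2.0

assert median_sorted([1, 2, 3]) == 2.0      # odd length
assert median_sorted([1, 2, 3, 4]) == 2.5   # even length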
def full_maps(parsed_obs, offering, seconds_granularity, resolution,
              event_time, target):
    """Create raster maps.

    Maps represent offerings, observed properties and procedures.

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group them up
    :param seconds_granularity: Granularity in seconds
    :param resolution: 2D grid resolution for rasterization
    :param event_time: Timestamp of first/of last requested observation
    :param target:
    """
    timestamp_pattern = '%Y-%m-%dT%H:%M:%S'  # TODO: Timezone
    start_time = event_time.split('+')[0]
    epoch_s = int(time.mktime(time.strptime(start_time, timestamp_pattern)))
    end_time = event_time.split('+')[1].split('/')[1]
    epoch_e = int(time.mktime(time.strptime(end_time, timestamp_pattern)))

    for key, observation in parsed_obs.items():
        print('Creating raster maps for offering '
              '{}, observed property {}'.format(offering, key))

        data = json.loads(observation)
        crs = data['crs']
        crs = int(crs['properties']['name'].split(':')[-1])
        transform = soslib.get_transformation(crs, target)

        cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'name', 'VARCHAR'),
                (u'value', 'DOUBLE')]

        geometries = dict()
        intervals = {}
        for secondsStamp in range(epoch_s, epoch_e + 1, seconds_granularity):
            intervals.update({secondsStamp: dict()})

        timestamp_pattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone

        for a in data['features']:
            name = a['properties']['name']

            sx, sy, sz = a['geometry']['coordinates']
            point = ogr.CreateGeometryFromWkt('POINT ({} {} {})'.format(sx, sy, sz))
            point.Transform(transform)
            coords = (point.GetX(), point.GetY(), point.GetZ())
            geometries.update({name: coords})

            for timestamp, value in a['properties'].items():
                if timestamp != 'name':
                    observation_start_time = timestamp[:-4]
                    seconds_timestamp = int(time.mktime(
                        time.strptime(observation_start_time, timestamp_pattern)))
                    for interval in intervals.keys():
                        if interval <= seconds_timestamp < (interval + seconds_granularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(float(value))
                            else:
                                intervals[interval].update({name: [float(value)]})
                            break

        for interval in intervals.keys():
            if len(intervals[interval]) != 0:
                timestamp = datetime.datetime.fromtimestamp(interval).strftime('t%Y%m%dT%H%M%S')

                table_name = '{}_{}_{}_{}'.format(options['output'], offering,
                                                  key, timestamp)
                if ':' in table_name:
                    table_name = '_'.join(table_name.split(':'))
                if '-' in table_name:
                    table_name = '_'.join(table_name.split('-'))
                if '.' in table_name:
                    table_name = '_'.join(table_name.split('.'))

                new = VectorTopo(table_name)
                if overwrite() is True:
                    try:
                        new.remove()
                    except Exception:
                        # the map does not exist yet
                        pass

                new.open(mode='w', layer=1, tab_name=table_name,
                         link_name=table_name, tab_cols=cols, overwrite=True)
                i = 0
                n = None
                s = None
                e = None
                w = None

                for procedure, values in intervals[interval].items():
                    if new.exist() is False:
                        i = 1
                    else:
                        i += 1

                    if options['method'] == 'average':
                        value = sum(values) / len(values)
                    elif options['method'] == 'sum':
                        value = sum(values)
                    # TODO: Other aggregation methods

                    new.write(Point(*geometries[procedure]), cat=i,
                              attrs=(procedure, value,))

                    if options['bbox'] == '':
                        x, y, z = geometries[procedure]
                        if not n:
                            n = y + resolution / 2
                            s = y - resolution / 2
                            e = x + resolution / 2
                            w = x - resolution / 2
                        else:
                            if y >= n:
                                n = y + resolution / 2
                            if y <= s:
                                s = y - resolution / 2
                            if x >= e:
                                e = x + resolution / 2
                            if x <= w:
                                w = x - resolution / 2

                new.table.conn.commit()

                new.close(build=False)
                run_command('v.build', quiet=True, map=table_name)

                if options['bbox'] == '':
                    run_command('g.region', n=n, s=s, w=w, e=e, res=resolution)

                run_command('v.to.rast', input=table_name, output=table_name,
                            use='attr', attribute_column='value', layer=1,
                            type='point', quiet=True)

                if flags['k'] is False:
                    run_command('g.remove', flags='f', type='vector',
                                name=table_name, quiet=True)
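# A minimal sketch of the time-bucketing logic used above: observations are
# grouped into fixed-width intervals [start, start + granularity) keyed by the
# interval start in epoch seconds. Function and variable names here are
# illustrative, not part of the module above.
def bucket_observations(observations, epoch_s, epoch_e, seconds_granularity):
    """Group (epoch_seconds, value) pairs into fixed-width time intervals."""
    intervals = {s: [] for s in range(epoch_s, epoch_e + 1, seconds_granularity)}
    for stamp, value in observations:
        for start in intervals:
            if start <= stamp < start + seconds_granularity:
                intervals[start].append(value)
                break
    return intervals

# Example with hourly buckets:
# bucket_observations([(3600, 1.0), (3700, 2.0), (7300, 3.0)], 0, 7200, 3600)
# -> {0: [], 3600: [1.0, 2.0], 7200: [3.0]}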
def main():
    dem = options['dem']
    TR = options['time']  # TODO: Time of concentration
    outlet = options['outlets']
    outlets = outlet.split(',')
    cleanTemporary = options['clean']
    try:
        TR = int(TR)
    except ValueError:
        print('TR is not a number')
        sys.exit()

    if cleanTemporary != 'no':
        grass.run_command('g.remove', flags='f', type='raster',
                          name='main_stream,basin,circle,drainage,horton,'
                               'raster_streams,slope_drain_into')
        grass.run_command('g.remove', flags='f', type='vector',
                          name='main_stream,nodes,outlet')
        grass.run_command('g.remove', type='vector', pattern='main_stream*',
                          flags='f')

    grass.use_temp_region()
    # get the region in order to estimate the threshold as a fraction of the
    # total cells (note: the value computed here is currently unused; a fixed
    # threshold of 700 is passed to r.watershed below)
    grass.run_command('g.region', raster=dem)
    regione = grass.region()
    threshold = float(regione['cells']) / 300

    # stream and drainage determination
    grass.run_command('r.watershed', elevation=dem, threshold=700,
                      stream='raster_streams', drainage='drainage',
                      overwrite=True, flags='s')

    # the radius is a little more than the current resolution
    radius = regione['nsres'] * 1.4
    grass.run_command('r.circle', output='circle', coordinate=outlet,
                      max=radius, overwrite=True)
    # %(str(outlets[0]),str(outlets[1]))

    # get the distances and take the shortest distance
    distances = grass.read_command('r.distance', map='circle,raster_streams')
    list_dist = distances.split('\n')
    list_dist.remove('')
    list_tuple = []
    for distance in list_dist:
        dist = distance.split(':')
        my_tupla = dist[0], dist[1], float(dist[2]), dist[3], dist[4], dist[5], dist[6]
        list_tuple.append(my_tupla)
    tuple_orderedByDistance = sorted(list_tuple, key=lambda distanza: distanza[2])
    del distances, list_tuple, list_dist

    # calculate the basin and read its statistics
    outlet = tuple_orderedByDistance[0][-2:]
    xoutlet = float(outlet[0])
    youtlet = float(outlet[1])
    grass.run_command('r.water.outlet', input='drainage', output='basin',
                      coordinates=str(xoutlet) + ',' + str(youtlet),
                      overwrite=True)
    statistics = grass.read_command('r.univar', map=dem, zones='basin')
    main_stat = statistics.splitlines()[-9:]

    # order the stream network
    grass.run_command('r.mask', raster='basin')
    grass.run_command('r.stream.order', stream_rast='raster_streams',
                      direction='drainage', elevation=dem, horton='horton',
                      overwrite=True)
    stream_stat = grass.read_command('r.stream.stats', stream_rast='horton',
                                     direction='drainage', elevation=dem,
                                     flags='o')
    network_statistics = stream_stat.split('\n')
    network_statistics.remove('')

    # get the max order
    network_statistics[-1].split()
    total_length = float(network_statistics[-1].split(',')[2])
    area_basin = float(network_statistics[-1].split(',')[3])  # area_basin in km2
    area_basin_Ha = area_basin * 100
    mean_elev = float(main_stat[3].split(':')[-1])
    min_elev = float(main_stat[0].split(':')[-1])
    max_elev = float(main_stat[1].split(':')[-1])
    deltaH = max_elev - min_elev
    average_slope = float(network_statistics[-1].split(',')[4])
    grass.run_command('r.mask', flags='r')

    TcGiandotti = (4 * np.sqrt(area_basin) + 1.5 * total_length) / \
                  (0.8 * np.sqrt(mean_elev - min_elev))
    TcKirpich = 0.945 * (total_length**3. / deltaH)**0.385

    if area_basin_Ha > 1000:
        # TODO: check the references
        corrivazione = TcGiandotti
        grass.info('using Giandotti')
        grass.info(str(TcGiandotti))
        formula = 'Giandotti'
    else:
        formula = 'Kirpich'
        corrivazione = TcKirpich
        grass.info('using Kirpich')
        grass.info(str(TcKirpich))
    if corrivazione < 24:
        aPar = 'a24@PERMANENT'
        bPar = 'b24@PERMANENT'
        kPar = 'k24@PERMANENT'
    else:
        aPar = 'a15@PERMANENT'
        bPar = 'b15@PERMANENT'
        kPar = 'k15@PERMANENT'
    CNmap = 'CN@PERMANENT'

    aStat = grass.read_command('r.univar', map=aPar, zones='basin')
    aMain_stat = float(aStat.splitlines()[12].split(':')[-1])
    bStat = grass.read_command('r.univar', map=bPar, zones='basin')
    bMain_stat = float(bStat.splitlines()[12].split(':')[-1])
    kStat = grass.read_command('r.univar', map=kPar, zones='basin')
    kMain_stat = float(kStat.splitlines()[12].split(':')[-1])
    CNstat = grass.read_command('r.univar', map=CNmap, zones='basin')
    CN = float(CNstat.splitlines()[12].split(':')[-1])

    g.message('area basin in km2: ')
    print(area_basin)
    print('mean elev: ')
    print(mean_elev - min_elev)
    print('delta H:')
    print(deltaH)
    print('total reach length: ')
    print(total_length)
    print('a mean:')
    print(aMain_stat)
    print('\n b mean: ')
    print(bMain_stat)
    print('\n k mean: ')
    print(kMain_stat)
    print('CN mean:')
    print(CN)

    ##### -------------------------
    ##### modification for verification, to be removed
    # ~ corrivazione=3.
    # ~ aMain_stat=32.5
    # ~ bMain_stat=0.33
    # ~ kMain_stat=0.42
    # ~ CN=91.
    # ~ area_basin=61.5
    # ~ area_basin_Ha=area_basin*100
    CN = 70.12 / 82.63 * CN
    # CN = 78.6
    ##### --------------------------#####

    f_K_T = 1 - kMain_stat * (0.45 + 0.799 * np.log(-np.log(1 - 1. / TR)))
    print('f(k,T): ')
    print(f_K_T)
    h = f_K_T * aMain_stat * corrivazione**bMain_stat
    print('\n h main:')
    print(h)
    X1 = 100 * corrivazione / (0.236 + 0.062 * corrivazione)
    X2 = 0.003 * corrivazione + 0.0234
    Pa = 100 - area_basin_Ha / (X1 + X2 * area_basin_Ha)
    Ha = h * Pa / 100
    S1 = (1000. / CN) - 10
    Pn = (Ha - 5.08 * S1)**2 / (Ha + 20.32 * S1)
    Qc = (1 / 360.) * Pn * area_basin_Ha / corrivazione
    print('discharge: ')
    print(Qc)

    # print(table.columns.types())
    # [u'INTEGER', u'TEXT', u'integer', u'double precision']
    '''
    ------------------------------
    START CALCULATION OF LOCAL UPSTREAM SLOPE
    ------------------------------
    '''
    # offsets for moving windows
    offsets = [d for j in range(1, 1 + 1)
               for i in [j, -j]
               for d in [(i, 0), (0, i), (i, i), (i, -i)]]
    # rename dtm as elevation for future calculation if it does not exist
    elev_renamed = False
    if not VectorTopo('elevation').exist():
        grass.run_command('g.rename', raster="%s,elevation" % dem)
        elev_renamed = True
    # define drainage directions
    drainage_incoming = [2, 4, 3, 1, 6, 8, 7, 5]
    drainage_outcoming = []
    diag_dist = (regione['nsres']**2 + regione['ewres']**2)**0.5
    # [(1, 0), (0, 1), (1, 1), (1, -1), (-1, 0), (0, -1), (-1, -1), (-1, 1),
    cell_dists = [regione['nsres'], regione['ewres'], diag_dist, diag_dist,
                  regione['nsres'], regione['ewres'], diag_dist, diag_dist]
    # define the calculation term
    terms = ["(drainage[%d,%d] == %d && not(isnull(raster_streams[0,0])) "
             "&& not(isnull(raster_streams[%d,%d])) )"
             % ((offsets[j] + tuple([drainage_incoming[j]]) + offsets[j]))
             for j in range(len(drainage_incoming))]
    # define the operation expression
    terms_calc = ["(elevation[%d,%d] - elevation) * %s"
                  % (offsets[j] + (terms[j],)) for j in range(len(terms))]
    terms_calc_slope = ["( (elevation[%d,%d] - elevation)/%10.4f ) * %s"
                        % (offsets[j] + (cell_dists[j],) + (terms[j],))
                        for j in range(len(terms))]
    expr = "num_cells_drain_into = (%s)" % " + ".join(terms)
    expr1 = "elevation_percentile4 = if(isnull(raster_streams),null(),(%s))" % " + ".join(terms)
    expr2 = "elevdiff_drain_into = %s" % " + ".join(terms_calc)
    expr3 = "slope_drain_into = %s" % " + ".join(terms_calc_slope)
    # do the r.mapcalc calculation with the moving window
    # exclude the num_cell_calculation_into
    # grass.mapcalc(expr)
    # print(expr2)
    # grass.mapcalc(expr2, overwrite=True)
    # print(expr3)
    # grass.mapcalc(expr3, overwrite=True)
    '''
    ------------------------------
    START CALCULATION OF 2KM UPSTREAM SLOPE
    ------------------------------
    '''
    # create an outlet vector
    new = VectorTopo('outlet')
    COLS = [(u'cat', 'INTEGER PRIMARY KEY')]
    new.open('w', tab_name='outlet', tab_cols=COLS)
    new.write(Point(xoutlet, youtlet), cat=1,)
    new.table.conn.commit()
    new.table.execute().fetchall()
    new.close()

    new = VectorTopo('output')
    COLS = [(u'cat', 'INTEGER PRIMARY KEY'),
            (u'discharge', u'double precision')]
    if new.exist():
        g.message('The vector exists: it will be renamed to')
        new_name = 'output' + str(datetime.datetime.now().time())[:8].replace(':', '_')
        grass.info(new_name)
        new = VectorTopo(new_name)
        new.open('w', tab_name=new_name, tab_cols=COLS)
    else:
        new.open('w', tab_name='output', tab_cols=COLS)
    new.write(Point(xoutlet, youtlet), (float(Qc),))
    new.table.conn.commit()
    new.table.execute().fetchall()
    new.close()

    mean_elev_abs = mean_elev - min_elev
    FirsPage(formula=formula, xoutlet=xoutlet, youtlet=youtlet,
             Tc=corrivazione, AreaBasin=area_basin_Ha,
             ChLen=total_length * 1000., MeanElev=mean_elev_abs,
             a=aMain_stat, b=bMain_stat, k=kMain_stat, TR=TR, f=f_K_T,
             h=h, h_ar=Ha, x1=X1, x2=X2, Pa=Pa, CN=CN, S1=S1, Pn=Pn, Qc=Qc,
             dropElev=deltaH)

    # cleaning part
    if elev_renamed:
        grass.run_command('g.rename', raster='elevation,%s' % dem)
    grass.del_temp_region()
    grass.run_command('r.to.vect', input='basin', output='basin1',
                      type='area', overwrite=True)
    if cleanTemporary != 'no':
        grass.run_command('g.remove', flags='f', type='raster',
                          name='main_stream,basin,circle,drainage,horton,'
                               'raster_streams,slope_drain_into')
        grass.run_command('g.remove', flags='f', type='vector',
                          name='main_stream,nodes,outlet')
        grass.run_command('g.remove', type='vector', pattern='main_stream*',
                          flags='f')
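# The two time-of-concentration formulas used above, isolated into standalone
# functions for clarity (a sketch; units follow the code: area in km2, reach
# length in km, elevations in m, result in hours).
import numpy as np

def tc_giandotti(area_km2, reach_len_km, mean_elev_m, min_elev_m):
    # Giandotti: Tc = (4*sqrt(A) + 1.5*L) / (0.8*sqrt(Hm - H0))
    return (4 * np.sqrt(area_km2) + 1.5 * reach_len_km) / \
           (0.8 * np.sqrt(mean_elev_m - min_elev_m))

def tc_kirpich(reach_len_km, delta_h_m):
    # Kirpich: Tc = 0.945 * (L^3 / dH)^0.385
    return 0.945 * (reach_len_km ** 3.0 / delta_h_m) ** 0.385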
def main():
    """Do the main processing
    """
    # Parse input options:
    patch_map = options['input']
    patches = patch_map.split('@')[0]
    patches_mapset = patch_map.split('@')[1] if len(patch_map.split('@')) > 1 else None
    pop_proxy = options['pop_proxy']
    layer = options['layer']
    costs = options['costs']
    cutoff = float(options['cutoff'])
    border_dist = int(options['border_dist'])
    conefor_dir = options['conefor_dir']
    memory = int(options['memory'])

    # Parse output options:
    prefix = options['prefix']
    edge_map = '{}_edges'.format(prefix)
    vertex_map = '{}_vertices'.format(prefix)
    shortest_paths = '{}_shortest_paths'.format(prefix)

    # Parse flags:
    p_flag = flags['p']
    t_flag = flags['t']
    r_flag = flags['r']

    dist_flags = 'kn' if flags['k'] else 'n'

    lin_cat = 1
    zero_dist = None

    folder = grass.tempdir()
    if not os.path.exists(folder):
        os.makedirs(folder)

    # Setup counter for progress message
    counter = 0

    # Check if location is lat/lon (only in lat/lon geodesic distance
    # measuring is supported)
    if grass.locn_is_latlong():
        grass.verbose("Location is lat/lon: Geodesic distance measure is used")

    # Check if prefix is legal GRASS name
    if not grass.legal_name(prefix):
        grass.fatal('{} is not a legal name for GRASS maps.'.format(prefix))

    if prefix[0].isdigit():
        grass.fatal('Table names starting with a digit are not SQL '
                    'compliant ({}).'.format(prefix))

    # Check if output maps do not already exist or can be overwritten
    for output in [edge_map, vertex_map, shortest_paths]:
        if grass.db.db_table_exist(output) and not grass.overwrite():
            grass.fatal('Vector map <{}> already exists'.format(output))

    # Check if input has required attributes
    in_db_connection = grass.vector.vector_db(patch_map)
    if not int(layer) in in_db_connection.keys():
        grass.fatal('No attribute table connected to vector map {} at '
                    'layer {}.'.format(patches, layer))

    # Check if cat column exists
    pcols = grass.vector.vector_columns(patch_map, layer=layer)
    if not 'cat' in pcols.keys():
        grass.fatal('Cannot find the required column cat in vector map '
                    '{}.'.format(patches))

    # Check if pop_proxy column exists
    if not pop_proxy in pcols.keys():
        grass.fatal('Cannot find column {} in vector map '
                    '{}'.format(pop_proxy, patches))

    # Check if pop_proxy column is numeric type
    if not pcols[pop_proxy]['type'] in ['INTEGER', 'REAL', 'DOUBLE PRECISION']:
        grass.fatal('Column {} is of type {}. Only numeric types (integer '
                    'or double precision) allowed!'.format(pop_proxy,
                                                           pcols[pop_proxy]['type']))

    # Check if pop_proxy column does not contain values <= 0
    pop_vals = np.fromstring(grass.read_command('v.db.select', flags='c',
                                                map=patches,
                                                columns=pop_proxy,
                                                nv=-9999).rstrip('\n'),
                             dtype=float, sep='\n')

    if np.min(pop_vals) <= 0:
        grass.fatal('Column {} contains values <= 0 or NULL. Neither values '
                    '<= 0 nor NULL allowed!'.format(pop_proxy))

    ##############################################
    # Use pygrass region instead of grass.parse_command !?!
    start_reg = grass.parse_command('g.region', flags='ugp')

    max_n = start_reg['n']
    min_s = start_reg['s']
    max_e = start_reg['e']
    min_w = start_reg['w']
    # cost_nsres = reg['nsres']
    # cost_ewres = reg['ewres']

    # Rasterize patches
    # http://www.gdal.org/gdal_tutorial.html
    # http://geoinformaticstutorial.blogspot.no/2012/11/convert-
    # shapefile-to-raster-with-gdal.html
    if t_flag:
        # Rasterize patches with "all-touched" mode using GDAL
        # Read region-settings (not needed; can use max_n, min_s, max_e,
        # min_w, nsres, ewres...)
        prast = os.path.join(folder, 'patches_rast.tif')

        # Check if GDAL-GRASS plugin is installed
        if ogr.GetDriverByName('GRASS'):
            # With GDAL-GRASS plugin
            # Locate file for patch vector map
            pfile = grass.parse_command('g.findfile', element='vector',
                                        file=patches,
                                        mapset=patches_mapset)['file']
            pfile = os.path.join(pfile, 'head')
        else:
            # Without GDAL-GRASS plugin
            grass.warning("Cannot find GDAL-GRASS plugin. Consider "
                          "installing it in order to save time for "
                          "all-touched rasterisation")
            pfile = os.path.join(folder, 'patches_vect.gpkg')
            # Export patch vector map to temp-file in a GDAL-readable
            # format (shp)
            grass.run_command('v.out.ogr', flags='m', quiet=True,
                              input=patch_map, type='area', layer=layer,
                              output=pfile, lco='GEOMETRY_NAME=geom')

        # Rasterize vector map with all-touched option
        os.system('gdal_rasterize -l {} -at -tr {} {} '
                  '-te {} {} {} {} -ot Uint32 -a cat '
                  '{} {} -q'.format(patches, start_reg['ewres'],
                                    start_reg['nsres'], start_reg['w'],
                                    start_reg['s'], start_reg['e'],
                                    start_reg['n'], pfile, prast))

        if not ogr.GetDriverByName('GRASS'):
            # Remove vector temp-file
            os.remove(os.path.join(folder, 'patches_vect.gpkg'))

        # Import rasterized patches
        grass.run_command('r.external', flags='o', quiet=True, input=prast,
                          output='{}_patches_pol'.format(TMP_PREFIX))
    else:
        # Simple rasterisation (only area)
        # in G 7.6 also with support for 'centroid'
        if float(grass.version()['version'][:3]) >= 7.6:
            conv_types = ['area', 'centroid']
        else:
            conv_types = ['area']
        grass.run_command('v.to.rast', quiet=True, input=patches, use='cat',
                          type=conv_types,
                          output='{}_patches_pol'.format(TMP_PREFIX))

    # Extract boundaries from patch raster map
    grass.run_command('r.mapcalc', quiet=True,
                      expression='{p}_patches_boundary=if({p}_patches_pol,'
                                 'if(((isnull({p}_patches_pol[-1,0])|||'
                                 '{p}_patches_pol[-1,0]!={p}_patches_pol)|||'
                                 '(isnull({p}_patches_pol[0,1])|||'
                                 '{p}_patches_pol[0,1]!={p}_patches_pol)|||'
                                 '(isnull({p}_patches_pol[1,0])|||'
                                 '{p}_patches_pol[1,0]!={p}_patches_pol)|||'
                                 '(isnull({p}_patches_pol[0,-1])|||'
                                 '{p}_patches_pol[0,-1]!={p}_patches_pol)),'
                                 '{p}_patches_pol,null()),null())'.format(p=TMP_PREFIX))

    rasterized_cats = grass.read_command(
        'r.category', separator='newline',
        map='{p}_patches_boundary'.format(p=TMP_PREFIX)).replace('\t', '').strip('\n')
    rasterized_cats = list(map(int, set([x for x in rasterized_cats.split('\n') if x != ''])))

    # Init output vector maps if they are requested by user
    network = VectorTopo(edge_map)
    network_columns = [(u'cat', 'INTEGER PRIMARY KEY'),
                       (u'from_p', 'INTEGER'),
                       (u'to_p', 'INTEGER'),
                       (u'min_dist', 'DOUBLE PRECISION'),
                       (u'dist', 'DOUBLE PRECISION'),
                       (u'max_dist', 'DOUBLE PRECISION')]
    network.open('w', tab_name=edge_map, tab_cols=network_columns)

    vertex = VectorTopo(vertex_map)
    vertex_columns = [(u'cat', 'INTEGER PRIMARY KEY'),
                      (pop_proxy, 'DOUBLE PRECISION')]
    vertex.open('w', tab_name=vertex_map, tab_cols=vertex_columns)

    if p_flag:
        # Init cost paths file for start-patch
        grass.run_command('v.edit', quiet=True, map=shortest_paths,
                          tool='create')
        grass.run_command('v.db.addtable', quiet=True, map=shortest_paths,
                          columns="cat integer, from_p integer, "
                                  "to_p integer, dist_min double precision, "
                                  "dist double precision, "
                                  "dist_max double precision")

    start_region_bbox = Bbox(north=float(max_n), south=float(min_s),
                             east=float(max_e), west=float(min_w))
    vpatches = VectorTopo(patches, mapset=patches_mapset)
    vpatches.open('r', layer=int(layer))

    ### Loop through patches
    vpatch_ids = np.array(vpatches.features_to_wkb_list(
                              feature_type="centroid", bbox=start_region_bbox),
                          dtype=[('vid', 'uint32'), ('cat', 'uint32'),
                                 ('geom', '|S10')])
    cats = set(vpatch_ids['cat'])
    n_cats = len(cats)
    if n_cats < len(vpatch_ids['cat']):
        grass.verbose('At least one MultiPolygon found in patch map. '
                      'Using average coordinates of the centroids for '
                      'visual representation of the patch.')

    for cat in cats:
        if cat not in rasterized_cats:
            grass.warning('Patch {} has not been rasterized and will '
                          'therefore not be treated as part of the network. '
                          'Consider using t-flag or change '
                          'resolution.'.format(cat))
            continue
        grass.verbose("Calculating connectivity-distances for patch "
                      "number {}".format(cat))

        # Filter
        from_vpatch = vpatch_ids[vpatch_ids['cat'] == cat]

        # Get patch ID
        if from_vpatch['vid'].size == 1:
            from_centroid = Centroid(v_id=int(from_vpatch['vid']),
                                     c_mapinfo=vpatches.c_mapinfo)
            from_x = from_centroid.x
            from_y = from_centroid.y

            # Get centroid
            if not from_centroid:
                continue
        else:
            xcoords = []
            ycoords = []
            for f_p in from_vpatch['vid']:
                from_centroid = Centroid(v_id=int(f_p),
                                         c_mapinfo=vpatches.c_mapinfo)
                xcoords.append(from_centroid.x)
                ycoords.append(from_centroid.y)

                # Get centroid
                if not from_centroid:
                    continue
            from_x = np.average(xcoords)
            from_y = np.average(ycoords)

        # Get BoundingBox
        from_bbox = grass.parse_command('v.db.select', map=patch_map,
                                        flags='r', where='cat={}'.format(cat))

        attr_filter = vpatches.table.filters.select(pop_proxy)
        attr_filter = attr_filter.where("cat={}".format(cat))
        proxy_val = vpatches.table.execute().fetchone()

        # Prepare start patch
        start_patch = '{}_patch_{}'.format(TMP_PREFIX, cat)
        reclass_rule = grass.encode('{} = 1\n* = NULL'.format(cat))
        recl = grass.feed_command('r.reclass', quiet=True,
                                  input='{}_patches_boundary'.format(TMP_PREFIX),
                                  output=start_patch, rules='-')
        recl.stdin.write(reclass_rule)
        recl.stdin.close()
        recl.wait()

        # Check if patch was rasterised (patches smaller than the raster
        # resolution and close to larger patches may not be rasterised)
        # start_check = grass.parse_command('r.info', flags='r', map=start_patch)
        # start_check = grass.parse_command('r.univar', flags='g', map=start_patch)
        # print(start_check)
        """if start_check['min'] != '1':
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.'.format(cat))

            grass.run_command('g.remove', flags='f', vector=start_patch,
                              raster=start_patch, quiet=True)
            grass.del_temp_region()
            continue"""

        # Prepare stop patches
        ############################################
        reg = grass.parse_command('g.region', flags='ug', quiet=True,
                                  raster=start_patch,
                                  n=float(from_bbox['n']) + float(cutoff),
                                  s=float(from_bbox['s']) - float(cutoff),
                                  e=float(from_bbox['e']) + float(cutoff),
                                  w=float(from_bbox['w']) - float(cutoff),
                                  align='{}_patches_pol'.format(TMP_PREFIX))

        # Clamp the search region to the start region on all four sides
        # (the east/west comparisons were inverted in the original code)
        north = reg['n'] if max_n > reg['n'] else max_n
        south = reg['s'] if min_s < reg['s'] else min_s
        east = reg['e'] if max_e > reg['e'] else max_e
        west = reg['w'] if min_w < reg['w'] else min_w

        # Set region to patch search radius
        grass.use_temp_region()
        grass.run_command('g.region', quiet=True, n=north, s=south, e=east,
                          w=west, align='{}_patches_pol'.format(TMP_PREFIX))

        # Create buffer around start-patch as a mask
        # for cost distance analysis
        grass.run_command('r.buffer', quiet=True, input=start_patch,
                          output='MASK', distances=cutoff)
        grass.run_command('r.mapcalc', quiet=True,
                          expression='{pf}_patch_{p}_neighbours_contur='
                                     'if({pf}_patches_boundary=={p},null(),'
                                     '{pf}_patches_boundary)'.format(pf=TMP_PREFIX, p=cat))
        grass.run_command('r.mask', flags='r', quiet=True)

        # Calculate cost distance
        cost_distance_map = '{}_patch_{}_cost_dist'.format(prefix, cat)
        grass.run_command('r.cost', flags=dist_flags, quiet=True,
                          overwrite=True, input=costs,
                          output=cost_distance_map, start_rast=start_patch,
                          memory=memory)

        # grass.run_command('g.region', flags='up')
        # grass.raster.raster_history(cost_distance_map)
        cdhist = History(cost_distance_map)
        cdhist.clear()
        cdhist.creator = os.environ['USER']
        cdhist.write()
        # History object cannot modify description
        grass.run_command('r.support', map=cost_distance_map,
                          description='Generated by r.connectivity.distance',
                          history=os.environ['CMDLINE'])

        # Export distance at boundaries
        maps = '{0}_patch_{1}_neighbours_contur,{2}_patch_{1}_cost_dist'
        maps = maps.format(TMP_PREFIX, cat, prefix)

        connections = grass.encode(grass.read_command('r.stats', flags='1ng',
                                                      quiet=True, input=maps,
                                                      separator=';').rstrip('\n'))
        if connections:
            con_array = np.genfromtxt(BytesIO(connections), delimiter=';',
                                      dtype=None,
                                      names=['x', 'y', 'cat', 'dist'])
        else:
            grass.warning('No connections for patch {}'.format(cat))

            # Write centroid to vertex map
            vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)
            vertex.table.conn.commit()

            # Remove temporary map data
            grass.run_command('g.remove', quiet=True, flags='f',
                              type=['raster', 'vector'],
                              pattern="{}*{}*".format(TMP_PREFIX, cat))
            grass.del_temp_region()
            continue

        # Find closest points on neighbour patches
        to_cats = set(np.atleast_1d(con_array['cat']))
        to_coords = []
        for to_cat in to_cats:
            connection = con_array[con_array['cat'] == to_cat]
            connection.sort(order=['dist'])
            pixel = (border_dist if len(connection) > border_dist
                     else len(connection) - 1)
            # closest_points_x = connection['x'][pixel]
            # closest_points_y = connection['y'][pixel]
            closest_points_to_cat = to_cat
            closest_points_min_dist = connection['dist'][0]
            closest_points_dist = connection['dist'][pixel]
            closest_points_max_dist = connection['dist'][-1]

            to_patch_ids = vpatch_ids[vpatch_ids['cat'] == int(to_cat)]['vid']

            if len(to_patch_ids) == 1:
                to_centroid = Centroid(v_id=to_patch_ids,
                                       c_mapinfo=vpatches.c_mapinfo)
                to_x = to_centroid.x
                to_y = to_centroid.y
            elif len(to_patch_ids) > 1:
                xcoords = []
                ycoords = []
                for t_p in to_patch_ids:
                    to_centroid = Centroid(v_id=int(t_p),
                                           c_mapinfo=vpatches.c_mapinfo)
                    xcoords.append(to_centroid.x)
                    ycoords.append(to_centroid.y)

                    # Get centroid
                    if not to_centroid:
                        continue
                to_x = np.average(xcoords)
                to_y = np.average(ycoords)

            to_coords.append('{},{},{},{},{},{}'.format(
                connection['x'][0], connection['y'][0], to_cat,
                closest_points_min_dist, closest_points_dist,
                closest_points_max_dist))

            # Save edges to network dataset
            if closest_points_dist <= 0:
                zero_dist = 1

            # Write data to network
            network.write(Line([(from_x, from_y), (to_x, to_y)]),
                          cat=lin_cat,
                          attrs=(cat, int(closest_points_to_cat),
                                 closest_points_min_dist,
                                 closest_points_dist,
                                 closest_points_max_dist,))
            network.table.conn.commit()

            lin_cat = lin_cat + 1

        # Save closest points and shortest paths through cost raster as
        # vector map (r.drain limited to 1024 points) if requested
        if p_flag:
            grass.verbose('Extracting shortest paths for patch number '
                          '{}...'.format(cat))

            points_n = len(to_cats)
            tiles = int(points_n / 1024.0)
            rest = points_n % 1024
            if not rest == 0:
                tiles = tiles + 1

            tile_n = 0
            while tile_n < tiles:
                tile_n = tile_n + 1
                # Import closest points for start-patch in 1000er blocks
                sp = grass.feed_command('v.in.ascii', flags='nr',
                                        overwrite=True, quiet=True, input='-',
                                        stderr=subprocess.PIPE,
                                        output="{}_{}_cp".format(TMP_PREFIX, cat),
                                        separator=",",
                                        columns="x double precision, "
                                                "y double precision, "
                                                "to_p integer, "
                                                "dist_min double precision, "
                                                "dist double precision, "
                                                "dist_max double precision")
                sp.stdin.write(grass.encode("\n".join(to_coords)))
                sp.stdin.close()
                sp.wait()

                # Extract shortest paths for start-patch in chunks of
                # 1024 points
                cost_paths = "{}_{}_cost_paths".format(TMP_PREFIX, cat)
                start_points = "{}_{}_cp".format(TMP_PREFIX, cat)
                grass.run_command('r.drain', overwrite=True, quiet=True,
                                  input=cost_distance_map, output=cost_paths,
                                  drain=cost_paths,
                                  start_points=start_points)

                grass.run_command('v.db.addtable', map=cost_paths, quiet=True,
                                  columns="cat integer, from_p integer, "
                                          "to_p integer, "
                                          "dist_min double precision, "
                                          "dist double precision, "
                                          "dist_max double precision")
                grass.run_command('v.db.update', map=cost_paths,
                                  column='from_p', value=cat, quiet=True)
                grass.run_command('v.distance', quiet=True, from_=cost_paths,
                                  to=start_points, upload='to_attr',
                                  column='to_p', to_column='to_p')
                grass.run_command('v.db.join', quiet=True, map=cost_paths,
                                  column='to_p', other_column='to_p',
                                  other_table=start_points,
                                  subset_columns='dist_min,dist,dist_max')

                # grass.run_command('v.info', flags='c', map=cost_paths)
                grass.run_command('v.patch', flags='ae', overwrite=True,
                                  quiet=True, input=cost_paths,
                                  output=shortest_paths)

                # Remove temporary map data
                grass.run_command('g.remove', quiet=True, flags='f',
                                  type=['raster', 'vector'],
                                  pattern="{}*{}*".format(TMP_PREFIX, cat))

        # Remove temporary map data for patch
        if r_flag:
            grass.run_command('g.remove', flags='f', type='raster',
                              name=cost_distance_map, quiet=True)

        vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)
        vertex.table.conn.commit()

        # Print progress message
        grass.percent(i=int((float(counter) / n_cats) * 100), n=100, s=3)

        # Update counter for progress message
        counter = counter + 1

    if zero_dist:
        grass.warning('Some patches are directly adjacent to others. '
                      'Minimum distance set to 0.0000000001')

    # Close vector maps and build topology
    network.close()
    vertex.close()

    # Add vertex attributes
    # grass.run_command('v.db.addtable', map=vertex_map)
    # grass.run_command('v.db.join', map=vertex_map, column='cat',
    #                   other_table=in_db_connection[int(layer)]['table'],
    #                   other_column='cat', subset_columns=pop_proxy,
    #                   quiet=True)

    # Add history and meta data to produced maps
    grass.run_command('v.support', flags='h', map=edge_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    grass.run_command('v.support', flags='h', map=vertex_map,
                      person=os.environ['USER'],
                      cmdhist=os.environ['CMDLINE'])

    if p_flag:
        grass.run_command('v.support', flags='h', map=shortest_paths,
                          person=os.environ['USER'],
                          cmdhist=os.environ['CMDLINE'])

    # Output also Conefor files if requested
    if conefor_dir:
        query = """SELECT p_from, p_to, avg(dist) FROM
                   (SELECT
                   CASE
                   WHEN from_p > to_p THEN to_p
                   ELSE from_p END AS p_from,
                   CASE
                   WHEN from_p > to_p THEN from_p
                   ELSE to_p END AS p_to,
                   dist
                   FROM {}) AS x
                   GROUP BY p_from, p_to""".format(edge_map)
        with open(os.path.join(conefor_dir, 'undirected_connection_file'), 'w') as edges:
            edges.write(grass.read_command('db.select', sql=query, separator=' '))
        with open(os.path.join(conefor_dir, 'directed_connection_file'), 'w') as edges:
            edges.write(grass.read_command('v.db.select', map=edge_map,
                                           separator=' ', flags='c'))
        with open(os.path.join(conefor_dir, 'node_file'), 'w') as nodes:
            nodes.write(grass.read_command('v.db.select', map=vertex_map,
                                           separator=' ', flags='c'))
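# A minimal sqlite3 demo of the Conefor query above: directed edges (a->b and
# b->a) are folded into one undirected edge by ordering the endpoint pair with
# CASE WHEN, then averaging the two distances. The table and values here are
# made up for illustration.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE edges (from_p INTEGER, to_p INTEGER, dist REAL)")
conn.executemany("INSERT INTO edges VALUES (?, ?, ?)",
                 [(1, 2, 10.0), (2, 1, 12.0), (1, 3, 5.0)])
rows = conn.execute(
    """SELECT p_from, p_to, avg(dist) FROM
       (SELECT CASE WHEN from_p > to_p THEN to_p ELSE from_p END AS p_from,
               CASE WHEN from_p > to_p THEN from_p ELSE to_p END AS p_to,
               dist FROM edges) AS x
       GROUP BY p_from, p_to""").fetchall()
print(rows)  # e.g. [(1, 2, 11.0), (1, 3, 5.0)]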
zachranka = VectorTopo('adresnimista_zachranka', mapset='ruian_praha')
zachranka.open('r')
ulice = VectorTopo('ulice', mapset='ruian_praha')
ulice.open('r')

zu = VectorTopo('zachranka_ulice')
cols = [('cat', 'INTEGER PRIMARY KEY'),
        ('kod', 'INTEGER'),
        ('ulice', 'TEXT'),
        ('nespravny', 'INTEGER')]
zu.open('w', tab_cols=cols)

seznam = []
for z in zachranka:
    # find the nearest street within 1000 map units
    u = ulice.find['by_point'].geo(z, maxdist=1000.)
    if u is None:
        continue
    nespravny = z.attrs['ulicekod'] != u.attrs['kod']
    print(u'{:10} {:1} {}'.format(z.attrs['kod'], nespravny, u.attrs['nazev']))
    zu.write(z, (z.attrs['kod'], u.attrs['nazev'], nespravny))
    if u.cat not in seznam:
        zu.write(u, (None, u.attrs['nazev'], None))
        seznam.append(u.cat)

zu.table.conn.commit()  # required for the attributes to be written !!!
zu.close()
zachranka.close()
ulice.close()
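# A minimal sketch of the nearest-feature lookup used above, in isolation.
# Once a VectorTopo map is open, its `find` attribute exposes finder objects,
# and find['by_point'].geo(point, maxdist=...) returns the nearest geometry
# within maxdist, or None when nothing is close enough. Map and attribute
# names are taken from the snippet above; `pnt` stands for any pygrass point.
from grass.pygrass.vector import VectorTopo

ulice = VectorTopo('ulice', mapset='ruian_praha')
ulice.open('r')
# nejblizsi = ulice.find['by_point'].geo(pnt, maxdist=1000.)
# if nejblizsi is not None:
#     print(nejblizsi.attrs['nazev'])
ulice.close()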
def graph_to_vector(name, mapset, graphs, output, order_types, outlet_cats,
                    copy_columns):
    """Write the Graph as vector map.

    Attach the network id, the stream orders, the reverse flag, the
    original category and copy columns from the source stream network
    vector map if required.

    :param name: Name of the input stream vector map
    :param mapset: Mapset name of the input stream vector map
    :param graphs: The list of computed graphs
    :param output: The name of the output vector map
    :param order_types: The order algorithms
    :param outlet_cats: Categories of the outlet points
    :param copy_columns: The column names to be copied from the original input map
    :return:
    """
    streams = VectorTopo(name=name, mapset=mapset)
    streams.open("r")

    # Specify all columns that should be created
    cols = [("cat", "INTEGER PRIMARY KEY"),
            ("outlet_cat", "INTEGER"),
            ("network", "INTEGER"),
            ("reversed", "INTEGER")]
    for order in order_types:
        cols.append((ORDER_DICT[order], "INTEGER"))

    # Add the columns of the table from the input map
    if copy_columns:
        for entry in copy_columns:
            cols.append((entry[1], entry[2]))

    out_streams = VectorTopo(output)
    grass.message(_("Writing vector map <%s>" % output))
    out_streams.open("w", tab_cols=cols)

    count = 0
    for graph in graphs:
        outlet_cat = outlet_cats[count]
        count += 1

        grass.message(_("Writing network %i of %i with "
                        "outlet category %i" % (count, len(graphs), outlet_cat)))

        # Write each edge as line
        for edge_id in graph:
            edge = graph[edge_id]
            line = streams.read(edge_id)

            # Reverse the line if required
            if edge.reverse is True:
                line.reverse()

            # Orders derived from the shreve algorithm
            if ORDER_SCHEIDEGGER in order_types:
                edge.stream_order[ORDER_SCHEIDEGGER] *= 2
            if ORDER_DRWAL in order_types:
                if edge.stream_order[ORDER_DRWAL] != 0:
                    edge.stream_order[ORDER_DRWAL] = int(
                        math.log(edge.stream_order[ORDER_DRWAL], 2) + 1)

            # Create attributes
            attrs = []
            # Append the outlet point category
            attrs.append(outlet_cat)
            # Append the network id
            attrs.append(count)
            # The reverse flag
            attrs.append(edge.reverse)

            # Then the stream orders defined at the command line
            for order in order_types:
                val = int(edge.stream_order[order])
                if val == 0:
                    val = None
                attrs.append(val)

            # Copy attributes from original streams if the table exists
            if copy_columns:
                for entry in copy_columns:
                    # First entry is the column index
                    attrs.append(line.attrs.values()[entry[0]])

            # Write the feature
            out_streams.write(line, cat=edge_id, attrs=attrs)

        # Commit the database entries
        out_streams.table.conn.commit()

    # Close the input and output map
    out_streams.close()
    streams.close()
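# The order transformations above, in isolation: the Scheidegger order is
# simply twice the Shreve magnitude, and the Drwal order is derived from a
# nonzero Shreve magnitude as int(log2(shreve) + 1). A standalone check
# (helper name is illustrative):
import math

def drwal_from_shreve(shreve):
    return int(math.log(shreve, 2) + 1) if shreve != 0 else None

assert drwal_from_shreve(1) == 1
assert drwal_from_shreve(2) == 2
assert drwal_from_shreve(4) == 3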
def main():
    input = options["input"]
    if options["refline"]:
        refline_cat = int(options["refline"])
    else:
        refline_cat = None
    nb_vertices = int(options["vertices"])
    if options["range"]:
        search_range = float(options["range"])
    else:
        search_range = None
    output = options["output"]
    transversals = flags["t"]
    median = flags["m"]

    global tmp_points_map
    global tmp_centerpoints_map
    global tmp_line_map
    global tmp_cleaned_map
    global tmp_map
    tmp_points_map = "points_map_tmp_%d" % os.getpid()
    tmp_centerpoints_map = "centerpoints_map_tmp_%d" % os.getpid()
    tmp_line_map = "line_map_tmp_%d" % os.getpid()
    tmp_cleaned_map = "cleaned_map_tmp_%d" % os.getpid()
    tmp_map = "generaluse_map_tmp_%d" % os.getpid()

    nb_lines = grass.vector_info_topo(input)["lines"]

    # Find best reference line and max distance between centerpoints of lines
    segment_input = ""
    categories = grass.read_command("v.category", input=input,
                                    option="print", quiet=True).splitlines()
    for category in categories:
        segment_input += "P {}".format(category.strip())
        segment_input += " {} {}".format(category.strip(), " 50%")
        segment_input += os.linesep

    grass.write_command(
        "v.segment",
        input=input,
        output=tmp_centerpoints_map,
        rules="-",
        stdin=segment_input,
        quiet=True,
    )

    center_distances = grass.read_command(
        "v.distance",
        from_=tmp_centerpoints_map,
        to=tmp_centerpoints_map,
        upload="dist",
        flags="pa",
        quiet=True,
    ).splitlines()

    cats = []
    mean_dists = []
    count = 0
    distmax = 0
    for center in center_distances:
        if count < 2:
            count += 1
            continue
        cat = center.strip().split("|")[0]
        distsum = 0
        for x in center.strip().split("|")[1:]:
            distsum += float(x)
        mean_dist = distsum / len(center.strip().split("|")[1:])
        cats.append(cat)
        mean_dists.append(mean_dist)

    if transversals and not search_range:
        search_range = sum(mean_dists) / len(mean_dists)
        grass.message(_("Calculated search range: %.5f." % search_range))

    if not refline_cat:
        refline_cat = sorted(zip(cats, mean_dists), key=lambda tup: tup[1])[0][0]
        grass.message(_("Category number of chosen reference line: %s." % refline_cat))

    # Use transversals algorithm
    if transversals:
        # Break any intersections in the original lines so that
        # they do not interfere further on
        grass.run_command("v.clean", input=input, output=tmp_cleaned_map,
                          tool="break", quiet=True)

        xmean = []
        ymean = []
        xmedian = []
        ymedian = []
        step = 100.0 / nb_vertices

        os.environ["GRASS_VERBOSE"] = "-1"

        for vertice in range(0, nb_vertices + 1):
            # v.segment sometimes cannot find points when
            # using 0% or 100% offset
            length_offset = step * vertice
            if length_offset < 0.00001:
                length_offset = 0.00001
            if length_offset > 99.99999:
                length_offset = 99.9999
            # Create endpoints of transversal
            segment_input = "P 1 %s %.5f%% %f\n" % (
                refline_cat,
                length_offset,
                search_range,
            )
            segment_input += "P 2 %s %.5f%% %f\n" % (
                refline_cat,
                length_offset,
                -search_range,
            )
            grass.write_command(
                "v.segment",
                input=input,
                output=tmp_points_map,
                stdin=segment_input,
                overwrite=True,
            )

            # Create transversal
            grass.write_command(
                "v.net",
                points=tmp_points_map,
                output=tmp_line_map,
                operation="arcs",
                file="-",
                stdin="99999 1 2",
                overwrite=True,
            )

            # Patch transversal onto cleaned input lines
            maps = tmp_cleaned_map + "," + tmp_line_map
            grass.run_command("v.patch", input=maps, out=tmp_map,
                              overwrite=True)

            # Find intersections
            grass.run_command(
                "v.clean",
                input=tmp_map,
                out=tmp_line_map,
                tool="break",
                error=tmp_points_map,
                overwrite=True,
            )

            # Add categories to intersection points
            grass.run_command(
                "v.category",
                input=tmp_points_map,
                out=tmp_map,
                op="add",
                overwrite=True,
            )

            # Get coordinates of points
            coords = grass.read_command("v.to.db", map=tmp_map, op="coor",
                                        flags="p").splitlines()

            count = 0
            x = []
            y = []
            for coord in coords:
                x.append(float(coord.strip().split("|")[1]))
                y.append(float(coord.strip().split("|")[2]))

            # Calculate mean and median for this transversal
            if len(x) > 0:
                xmean.append(sum(x) / len(x))
                ymean.append(sum(y) / len(y))

                x.sort()
                y.sort()

                xmedian.append((x[(len(x) - 1) // 2] + x[(len(x)) // 2]) / 2)
                ymedian.append((y[(len(y) - 1) // 2] + y[(len(y)) // 2]) / 2)

        del os.environ["GRASS_VERBOSE"]

    # Use closest point algorithm
    else:
        # Get reference line and calculate its length
        grass.run_command("v.extract", input=input, output=tmp_line_map,
                          cats=refline_cat, quiet=True)

        os.environ["GRASS_VERBOSE"] = "0"
        lpipe = grass.read_command("v.to.db", map=tmp_line_map, op="length",
                                   flags="p").splitlines()
        del os.environ["GRASS_VERBOSE"]

        for l in lpipe:
            linelength = float(l.strip().split("|")[1])

        step = linelength / nb_vertices

        # Create reference points for vertice calculation
        grass.run_command(
            "v.to.points",
            input=tmp_line_map,
            output=tmp_points_map,
            dmax=step,
            quiet=True,
        )

        nb_points = grass.vector_info_topo(tmp_points_map)["points"]

        cat = []
        x = []
        y = []

        # Get coordinates of closest points on all input lines
        if search_range:
            points = grass.read_command(
                "v.distance",
                from_=tmp_points_map,
                from_layer=2,
                to=input,
                upload="to_x,to_y",
                dmax=search_range,
                flags="pa",
                quiet=True,
            ).splitlines()
        else:
            points = grass.read_command(
                "v.distance",
                from_=tmp_points_map,
                from_layer=2,
                to=input,
                upload="to_x,to_y",
                flags="pa",
                quiet=True,
            ).splitlines()

        firstline = True
        for point in points:
            if firstline:
                firstline = False
                continue
            cat.append((int(point.strip().split("|")[0])))
            x.append(float(point.strip().split("|")[2]))
            y.append(float(point.strip().split("|")[3]))

        # Calculate mean coordinates
        xsum = [0] * nb_points
        ysum = [0] * nb_points
        linecount = [0] * nb_points

        for i in range(len(cat)):
            index = cat[i] - 1
            linecount[index] += 1
            xsum[index] = xsum[index] + x[i]
            ysum[index] = ysum[index] + y[i]

        xmean = [0] * nb_points
        ymean = [0] * nb_points

        for c in range(0, nb_points):
            xmean[c] = xsum[c] / linecount[c]
            ymean[c] = ysum[c] / linecount[c]

        # Calculate the median
        xmedian = [0] * nb_points
        ymedian = [0] * nb_points

        for c in range(0, nb_points):
            xtemp = []
            ytemp = []
            for i in range(len(cat)):
                if cat[i] == c + 1:
                    xtemp.append(x[i])
                    ytemp.append(y[i])
            xtemp.sort()
            ytemp.sort()
            xmedian[c] = (xtemp[(len(xtemp) - 1) // 2] + xtemp[(len(xtemp)) // 2]) / 2
            ymedian[c] = (ytemp[(len(ytemp) - 1) // 2] + ytemp[(len(ytemp)) // 2]) / 2

    # Create new line and write to file
    if median and nb_lines > 2:
        line = geo.Line(list(zip(xmedian, ymedian)))
    else:
        if median and nb_lines <= 2:
            grass.message(_("More than 2 lines necessary for median, using mean."))
        line = geo.Line(list(zip(xmean, ymean)))

    new = VectorTopo(output)
    new.open("w")
    new.write(line)
    new.close()
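# The v.segment rules assembled above, in isolation: each rule line reads
# "P <point-cat> <line-cat> <offset>[%] [<side-offset>]", so the two rules
# below place points at 30% of the length of line 5, offset 25 map units to
# one side and to the other (values are illustrative).
segment_input = "P 1 %s %.5f%% %f\n" % (5, 30.0, 25.0)
segment_input += "P 2 %s %.5f%% %f\n" % (5, 30.0, -25.0)
print(segment_input)
# P 1 5 30.00000% 25.000000
# P 2 5 30.00000% -25.000000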
# Collect the ids of all areas neighbouring the areas whose attribute
# 'psc' (postal code) matches the requested one
for prvek in obce.viter('areas'):
    if prvek.attrs is None:
        continue
    if prvek.attrs['psc'] == psc:
        if obec_id is None:
            obec_id = prvek.id

        for b in prvek.boundaries():
            for n in b.read_area_ids():
                if n != -1 and n != obec_id:
                    obce_psc.add(n)
obce_psc.add(obec_id)

# Write the boundaries of the selected areas (each boundary only once) and a
# centroid carrying the name and postal code attributes
hranice = list()
for prvek in obce.viter('areas'):
    if prvek.id not in obce_psc:
        continue

    for b in prvek.boundaries():
        if b.id not in hranice:
            hranice.append(b.id)
            vystup.write(b, attrs=(None, None))

    vystup.write(prvek.centroid(),
                 attrs=(prvek.attrs['nazev'], prvek.attrs['psc']))

vystup.table.conn.commit()
vystup.close()
obce.close()
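# The neighbour lookup used above, in isolation: every boundary of a pygrass
# area reports the ids of the areas on its two sides via read_area_ids()
# (with -1 meaning "no area on that side"), so the neighbours of an area are
# all other ids found on its boundaries. A minimal sketch; the helper name is
# illustrative.
def neighbour_area_ids(area):
    """Return ids of areas sharing a boundary with the given pygrass area."""
    neighbours = set()
    for boundary in area.boundaries():
        for area_id in boundary.read_area_ids():
            if area_id != -1 and area_id != area.id:
                neighbours.add(area_id)
    return neighbours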
def extendLine(map, map_out, maxlen=200, scale=0.5, debug=False, verbose=1): # # map=Input map name # map_out=Output map with extensions # maxlen=Max length in map units that line can be extended (def=200) # scale=Maximum length of extension as proportion of original line, disabled if 0 (def=0.5) # vlen=number of verticies to look back in calculating line end direction (def=1) # Not sure if it is worth putting this in as parameter. # allowOverwrite = os.getenv('GRASS_OVERWRITE', '0') == '1' grass.info("map={}, map_out={}, maxlen={}, scale={}, debug={}".format( map, map_out, maxlen, scale, debug)) vlen = 1 # not sure if this is worth putting in as parameter cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'parent', 'INTEGER'), (u'dend', 'TEXT'), (u'orgx', 'DOUBLE PRECISION'), (u'orgy', 'DOUBLE PRECISION'), (u'search_len', 'DOUBLE PRECISION'), (u'search_az', 'DOUBLE PRECISION'), (u'best_xid', 'INTEGER'), (u'near_x', 'DOUBLE PRECISION'), (u'near_y', 'DOUBLE PRECISION'), (u'other_cat', 'INTEGER'), (u'xtype', 'TEXT'), (u'x_len', 'DOUBLE PRECISION')] extend = VectorTopo('extend') if extend.exist(): extend.remove() extend.open('w', tab_name='extend', tab_cols=cols) # # Go through input map, looking at each line and it's two nodes to find nodes # with only a single line starting/ending there - i.e. a dangle. # For each found, generate an extension line in the new map "extend" # inMap = VectorTopo(map) inMap.open('r') dangleCnt = 0 tickLen = len(inMap) grass.info("Searching {} features for dangles".format(tickLen)) ticker = 0 grass.message("Percent complete...") for ln in inMap: ticker = (ticker + 1) grass.percent(ticker, tickLen, 5) if ln.gtype == 2: # Only process lines for nd in ln.nodes(): if nd.nlines == 1: # We have a dangle dangleCnt = dangleCnt + 1 vtx = min(len(ln) - 1, vlen) if len([1 for _ in nd.lines(only_out=True) ]) == 1: # Dangle starting at node dend = "head" sx = ln[0].x sy = ln[0].y dx = sx - ln[vtx].x dy = sy - ln[vtx].y else: # Dangle ending at node dend = "tail" sx = ln[-1].x sy = ln[-1].y dx = sx - ln[-(vtx + 1)].x dy = sy - ln[-(vtx + 1)].y endaz = math.atan2(dy, dx) if scale > 0: extLen = min(ln.length() * scale, maxlen) else: extLen = maxlen ex = extLen * math.cos(endaz) + sx ey = extLen * math.sin(endaz) + sy extLine = geo.Line([(sx, sy), (ex, ey)]) quiet = extend.write(extLine, (ln.cat, dend, sx, sy, extLen, endaz, 0, 0, 0, 0, 'null', extLen)) grass.info( "{} dangle nodes found, committing table extend".format(dangleCnt)) extend.table.conn.commit() extend.close(build=True, release=True) inMap.close() # # Create two tables where extensions intersect; # 1. intersect with original lines # 2. 
    # 2. intersect with self - to extract intersects between extensions
    #
    # First the intersects with original lines
    grass.info(
        "Searching for intersects between potential extensions and original lines"
    )
    table_isectIn = Table('isectIn',
                          connection=sqlite3.connect(get_path(path)))
    if table_isectIn.exist():
        table_isectIn.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to=map,
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectIn")
    # Will have touched the dangle it comes from, so remove those touches
    run_command(
        "db.execute",
        sql="DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn INNER JOIN extend ON from_cat=cat WHERE near_cat=parent)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectIn ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectIn SET ntype = 'orig' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Now the second, self-intersect table
    #
    grass.info("Searching for intersects of potential extensions")
    table_isectX = Table('isectX',
                         connection=sqlite3.connect(get_path(path)))
    if table_isectX.exist():
        table_isectX.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to="extend",
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectX")
    # Obviously all extensions will intersect with themselves, so remove
    # those "intersects"
    run_command("db.execute",
                sql="DELETE FROM isectX WHERE from_cat = near_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectX ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectX SET ntype = 'ext' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Combine the two tables and add a few more attributes
    #
    run_command("db.execute",
                sql="INSERT INTO isectIn SELECT * FROM isectX",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    cols_isectIn = Columns('isectIn',
                           connection=sqlite3.connect(get_path(path)))
    cols_isectIn.add(['from_x'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['from_y'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['ext_len'], ['DOUBLE PRECISION'])
    # Get the starting coordinate at the end of the dangle
    run_command(
        "db.execute",
        sql="UPDATE isectIn SET from_x = (SELECT extend.orgx FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command(
        "db.execute",
        sql="UPDATE isectIn SET from_y = (SELECT extend.orgy FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    # For each intersect point, calculate the distance along the extension
    # line from the end of the dangle. Would be nicer to do this in the
    # database, but SQLite doesn't support sqrt or exponents.
    grass.info(
        "Calculating distances of intersects along potential extensions")
    cur = table_isectIn.execute(
        sql_code="SELECT rowid, from_x, from_y, nx, ny FROM isectIn")
    for row in cur.fetchall():
        rowid, fx, fy, nx, ny = row
        x_len = math.sqrt((fx - nx)**2 + (fy - ny)**2)
        sqlStr = ("UPDATE isectIn SET ext_len={:.8f} "
rowid={:d}".format( x_len, rowid) table_isectIn.execute(sql_code=sqlStr) grass.verbose("Ready to commit isectIn changes") table_isectIn.conn.commit() # Remove any zero distance from end of their dangle. # This happens when another extension intersects exactly at that point run_command("db.execute", sql="DELETE FROM isectIn WHERE ext_len = 0.0", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") table_isectIn.conn.commit() # Go through the extensions and find the intersect closest to each origin. grass.info("Searching for closest intersect for each potential extension") # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN bst INTEGER" # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nrx DOUBLE PRECISION" # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nry DOUBLE PRECISION" # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN ocat TEXT" # run_command("db.execute", # sql = "INSERT OR REPLACE INTO extend_t1 (bst, nrx, nry, ocat) VALUES ((SELECT isectIn.rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=extend_t1.cat ORDER BY ext_len ASC LIMIT 1))", # driver = "sqlite", # database = "$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("CREATE index") run_command("db.execute", sql="CREATE INDEX idx_from_cat ON isectIn (from_cat)", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("UPDATE best_xid") run_command( "db.execute", sql= "UPDATE extend SET best_xid = (SELECT isectIn.rowid FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("UPDATE x_len") run_command( "db.execute", sql= "UPDATE extend SET x_len = (SELECT ext_len FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("UPDATE near_x") run_command( "db.execute", sql= "UPDATE extend SET near_x = (SELECT nx FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("UPDATE near_y") run_command( "db.execute", sql= "UPDATE extend SET near_y = (SELECT ny FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("UPDATE other_cat") run_command( "db.execute", sql= "UPDATE extend SET other_cat = (SELECT near_cat FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("UPDATE xtype") run_command( "db.execute", sql= "UPDATE extend SET xtype = (SELECT ntype FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("DROP index") run_command("db.execute", sql="DROP INDEX idx_from_cat", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") grass.verbose("CREATE index on near_cat") run_command("db.execute", sql="CREATE INDEX idx_near_cat ON isectIn (near_cat)", driver="sqlite", database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db") quiet = table_isectIn.filters.select('rowid', 'ext_len', 'nx', 'ny', 'near_cat', 'ntype') # quiet=table_isectIn.filters.order_by(['ext_len ASC']) quiet = table_isectIn.filters.order_by('ext_len ASC') quiet = table_isectIn.filters.limit(1) table_extend = 
    table_extend = Table('extend', connection=sqlite3.connect(get_path(path)))
    # The code below was replaced by the commands above until the memory
    # problem can be sorted out
    # table_extend.filters.select('cat')
    # cur = table_extend.execute()
    # updateCnt = 0
    # for row in cur.fetchall():
    #     cat, = row
    #     quiet = table_isectIn.filters.where('from_cat={:d}'.format(cat))
    #     ## SELECT rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=32734 ORDER BY ext_len ASC LIMIT 1
    #     x_sect = table_isectIn.execute().fetchone()
    #     if x_sect is not None:
    #         x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
    #         sqlStr = "UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
    #         table_extend.execute(sql_code=sqlStr)
    #     ## Try periodic commit to avoid crash!
    #     updateCnt = (updateCnt + 1) % 10000
    #     if updateCnt == 0:
    #         table_extend.conn.commit()
    grass.verbose("Ready to commit extend changes")
    table_extend.conn.commit()
    #
    # There may be extensions that crossed, with the intersection chosen by
    # one but not reciprocated by the other. Need to remove those
    # possibilities and allow the jilted extension to re-search.
    #
    grass.verbose("Deleting intersects already resolved")
    run_command(
        "db.execute",
        sql="DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn JOIN extend ON near_cat=cat WHERE ntype='ext' AND xtype!='null')",
        # "AND from_cat!=other_cat" - no second chance!
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    grass.verbose("Deleting complete")
    # To find the jilted - need a copy of extensions that have found an
    # intersection (won't overwrite so drop first)
    grass.verbose(
        "Re-searching for mis-matched intersects between potential extensions")
    table_imatch = Table('imatch', connection=sqlite3.connect(get_path(path)))
    if table_imatch.exist():
        table_imatch.drop(force=True)
    wvar = "xtype!='null'"
    run_command(
        "db.copy",
        overwrite=True,
        quiet=True,
        from_driver="sqlite",
        from_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        from_table="extend",
        to_driver="sqlite",
        to_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        to_table="imatch",
        where=wvar)
    # Memory problems?
    if gc.isenabled():
        grass.verbose("Garbage collection enabled - forcing gc cycle")
        gc.collect()
    else:
        grass.verbose("Garbage collection not enabled")
    # Ensure tables are committed
    table_extend.conn.commit()
    table_imatch.conn.commit()
    table_isectIn.conn.commit()
    # Identify the jilted
    sqlStr = ("SELECT extend.cat FROM extend JOIN imatch ON "
              "extend.other_cat=imatch.cat WHERE extend.xtype='ext' "
              "AND extend.cat!=imatch.other_cat")
    cur = table_extend.execute(sql_code=sqlStr)
    updateCnt = 0
    for row in cur.fetchall():
        cat, = row
        grass.verbose("Reworking extend.cat={}".format(cat))
        quiet = table_isectIn.filters.where('from_cat={:d}'.format(cat))
        # print("SQL: {}".format(table_isectIn.filters.get_sql()))
        x_sect = table_isectIn.execute().fetchone()  # Problem here under modules
        if x_sect is None:
            sqlStr = ("UPDATE extend SET best_xid=0, x_len=search_len, "
                      "near_x=0, near_y=0, other_cat=0, xtype='null' "
                      "WHERE cat={:d}").format(cat)
        else:
            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
            sqlStr = ("UPDATE extend SET best_xid={:d}, x_len={:.8f}, "
                      "near_x={:.8f}, near_y={:.8f}, other_cat={:d}, "
                      "xtype='{}' WHERE cat={:d}").format(
                          x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
        table_extend.execute(sql_code=sqlStr)
        # Try periodic commit to avoid crash!
        updateCnt = (updateCnt + 1) % 100
        if (updateCnt == 0):  # or (cat == 750483):
            grass.verbose(
                "XXXXXXXXXXX Committing table_extend XXXXXXXXXXXXXXXXXXXXXX")
            table_extend.conn.commit()
    grass.verbose("Committing adjustments to table extend")
    table_extend.conn.commit()
    #
    # For debugging, create a map with the chosen intersect points
    #
    if debug:
        wvar = "xtype!='null' AND x_len!=0"
        # print(wvar)
        run_command(
            "v.in.db",
            overwrite=True,
            quiet=True,
            table="extend",
            driver="sqlite",
            database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
            x="near_x",
            y="near_y",
            key="cat",
            where=wvar,
            output="chosen")
    #
    # Finally adjust the dangle lines in the input map - use a copy (map_out)
    # if requested
    #
    if map_out:
        run_command("g.copy",
                    overwrite=allowOverwrite,
                    quiet=True,
                    vector=map + "," + map_out)
    else:  # Otherwise just modify the original dataset (map)
        if allowOverwrite:
            grass.warning("Modifying vector map ({})".format(map))
            map_out = map
        else:
            grass.error(
                "Use switch --o to modify the input vector map ({})".format(
                    map))
            return 1
    #
    # Get info for the lines that need extending
    table_extend.filters.select(
        'parent, dend, near_x, near_y, search_az, xtype')
    table_extend.filters.where("xtype!='null'")
    extLines = table_extend.execute().fetchall()
    cat_mods = [ext[0] for ext in extLines]
    tickLen = len(cat_mods)
    grass.info("Extending {} dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")
    # Open up the map_out copy (or the original) and work through looking
    # for lines that need modifying
    inMap = VectorTopo(map_out)
    inMap.open('rw', tab_name=map_out)
    for ln_idx in range(len(inMap)):
        ln = inMap.read(ln_idx + 1)
        if ln.gtype == 2:  # Only process lines
            while ln.cat in cat_mods:  # Note: could be both 'head' and 'tail'
                ticker = (ticker + 1)
                grass.percent(ticker, tickLen, 5)
                cat_idx = cat_mods.index(ln.cat)
                cat, dend, nx, ny, endaz, xtype = extLines.pop(cat_idx)
                dump = cat_mods.pop(cat_idx)
                if xtype == 'orig':
                    # Overshoot by 0.1 as breaking lines is unreliable
                    nx = nx + 0.1 * math.cos(endaz)
                    ny = ny + 0.1 * math.sin(endaz)
                newEnd = geo.Point(x=nx, y=ny, z=None)
                if dend == 'head':
                    ln.insert(0, newEnd)
                else:  # 'tail'
                    ln.append(newEnd)
                quiet = inMap.rewrite(ln_idx + 1, ln)
        else:
            quiet = inMap.delete(ln_idx + 1)
        # Try periodic commit and garbage collection to avoid crash!
        if (ln_idx % 1000) == 0:
            # inMap.table.conn.commit() - no such thing - Why??
            if gc.isenabled():
                quiet = gc.collect()
    inMap.close(build=True, release=True)
    grass.message("v.extendlines completing")
    #
    # Clean up temporary tables and maps
    #
    if not debug:
        table_isectIn.drop(force=True)
        table_isectX.drop(force=True)
        table_imatch.drop(force=True)
        extend.remove()
        chosen = VectorTopo('chosen')
        if chosen.exist():
            chosen.remove()
    return 0
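# A hypothetical driver for extendLine() above; the map names 'streams' and
# 'streams_ext' are assumptions, not part of the original module. A GRASS
# session and the imports extendLine() relies on (os, gc, math, sqlite3,
# grass.script as grass, pygrass VectorTopo/geo/Table/Columns, run_command,
# get_path, path) are assumed to be in place.
rc = extendLine(map='streams', map_out='streams_ext', maxlen=100,
                scale=0.5, debug=False)
if rc != 0:
    grass.fatal("extendLine failed - see messages above")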
def create_maps(parsed_obs, offering, seconds_granularity, event_time):
    """Create vector maps representing offerings and observed properties.

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group them up
    :param seconds_granularity: Granularity in seconds
    :param event_time: Timestamp interval of the observations
    """
    timestamp_pattern = '%Y-%m-%dT%H:%M:%S'  # TODO: Timezone
    start_time = event_time.split('+')[0]
    epoch_s = int(time.mktime(time.strptime(start_time, timestamp_pattern)))
    end_time = event_time.split('+')[1].split('/')[1]
    epoch_e = int(time.mktime(time.strptime(end_time, timestamp_pattern)))

    for key, observation in parsed_obs.items():
        run_command('g.message',
                    message='Creating vector maps for {}...'.format(key))
        map_name = '{}_{}_{}'.format(options['output'], offering, key)
        if ':' in map_name:
            map_name = '_'.join(map_name.split(':'))
        if '-' in map_name:
            map_name = '_'.join(map_name.split('-'))
        if '.' in map_name:
            map_name = '_'.join(map_name.split('.'))

        run_command('t.create',
                    output=map_name,
                    type='stvds',
                    title='Dataset for offering {} and observed '
                          'property {}'.format(offering, key),
                    description='Vector space time dataset')

        free_cat = 1
        points = dict()
        new = VectorTopo(map_name)
        if overwrite() is True:
            try:
                new.remove()
            except Exception:
                pass  # the map may not exist yet

        data = json.loads(observation)
        cols = [(u'cat', 'INTEGER PRIMARY KEY'),
                (u'name', 'VARCHAR'),
                (u'value', 'DOUBLE')]

        intervals = {}
        for secondsStamp in range(epoch_s, epoch_e + 1, seconds_granularity):
            intervals.update({secondsStamp: dict()})

        timestamp_pattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone

        for a in data['features']:
            name = a['properties']['name']
            if a['properties']['name'] not in points.keys():
                if new.is_open() is False:
                    new.open('w')
                points.update({a['properties']['name']: free_cat})
                new.write(Point(*a['geometry']['coordinates']))
                free_cat += 1
            for timestamp, value in a['properties'].items():
                if timestamp != 'name':
                    observationstart_time = timestamp[:-4]
                    seconds_timestamp = int(
                        time.mktime(
                            time.strptime(observationstart_time,
                                          timestamp_pattern)))
                    for interval in intervals.keys():
                        if interval <= seconds_timestamp < (
                                interval + seconds_granularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(float(value))
                            else:
                                intervals[interval].update(
                                    {name: [float(value)]})
                            break
        if new.is_open():
            new.close(build=False)
            run_command('v.build', map=map_name, quiet=True)

        i = 1
        layers_timestamps = list()
        for interval in intervals.keys():
            if len(intervals[interval]) != 0:
                timestamp = datetime.datetime.fromtimestamp(interval).strftime(
                    't%Y%m%dT%H%M%S')
                table_name = '{}_{}_{}_{}'.format(options['output'], offering,
                                                  key, timestamp)
                if ':' in table_name:
                    table_name = '_'.join(table_name.split(':'))
                if '-' in table_name:
                    table_name = '_'.join(table_name.split('-'))
                if '.' in table_name:
                    table_name = '_'.join(table_name.split('.'))

                new.open('rw')
                db = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'
                link = Link(layer=i,
                            name=table_name,
                            table=table_name,
                            key='cat',
                            database=db,
                            driver='sqlite')
                new.dblinks.add(link)
                new.table = new.dblinks[i - 1].table()
                new.table.create(cols)

                i += 1
                layers_timestamps.append(timestamp)

                for name, values in intervals[interval].items():
                    if options['method'] == 'average':
                        aggregated_value = sum(values) / len(values)
                    elif options['method'] == 'sum':
                        aggregated_value = sum(values)
                    new.table.insert(
                        tuple([points[name], name, aggregated_value]))
                    new.table.conn.commit()

                new.close(build=False)
                run_command('v.build', map=map_name, quiet=True)

        create_temporal(map_name, i, layers_timestamps)
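# Sketch of the event_time parsing used above, assuming the times carry a
# '+HH:MM' timezone suffix and the interval is 'start/end'. The values are
# hypothetical; the TODO comments in the code note that timezones are not
# yet handled.
import time

event_time = '2010-01-01T00:00:00+01:00/2010-01-02T00:00:00+01:00'
pattern = '%Y-%m-%dT%H:%M:%S'
start = event_time.split('+')[0]              # '2010-01-01T00:00:00'
end = event_time.split('+')[1].split('/')[1]  # '2010-01-02T00:00:00'
epoch_s = int(time.mktime(time.strptime(start, pattern)))
epoch_e = int(time.mktime(time.strptime(end, pattern)))
print(epoch_s, epoch_e)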
obec_id = None
obce_psc = set()
for prvek in obce.viter('areas'):
    if prvek.attrs['psc'] == psc:
        if obec_id is None:
            obec_id = prvek.id
        for b in prvek.boundaries():
            for n in b.get_left_right():
                if n != -1 and n != obec_id:
                    obce_psc.add(n)
obce_psc.add(obec_id)

hranice = list()
for prvek in obce.viter('areas'):
    if prvek.id not in obce_psc:
        continue
    for b in prvek.boundaries():
        if b.id not in hranice:
            hranice.append(b.id)
            vystup.write(b, attrs=(None, None))
    vystup.write(prvek.get_centroid(), attrs=(prvek.attrs['nazev'],
                                              prvek.attrs['psc']))

vystup.table.conn.commit()
vystup.close()
obce.close()
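# Hedged illustration of why get_left_right() is filtered for -1 above: each
# boundary reports the ids of the areas on its left and right side, and -1
# marks "no area" (e.g. the outside of the map). The map name is
# hypothetical, and the two-value unpacking is an assumption based on how
# the snippet above iterates over the result.
from grass.pygrass.vector import VectorTopo

obce = VectorTopo('obce')
obce.open('r')
area = next(obce.viter('areas'))
for b in area.boundaries():
    left, right = b.get_left_right()
    print(b.id, left, right)  # -1 means no neighbouring area on that side
obce.close()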
def create_maps(parsed_obs, offering, secondsGranularity, resolution):
    """Create raster maps representing offerings, observed props and procedures.

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group them up
    :param secondsGranularity: Granularity in seconds
    :param resolution: 2D grid resolution for rasterization
    """
    timestampPattern = '%Y-%m-%dT%H:%M:%S'  # TODO: Timezone
    startTime = options['event_time'].split('+')[0]
    epochS = int(time.mktime(time.strptime(startTime, timestampPattern)))
    endTime = options['event_time'].split('+')[1].split('/')[1]
    epochE = int(time.mktime(time.strptime(endTime, timestampPattern)))

    for key, observation in parsed_obs.items():
        print('Creating raster maps for offering '
              '{}, observed property {}'.format(offering, key))

        data = json.loads(observation)
        cols = [(u'cat', 'INTEGER PRIMARY KEY'),
                (u'name', 'VARCHAR'),
                (u'value', 'DOUBLE')]
        geometries = dict()

        intervals = {}
        for secondsStamp in range(epochS, epochE + 1, secondsGranularity):
            intervals.update({secondsStamp: dict()})

        timestampPattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone

        for a in data['features']:
            name = a['properties']['name']
            geometries.update({name: a['geometry']['coordinates']})
            for timestamp, value in a['properties'].items():
                if timestamp != 'name':
                    observationStartTime = timestamp[:-4]
                    secondsTimestamp = int(
                        time.mktime(
                            time.strptime(observationStartTime,
                                          timestampPattern)))
                    for interval in intervals.keys():
                        if interval <= secondsTimestamp < (
                                interval + secondsGranularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(float(value))
                            else:
                                intervals[interval].update(
                                    {name: [float(value)]})
                            break

        for interval in intervals.keys():
            if len(intervals[interval]) != 0:
                timestamp = datetime.datetime.fromtimestamp(interval).strftime(
                    't%Y%m%dT%H%M%S')
                tableName = '{}_{}_{}_{}'.format(options['output'], offering,
                                                 key, timestamp)
                if ':' in tableName:
                    tableName = '_'.join(tableName.split(':'))
                if '-' in tableName:
                    tableName = '_'.join(tableName.split('-'))
                if '.' in tableName:
                    tableName = '_'.join(tableName.split('.'))

                new = VectorTopo(tableName)
                if overwrite() is True:
                    try:
                        new.remove()
                    except Exception:
                        pass  # the map may not exist yet
                new.open(mode='w',
                         layer=1,
                         tab_name=tableName,
                         link_name=tableName,
                         tab_cols=cols,
                         overwrite=True)

                i = 0
                for procedure, values in intervals[interval].items():
                    if new.exist() is False:
                        i = 1
                    else:
                        i += 1
                    if options['method'] == 'average':
                        value = sum(values) / len(values)
                    elif options['method'] == 'sum':
                        value = sum(values)
                    # TODO: Other aggregation methods
                    new.write(Point(*geometries[procedure]),
                              cat=i,
                              attrs=(procedure, value,))
                new.table.conn.commit()
                new.close(build=False)
                run_command('v.build', quiet=True, map=tableName)

                if options['bbox'] == '':
                    run_command('g.region', vect=tableName, res=resolution)

                run_command('v.to.rast',
                            input=tableName,
                            output=tableName,
                            use='attr',
                            attribute_column='value',
                            layer=1,
                            label_column='name',
                            type='point',
                            quiet=True)

                if flags['k'] is False:
                    run_command('g.remove',
                                flags='f',
                                type='vector',
                                name=tableName,
                                quiet=True)
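# A tiny standalone sketch of the interval bucketing used in both
# create_maps() variants above: readings are grouped into fixed windows of
# secondsGranularity seconds and then aggregated per sensor. All values
# here (window size, epochs, readings, sensor name) are made up for
# illustration.
secondsGranularity = 3600
epochS, epochE = 0, 7200
readings = {30: 1.0, 3650: 2.0, 3700: 4.0}  # seconds -> observed value

intervals = {s: {} for s in range(epochS, epochE + 1, secondsGranularity)}
for ts, val in readings.items():
    for start in intervals:
        if start <= ts < start + secondsGranularity:
            intervals[start].setdefault('sensor_1', []).append(val)
            break

for start, sensors in intervals.items():
    for name, values in sensors.items():
        # the 'average' method; 'sum' would just use sum(values)
        print(start, name, sum(values) / len(values))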