def new_map(self, mapa, layer, tab_sufix, objs, values, tab_subname=''):
    """Write the given geometry objects and attribute values into the
    map `mapa`, creating the attribute table for the requested layer."""
    map_out = VectorTopo(mapa)
    if not objs:
        return None
    tab_sufix_out = OUT_TABLES_NAMES[tab_sufix]
    tab_name = self.road_name + tab_sufix_out + tab_subname
    columns = OUT_TABLES[tab_sufix]
    if layer == 1:
        map_out.open('w', layer=layer, with_z=True,
                     tab_name=tab_name, tab_cols=columns)
    else:
        map_out.open('rw')
        link = Link(layer, tab_name, tab_name, 'cat' + str(layer))
        map_out.dblinks.add(link)
        table = link.table()
        if not table.exist():
            table.create(columns)
        table.conn.commit()
        map_out.close()
        map_out.open('rw', layer=layer, with_z=True)
    for i, obj in enumerate(objs):
        map_out.write(obj, i + 1, values[i])
    map_out.table.conn.commit()
    map_out.close()
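# A minimal standalone sketch of the extra-layer idiom used in the `else`
# branch above (map, table and column names are hypothetical): pygrass
# open() only builds an attribute table for layer 1, so any further layer
# has to be linked by hand via Link and dblinks.add().
from grass.pygrass.vector import VectorTopo
from grass.pygrass.vector.table import Link

cols = [('cat2', 'INTEGER PRIMARY KEY'), ('speed', 'DOUBLE')]
vct = VectorTopo('roads')           # hypothetical existing map
vct.open('rw')
link = Link(layer=2, name='roads_speed', table='roads_speed', key='cat2')
vct.dblinks.add(link)               # register the link in the map metadata
table = link.table()                # Table object bound to the new link
if not table.exist():
    table.create(cols)              # create the attribute table in sqlite.db
table.conn.commit()
vct.close()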
def main(opt, flg):
    #
    # Set check variables
    #
    overwrite = True
    rasters = opt['rasters'].split(',') if opt['rasters'] else []
    rprefix = opt['rprefix'].split(',') if opt['rprefix'] else []

    def split(x):
        return x.split('@') if '@' in x else (x, '')

    vname, vmset = split(opt['vector'])
    shpcsv = opt['shpcsv'] if opt['shpcsv'] else vname + '.csv'
    rstcsv = (opt['rstcsv'].split(',') if opt['rstcsv']
              else [split(rst)[0] + '.csv' for rst in rasters])
    zones = opt['zones'] if opt['zones'] else vname + '_zones'
    nprocs = int(opt.get('nprocs', 1))
    if rasters:
        if rprefix and len(rasters) != len(rprefix):
            raise ValueError("The number of rasters and of raster "
                             "prefixes must be the same.")
        if len(rasters) != len(rstcsv):
            raise ValueError("The number of rasters and of raster CSV "
                             "files must be the same.")
        prefixes = rprefix if rprefix else rasters
    else:
        prefixes = None
    skipshp = opt['skipshape'].split(',') if opt['skipshape'] else []
    skiprst = opt['skipunivar'].split(',') if opt['skipunivar'] else []
    layer = int(opt['layer'])
    newlayer = int(opt['newlayer'])
    newlayername = (opt['newlayername'] if opt['newlayername']
                    else vname + '_stats')
    newtabname = opt['newtabname'] if opt['newtabname'] else vname + '_stats'
    rstpercentile = float(opt['rstpercentile'])
    separator = opt.get('separator', ';')

    #
    # compute
    #
    if not os.path.exists(shpcsv):
        get_shp_csv(opt['vector'], shpcsv, overwrite, separator)
    if not get_mapset_raster(zones):
        get_zones(opt['vector'], zones, layer)
    if not rstcsv or not os.path.exists(rstcsv[0]):
        get_rst_csv(rasters, zones, rstcsv, rstpercentile, overwrite,
                    nprocs, separator)

    newlink = Link(newlayer, newlayername, newtabname)
    newtab = newlink.table()
    with Vector(vname, vmset, mode='r', layer=layer) as vct:
        mode = 'r' if newlink in vct.dblinks else 'rw'

    with VectorTopo(vname, vmset, mode=mode, layer=layer) as vct:
        update_cols(newtab, shpcsv, rstcsv, prefixes,
                    skipshp, skiprst, separator=separator)
        if mode == 'rw':
            # add the new link
            vct.dblinks.add(newlink)
            vct.build()
def _create_table(self):
    """Create the attribute table for the layer and link it to the
    polygon map."""
    link = Link(self.layer, self.tab_name, self.tab_name,
                "cat" + str(self.layer))
    self.polygon.dblinks.add(link)
    table = link.table()
    tab_sufix = self.name
    if self.name == "":
        tab_sufix = "first"
    TABLES[tab_sufix][0] = ("cat" + str(self.layer), "INTEGER PRIMARY KEY")
    if not table.exist():
        table.create(TABLES[tab_sufix])
    table.conn.commit()
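# _create_table rewrites the first column definition in the shared TABLES
# registry before creating the table, so TABLES is presumably a module-level
# dict mapping a table suffix to a list of (name, type) pairs, e.g.:
TABLES = {
    "first": [
        ("cat1", "INTEGER PRIMARY KEY"),  # overwritten per layer above
        ("name", "TEXT"),                 # illustrative payload column
    ],
    # ... one entry per named table suffix
}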
def extract_training(vect, tvect, tlayer):
    """Assign a class to all the areas that contain, are contained in,
    or intersect a training vector."""
    msgr = get_msgr()
    tname, tmset = tvect.split('@') if '@' in tvect else (tvect, '')
    vname, vmset = vect.split('@') if '@' in vect else (vect, '')
    with VectorTopo(tname, tmset, mode='r') as trn:
        with VectorTopo(vname, vmset, mode='r') as vct:
            layer_num, layer_name = get_layer_num_name(vct, tlayer)
            # instantiate the area objects
            trn_area = Area(c_mapinfo=trn.c_mapinfo)
            seg_area = Area(c_mapinfo=vct.c_mapinfo)
            n_areas = trn.number_of('areas')
            # check/remove/create a new table
            table, create_link = make_new_table(vct, layer_name, force=True)
            find_lines(table, [l for l in trn.viter('lines')], vct)
            # find and save all the segments
            find_area(table, trn.viter('areas', idonly=True),
                      trn_area, seg_area, n_areas, vct)
            check_balance(table, trn.table)

    if create_link:
        msgr.message(_("Connect the new table to the vector map..."))
        with Vector(vect, mode='rw') as seg:
            link = Link(layer_num, name=layer_name, table=table.name)
            seg.dblinks.add(link)
            seg.build()
def create_db_links(self, vect_map, linking_elem):
    """Create a DB link and attribute table for each layer description.

    :param vect_map: an open vector map
    :param linking_elem: mapping of layer name to layer description
    """
    dblinks = {}
    for layer_name, layer_dscr in linking_elem.iteritems():
        # create the DB link
        dblink = Link(layer=layer_dscr.layer_number,
                      name=layer_name,
                      table=vect_map.name + layer_dscr.table_suffix,
                      key='cat')
        # add the link to the vector map
        if dblink not in vect_map.dblinks:
            vect_map.dblinks.add(dblink)
        # create the table
        dbtable = dblink.table()
        dbtable.create(layer_dscr.cols, overwrite=True)
        dblinks[layer_name] = self.LinkDescr(dblink.layer, dbtable)
    return dblinks
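# A usage sketch for create_db_links: each value in linking_elem must carry
# layer_number, table_suffix and cols, and the owning class must provide a
# LinkDescr pair type. The shapes below are assumptions for illustration,
# not the surrounding module's real definitions.
from collections import namedtuple

LayerDscr = namedtuple('LayerDscr', 'layer_number table_suffix cols')
linking_elem = {
    'stations': LayerDscr(layer_number=2,
                          table_suffix='_stations',
                          cols=[('cat', 'INTEGER PRIMARY KEY'),
                                ('height', 'DOUBLE')]),
}
# dblinks = self.create_db_links(vect_map, linking_elem)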
def export_results(vect_name, results, cats, rlayer,
                   training=None, cols=None, overwrite=False,
                   append=False, pkl=None):
    if pkl:
        res = open(pkl, 'w')
        pickle.dump(results, res)
        res.close()

    # check if the link already exists
    with Vector(vect_name, mode='r') as vct:
        link = vct.dblinks.by_name(rlayer)
        mode = 'r' if link else 'w'

    print("Opening vector <%s>" % vect_name)
    with Vector(vect_name, mode=mode) as vect:
        if cols:
            cols.insert(0, COLS[0])
            tab = link.table() if link else Table(rlayer, vect.table.conn)
            if tab.exist() and append:
                columns_to_up = []
                # add the columns to the table
                for cname, ctype in cols:
                    columns_to_up.append("%s=?" % cname)
                    if cname not in tab.columns:
                        tab.columns.add(cname, ctype)
                upsql = "UPDATE %s SET %s WHERE %s=%s"
                up = upsql % (tab.name, ','.join(columns_to_up), tab.key, '?')
            else:
                if tab.exist():
                    print("Table <%s> already exists and will be "
                          "removed." % tab.name)
                    tab.drop(force=True)
                print("Creating a new table <%s>." % rlayer)
                tab.create(cols)
                up = ''
            export2onesqlite(tab, cats.astype(int), up,
                             *[cls['predict'].astype(int)
                               for cls in results])
            if mode == 'w':
                nlyr = len(vect.dblinks) + 1
                link = Link(nlyr, tab.name, tab.name)
                vect.dblinks.add(link)
                vect.build()
        else:
            for cls in results:
                create_tab(vect, cls['name'], cats, cls['predict'],
                           training, COLS if training else COLS[:2])
def create_tab(vect, tab_name, cats, clsses, cols, training=None):
    cur = vect.table.conn.cursor()
    table = Table(tab_name, vect.table.conn)
    add_link = True
    if table.exist():
        print("Table <%s> already exists and will be removed." % tab_name)
        table.drop(cursor=cur)
        add_link = False
    print("Creating a new table <%s>." % tab_name)
    table.create(cols, cursor=cur)
    export2sqlite(table, cats, clsses,
                  Table(training, vect.table.conn) if training else None)
    cur.close()
    if add_link:
        vect.dblinks.add(Link(layer=len(vect.dblinks) + 1,
                              name=tab_name, table=tab_name))
def open(self, mode=None, layer=1, overwrite=None, with_z=None,
         # parameters valid only if mode == 'w'
         tab_name='', tab_cols=None, link_name=None, link_key='cat',
         link_db='$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db',
         link_driver='sqlite'):
    """Open a Vector map.

    :param mode: open the vector map in ``r`` (read), ``w`` (write) or
                 ``rw`` (read and write) mode
    :type mode: str
    :param layer: specify the layer that you want to use
    :type layer: int
    :param overwrite: valid only for ``w`` mode
    :type overwrite: bool
    :param with_z: specify if the vector map must be opened with the
                   third dimension enabled or not. Valid only for ``w``
                   mode, default: False
    :type with_z: bool
    :param tab_name: define the name of the table that will be generated
    :type tab_name: str
    :param tab_cols: define the name and type of the columns of the
                     attribute table of the vector map
    :type tab_cols: list of pairs
    :param link_name: define the name of the link connection with the
                      database
    :type link_name: str
    :param link_key: define the name of the column that will be used as
                     vector category
    :type link_key: str
    :param link_db: define the database connection parameters
    :type link_db: str
    :param link_driver: define which database driver will be used
    :type link_driver: str

    Some of the parameters are valid only with mode ``w`` or ``rw``.

    See more examples in the documentation of the ``read`` and ``write``
    methods.
    """
    with_z = libvect.WITH_Z if with_z else libvect.WITHOUT_Z
    # check if map exists or not
    if not self.exist() and mode != 'w':
        raise OpenError("Map <%s> not found." % self._name)
    if libvect.Vect_set_open_level(self._topo_level) != 0:
        raise OpenError("Invalid access level.")
    # update the overwrite attribute
    self.overwrite = overwrite if overwrite is not None else self.overwrite
    # check if the mode is valid
    if mode not in ('r', 'rw', 'w'):
        raise ValueError("Mode not supported. Use one of: 'r', 'rw', 'w'.")
    # check if the map exists
    if self.exist() and mode in ('r', 'rw'):
        # open in READ mode
        if mode == 'r':
            openvect = libvect.Vect_open_old2(self.c_mapinfo, self.name,
                                              self.mapset, str(layer))
        # open in READ and WRITE mode
        elif mode == 'rw':
            openvect = libvect.Vect_open_update2(self.c_mapinfo, self.name,
                                                 self.mapset, str(layer))
        # instantiate class attributes
        self.dblinks = DBlinks(self.c_mapinfo)

    # If it is opened in write mode
    if mode == 'w':
        openvect = libvect.Vect_open_new(self.c_mapinfo, self.name, with_z)
        self.dblinks = DBlinks(self.c_mapinfo)
        if tab_cols:
            # create a link
            link = Link(layer,
                        link_name if link_name else self.name,
                        tab_name if tab_name else self.name,
                        link_key, link_db, link_driver)
            # add the new link
            self.dblinks.add(link)
            # create the table
            table = link.table()
            table.create(tab_cols)
            table.conn.commit()

    # check the C function result.
    if openvect == -1:
        str_err = "Not able to open the map, C function return %d."
        raise OpenError(str_err % openvect)

    if len(self.dblinks) == 0:
        self.layer = layer
        self.table = None
        self.n_lines = 0
    else:
        self.layer = self.dblinks.by_layer(layer).layer
        self.table = self.dblinks.by_layer(layer).table()
        self.n_lines = self.table.n_rows()
    self.writable = self.mapset == functions.getenv("MAPSET")
    self.find = {'by_point': PointFinder(self.c_mapinfo, self.table,
                                         self.writable),
                 'by_box': BboxFinder(self.c_mapinfo, self.table,
                                      self.writable),
                 'by_polygon': PolygonFinder(self.c_mapinfo, self.table,
                                             self.writable), }
def open(self, mode=None, layer=1, overwrite=None, with_z=None,
         # parameters valid only if mode == 'w'
         tab_name="", tab_cols=None, link_name=None, link_key="cat",
         link_db="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
         link_driver="sqlite"):
    """Open a Vector map.

    :param mode: open the vector map in ``r`` (read), ``w`` (write) or
                 ``rw`` (read and write) mode
    :type mode: str
    :param layer: specify the layer that you want to use
    :type layer: int
    :param overwrite: valid only for ``w`` mode
    :type overwrite: bool
    :param with_z: specify if the vector map must be opened with the
                   third dimension enabled or not. Valid only for ``w``
                   mode, default: False
    :type with_z: bool
    :param tab_name: define the name of the table that will be generated
    :type tab_name: str
    :param tab_cols: define the name and type of the columns of the
                     attribute table of the vector map
    :type tab_cols: list of pairs
    :param link_name: define the name of the link connection with the
                      database
    :type link_name: str
    :param link_key: define the name of the column that will be used as
                     vector category
    :type link_key: str
    :param link_db: define the database connection parameters
    :type link_db: str
    :param link_driver: define which database driver will be used
    :type link_driver: str

    Some of the parameters are valid only with mode ``w`` or ``rw``.

    See more examples in the documentation of the ``read`` and ``write``
    methods.
    """
    self.mode = mode if mode else self.mode
    with_z = libvect.WITH_Z if with_z else libvect.WITHOUT_Z
    # check if map exists or not
    if not self.exist() and self.mode != "w":
        raise OpenError("Map <%s> not found." % self._name)
    if libvect.Vect_set_open_level(self._topo_level) != 0:
        raise OpenError("Invalid access level.")
    # update the overwrite attribute
    self.overwrite = overwrite if overwrite is not None else self.overwrite
    # check if the mode is valid
    if self.mode not in ("r", "rw", "w"):
        raise ValueError("Mode not supported. Use one of: 'r', 'rw', 'w'.")
    # check if the map exists
    if self.exist() and self.mode in ("r", "rw"):
        # open in READ mode
        if self.mode == "r":
            openvect = libvect.Vect_open_old2(self.c_mapinfo, self.name,
                                              self.mapset, str(layer))
        # open in READ and WRITE mode
        elif self.mode == "rw":
            openvect = libvect.Vect_open_update2(self.c_mapinfo, self.name,
                                                 self.mapset, str(layer))
        # instantiate class attributes
        self.dblinks = DBlinks(self.c_mapinfo)

    # If it is opened in write mode
    if self.mode == "w":
        openvect = libvect.Vect_open_new(self.c_mapinfo, self.name, with_z)
        self.dblinks = DBlinks(self.c_mapinfo)

    if self.mode in ("w", "rw") and tab_cols:
        # create a link
        link = Link(layer,
                    link_name if link_name else self.name,
                    tab_name if tab_name else self.name,
                    link_key, link_db, link_driver)
        # add the new link
        self.dblinks.add(link)
        # create the table
        table = link.table()
        table.create(tab_cols, overwrite=overwrite)
        table.conn.commit()

    # check the C function result.
    if openvect == -1:
        str_err = "Not able to open the map, C function return %d."
        raise OpenError(str_err % openvect)

    # Load the attribute table for the selected layer.
    if len(self.dblinks) == 0:
        self.layer = layer
        self.table = None
        self.n_lines = 0
    else:
        layer_db_link = self.dblinks.by_layer(layer)
        if not layer_db_link:
            raise LookupError(
                "There appears to be no database link for layer %d of <%s>."
                % (layer, self.name))
        if layer_db_link.layer != layer:
            raise RuntimeError(
                "The database link for layer %d of <%s> references layer %d."
                % (layer, self.name, layer_db_link.layer))
        self.layer = layer
        try:
            self.table = layer_db_link.table()
        except Exception as error:
            raise RuntimeError(
                "Loading the attribute table for layer %d of <%s> failed."
                % (layer, self.name)) from error
        self.n_lines = self.table.n_rows()
    self.writeable = self.mapset == utils.getenv("MAPSET")
    # Initialize the finders
    self.find = {
        "by_point": PointFinder(self.c_mapinfo, self.table, self.writeable),
        "by_bbox": BboxFinder(self.c_mapinfo, self.table, self.writeable),
        "by_polygon": PolygonFinder(self.c_mapinfo, self.table,
                                    self.writeable),
    }
    self.find_by_point = self.find["by_point"]
    self.find_by_bbox = self.find["by_bbox"]
    self.find_by_polygon = self.find["by_polygon"]
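# For reference, the write-mode path above is what enables the one-call
# table setup when a map is used as a context manager; a minimal usage
# sketch (the map name and columns are illustrative):
from grass.pygrass.vector import VectorTopo
from grass.pygrass.vector.geometry import Point

cols = [("cat", "INTEGER PRIMARY KEY"), ("name", "TEXT")]
with VectorTopo("new_points", mode="w", tab_cols=cols,
                overwrite=True) as new:
    # open('w', tab_cols=...) created the map, the layer-1 link and
    # the attribute table in one step
    new.write(Point(636981.3, 256517.6), cat=1, attrs=("pub",))
    new.table.conn.commit()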
def maps_rows_timestamps(parsed_obs, offering, new, seconds_granularity,
                         target, obs_props, epoch_s, epoch_e, i):
    """Import vectors with rows representing timestamps.

    Layers represent output_offering_procedure and rows represent
    timestamps

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group
        them up
    :param new: Given vector map which should be updated with new layers
    :param seconds_granularity: Granularity in seconds
    :param target: The target CRS for sensors
    :param obs_props: Observed properties
    :param epoch_s: time.mktime standardized timestamp of the beginning
        of obs
    :param epoch_e: time.mktime standardized timestamp of the end of obs
    :param i: Index of the first free layer
    """
    db = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'

    points = dict()
    free_cat = 1

    for propIndex in range(len(obs_props)):
        obs_props[propIndex] = soslib.standardize_table_name(
            [obs_props[propIndex]])

    for key, observation in parsed_obs.items():
        print('Working on the observed property {}'.format(key))
        key = soslib.standardize_table_name([key])

        data = json.loads(observation)
        # get the transformation between source and target crs
        crs = data['crs']
        crs = int(crs['properties']['name'].split(':')[-1])
        transform = soslib.get_transformation(crs, target)

        empty_procs = list()
        timestamp_pattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone
        cur_layer = i

        cols = [(u'connection', 'INTEGER'), (u'timestamp', 'VARCHAR')]
        for obsProp in obs_props:
            cols.append((u'{}'.format(obsProp), 'DOUBLE'))

        if new.is_open() is True:
            # close without printing that crazy amount of messages
            new.close(build=False)
            run_command('v.build', quiet=True, map=options['output'])
            new.open('rw')
        else:
            new.open('w')

        for a in data['features']:
            name = a['properties']['name']
            table_name = soslib.standardize_table_name(
                [options['output'], offering, name])

            intervals = {}
            for seconds_stamp in range(epoch_s, epoch_e + 1,
                                       seconds_granularity):
                intervals.update({seconds_stamp: dict()})

            empty = True
            for timestamp, value in a['properties'].items():
                if timestamp != 'name':
                    if empty:
                        empty = False
                    observationstart_time = timestamp[:-4]
                    seconds_timestamp = int(time.mktime(
                        time.strptime(observationstart_time,
                                      timestamp_pattern)))
                    for interval in intervals.keys():
                        if interval <= seconds_timestamp < (
                                interval + seconds_granularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(
                                    float(value))
                            else:
                                intervals[interval].update(
                                    {name: [float(value)]})
                            break
            if empty:
                # value holds the name of the last proc
                empty_procs.append(value)

            if new.is_open() is True:
                # close without printing that crazy amount of messages
                new.close(build=False)
                run_command('v.build', quiet=True, map=options['output'])
                new.open('rw')

            yet_existing = False
            if not new.dblinks.by_name(table_name):
                link = Link(layer=cur_layer, name=table_name,
                            table=table_name, key='connection',
                            database=db, driver='sqlite')
                new.dblinks.add(link)
                new.table = new.dblinks[cur_layer - 1].table()
                new.table.create(cols)
            else:
                yet_existing = True

            # open the right layer
            new.close(build=False)
            run_command('v.build', quiet=True, map=options['output'])
            new.open('rw', layer=cur_layer)

            if name not in points.keys():
                points.update({name: free_cat})
                # transform the geometry into the target crs
                sx, sy, sz = a['geometry']['coordinates']
                point = ogr.CreateGeometryFromWkt(
                    'POINT ({} {} {})'.format(sx, sy, sz))
                point.Transform(transform)
                coords = (point.GetX(), point.GetY(), point.GetZ())
                new.write(Point(*coords), cat=free_cat)
                free_cat += 1

            inserts = dict()
            # create attribute table inserts for empty procs
            for emptyProc in empty_procs:
                insert = [None] * len(cols)
                insert[0] = points[emptyProc]
                insert[1] = emptyProc
                inserts.update({emptyProc: insert})

            # create attribute table inserts for procs with observations
            for interval in intervals.keys():
                if len(intervals[interval]) != 0:
                    timestamp = datetime.datetime.fromtimestamp(
                        interval).strftime('t%Y%m%dT%H%M%S')
                    for name, values in intervals[interval].items():
                        if options['method'] == 'average':
                            aggregated_value = sum(values) / len(values)
                        elif options['method'] == 'sum':
                            aggregated_value = sum(values)

                        if yet_existing:
                            a = read_command(
                                'db.select',
                                sql='SELECT COUNT(*) FROM {} WHERE '
                                    'timestamp="{}"'.format(table_name,
                                                            timestamp))
                            if a.split('\n')[1] != '0':
                                run_command(
                                    'db.execute',
                                    sql='UPDATE {} SET {}={} WHERE '
                                        'timestamp="{}";'.format(
                                            table_name, key,
                                            aggregated_value, timestamp))
                                continue

                        # if name not in inserts.keys():
                        insert = [None] * len(cols)
                        insert[0] = points[name]
                        insert[1] = timestamp
                        insert[cols.index((key, 'DOUBLE'))] = \
                            aggregated_value
                        new.table.insert(tuple(insert))

            new.table.conn.commit()
            cur_layer += 1

    new.close()
def maps_rows_sensors(parsed_obs, offering, new, seconds_granularity,
                      target, epoch_s, epoch_e, i):
    """Import vectors with rows representing procedures.

    Layers represent output_offering_observedproperties and rows
    represent procedures

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group
        them up
    :param new: Given vector map which should be updated with new layers
    :param seconds_granularity: Granularity in seconds
    :param target: The target CRS for sensors
    :param epoch_s: time.mktime standardized timestamp of the beginning
        of obs
    :param epoch_e: time.mktime standardized timestamp of the end of obs
    :param i: Index of the first free layer
    """
    free_cat = 1

    for key, observation in parsed_obs.items():
        points = {}
        table_name = soslib.standardize_table_name(
            [options['output'], offering, key])

        data = json.loads(observation)
        # get the transformation between source and target crs
        crs = data['crs']
        crs = int(crs['properties']['name'].split(':')[-1])
        transform = soslib.get_transformation(crs, target)

        intervals = {}
        for seconds_stamp in range(epoch_s, epoch_e + 1,
                                   seconds_granularity):
            intervals.update({seconds_stamp: dict()})

        empty_procs = list()
        timestamp_pattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone
        coords_dict = {}

        cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'name', 'VARCHAR')]

        if new.is_open() is True:
            # close without printing that crazy amount of messages
            new.close(build=False)
            run_command('v.build', quiet=True, map=options['output'])
            new.open('rw')
        else:
            new.open('w')

        for a in data['features']:
            name = a['properties']['name']
            empty = True

            if name not in points.keys():
                points.update({name: free_cat})
                # transform the geometry into the target crs
                sx, sy, sz = a['geometry']['coordinates']
                point = ogr.CreateGeometryFromWkt(
                    'POINT ({} {} {})'.format(sx, sy, sz))
                point.Transform(transform)
                coords = (point.GetX(), point.GetY(), point.GetZ())
                coords_dict.update({free_cat: coords})
                free_cat += 1

            for timestamp, value in a['properties'].items():
                if timestamp != 'name':
                    if empty:
                        empty = False
                    observationstart_time = timestamp[:-4]
                    seconds_timestamp = int(time.mktime(
                        time.strptime(observationstart_time,
                                      timestamp_pattern)))
                    for interval in intervals.keys():
                        if interval <= seconds_timestamp < (
                                interval + seconds_granularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(
                                    float(value))
                            else:
                                timestamp2 = \
                                    datetime.datetime.fromtimestamp(
                                        interval).strftime(
                                            't%Y%m%dT%H%M%S')
                                intervals[interval].update(
                                    {name: [float(value)]})
                                if (u'%s' % timestamp2,
                                        'DOUBLE') not in cols:
                                    cols.append((u'%s' % timestamp2,
                                                 'DOUBLE'))
                            break
            if empty:
                # value holds the name of the last proc
                empty_procs.append(value)

        if len(cols) > 2000:
            grass.warning(
                'Recommended number of columns is less than 2000, you have '
                'reached {}\nYou should set an event_time with a smaller '
                'range or recompile SQLite limits as described at '
                'https://sqlite.org/limits.html'.format(len(cols)))

        link = Link(
            layer=i, name=table_name, table=table_name, key='cat',
            database='$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db',
            driver='sqlite')

        if new.is_open():
            new.close()
        new.open('rw')
        new.dblinks.add(link)
        new.table = new.dblinks[i - 1].table()
        new.table.create(cols)

        inserts = dict()

        new.close(build=False)
        run_command('v.build', quiet=True, map=options['output'])
        new.open('rw', layer=i)

        for cat, coords in coords_dict.items():
            new.write(Point(*coords), cat=cat)

        # create attribute table inserts for empty procs
        for emptyProc in empty_procs:
            insert = [None] * len(cols)
            insert[0] = points[emptyProc]
            insert[1] = emptyProc
            inserts.update({emptyProc: insert})

        # create attribute table inserts for procs with observations
        for interval in intervals.keys():
            if len(intervals[interval]) != 0:
                timestamp = datetime.datetime.fromtimestamp(
                    interval).strftime('t%Y%m%dT%H%M%S')
                for name, values in intervals[interval].items():
                    if options['method'] == 'average':
                        aggregated_value = sum(values) / len(values)
                    elif options['method'] == 'sum':
                        aggregated_value = sum(values)

                    if name not in inserts.keys():
                        insert = [None] * len(cols)
                        insert[0] = points[name]
                        insert[1] = name
                        insert[cols.index(
                            (timestamp, 'DOUBLE'))] = aggregated_value
                        inserts.update({name: insert})
                    else:
                        inserts[name][cols.index(
                            (timestamp, 'DOUBLE'))] = aggregated_value

        for insert in inserts.values():
            new.table.insert(tuple(insert))
        new.table.conn.commit()

        i += 1

    # to avoid printing that crazy amount of messages
    new.close(build=False)
    run_command('v.build', quiet=True, map=options['output'])
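# The wide-table layout above is what the 2000-column warning guards:
# maps_rows_sensors appends one DOUBLE column per non-empty aggregation
# interval. A worked example of that growth (numbers are illustrative):
days, seconds_granularity = 30, 3600
max_new_cols = days * 24 * 3600 // seconds_granularity
print(max_new_cols)  # up to 720 t%Y%m%dT%H%M%S columns for a 30-day request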
def create_maps(parsed_obs, offering, seconds_granularity, event_time):
    """Create vector maps representing offerings and observed properties.

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group
        them up
    :param seconds_granularity: Granularity in seconds
    :param event_time: Requested observation period (used to derive the
        start and end epochs)
    """
    timestamp_pattern = '%Y-%m-%dT%H:%M:%S'  # TODO: Timezone
    start_time = event_time.split('+')[0]
    epoch_s = int(time.mktime(time.strptime(start_time,
                                            timestamp_pattern)))
    end_time = event_time.split('+')[1].split('/')[1]
    epoch_e = int(time.mktime(time.strptime(end_time, timestamp_pattern)))

    for key, observation in parsed_obs.items():
        run_command('g.message',
                    message='Creating vector maps for {}...'.format(key))
        map_name = '{}_{}_{}'.format(options['output'], offering, key)
        if ':' in map_name:
            map_name = '_'.join(map_name.split(':'))
        if '-' in map_name:
            map_name = '_'.join(map_name.split('-'))
        if '.' in map_name:
            map_name = '_'.join(map_name.split('.'))

        run_command('t.create', output=map_name, type='stvds',
                    title='Dataset for offering {} and observed '
                          'property {}'.format(offering, key),
                    description='Vector space time dataset')

        free_cat = 1
        points = dict()
        new = VectorTopo(map_name)
        if overwrite() is True:
            try:
                new.remove()
            except:
                pass

        data = json.loads(observation)

        cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'name', 'VARCHAR'),
                (u'value', 'DOUBLE')]

        intervals = {}
        for secondsStamp in range(epoch_s, epoch_e + 1,
                                  seconds_granularity):
            intervals.update({secondsStamp: dict()})

        timestamp_pattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone

        for a in data['features']:
            name = a['properties']['name']

            if a['properties']['name'] not in points.keys():
                if new.is_open() is False:
                    new.open('w')
                points.update({a['properties']['name']: free_cat})
                new.write(Point(*a['geometry']['coordinates']))
                free_cat += 1

            for timestamp, value in a['properties'].items():
                if timestamp != 'name':
                    observationstart_time = timestamp[:-4]
                    seconds_timestamp = int(time.mktime(
                        time.strptime(observationstart_time,
                                      timestamp_pattern)))
                    for interval in intervals.keys():
                        if interval <= seconds_timestamp < (
                                interval + seconds_granularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(
                                    float(value))
                            else:
                                intervals[interval].update(
                                    {name: [float(value)]})
                            break

        if new.is_open():
            new.close(build=False)
            run_command('v.build', map=map_name, quiet=True)

        i = 1
        layers_timestamps = list()
        for interval in intervals.keys():
            if len(intervals[interval]) != 0:
                timestamp = datetime.datetime.fromtimestamp(
                    interval).strftime('t%Y%m%dT%H%M%S')

                table_name = '{}_{}_{}_{}'.format(options['output'],
                                                  offering, key, timestamp)
                if ':' in table_name:
                    table_name = '_'.join(table_name.split(':'))
                if '-' in table_name:
                    table_name = '_'.join(table_name.split('-'))
                if '.' in table_name:
                    table_name = '_'.join(table_name.split('.'))

                new.open('rw')
                db = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'
                link = Link(layer=i, name=table_name, table=table_name,
                            key='cat', database=db, driver='sqlite')
                new.dblinks.add(link)
                new.table = new.dblinks[i - 1].table()
                new.table.create(cols)

                i += 1
                layers_timestamps.append(timestamp)

                for name, values in intervals[interval].items():
                    if options['method'] == 'average':
                        aggregated_value = sum(values) / len(values)
                    elif options['method'] == 'sum':
                        aggregated_value = sum(values)
                    new.table.insert(
                        tuple([points[name], name, aggregated_value]))
                    new.table.conn.commit()

                new.close(build=False)
                run_command('v.build', map=map_name, quiet=True)

        create_temporal(map_name, i, layers_timestamps)
def write_structures(
    plants,
    output,
    elev,
    stream=None,
    ndigits=0,
    resolution=None,
    contour="",
    overwrite=False,
):
    """Write a vector map with the plant structures"""

    def write_hydrostruct(out, hydro, plant):
        pot = plant.potential_power(
            intakes=[
                hydro.intake,
            ]
        )
        (plant_id, itk_id, side, disch, gross_head) = (
            plant.id,
            hydro.intake.id,
            hydro.side,
            float(hydro.intake.discharge),
            float(hydro.intake.elevation - plant.restitution.elevation),
        )
        out.write(
            hydro.conduct,
            (plant_id, itk_id, disch, 0.0, 0.0, "conduct", side)
        )
        out.write(
            hydro.penstock,
            (plant_id, itk_id, disch, gross_head, pot, "penstock", side)
        )
        out.table.conn.commit()

    tab_cols = [
        (u"cat", "INTEGER PRIMARY KEY"),
        (u"plant_id", "VARCHAR(10)"),
        (u"intake_id", "INTEGER"),
        (u"discharge", "DOUBLE"),
        (u"gross_head", "DOUBLE"),
        (u"power", "DOUBLE"),
        (u"kind", "VARCHAR(10)"),
        (u"side", "VARCHAR(10)"),
    ]

    with VectorTopo(output, mode="w", overwrite=overwrite) as out:
        link = Link(layer=1, name=output, table=output, driver="sqlite")
        out.open("w")
        out.dblinks.add(link)
        out.table = out.dblinks[0].table()
        out.table.create(tab_cols)

        print("Number of plants: %d" % len(plants))

        # initialize so the checks below are defined even when the user
        # supplies an existing contour map without overwrite
        compute_contour = False
        remove = False
        # check if a contour vector map is provided by the user
        if contour:
            cname, cmset = (
                contour.split("@") if "@" in contour else (contour, "")
            )
            # check if the map already exists
            if bool(utils.get_mapset_vector(cname, cmset)) and overwrite:
                compute_contour = True
                remove = False
        else:
            # create a random name
            contour = "tmp_struct_contour_%05d_%03d" % (
                os.getpid(),
                random.randint(0, 999),
            )
            compute_contour = True
            remove = True

        if compute_contour:
            # compute the levels of the contour lines map
            levels = []
            for p in plants.values():
                for itk in p.intakes:
                    levels.append(
                        closest(itk.elevation, ndigits=ndigits,
                                resolution=resolution)
                    )
            levels = sorted(set(levels))
            # generate the contour lines that pass through the points
            r.contour(
                input="%s@%s" % (elev.name, elev.mapset),
                output=contour,
                step=0,
                levels=levels,
                overwrite=True,
            )

        # open the contour lines
        with VectorTopo(contour, mode="r") as cnt:
            for plant in plants.values():
                print(plant.id)
                for options in plant.structures(
                    elev,
                    stream=stream,
                    ndigits=ndigits,
                    resolution=resolution,
                    contour=cnt,
                ):
                    for hydro in options:
                        print("writing: ", hydro.intake)
                        write_hydrostruct(out, hydro, plant)

        if remove:
            cnt.remove()
def create_maps(parsed_obs, offering, layer, new, secondsGranularity,
                event_time, service):
    """Add layers representing offerings and observed properties to the
    vector map.

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group
        them up
    :param layer: Number of layers already present in the vector map
    :param new: Given vector map which should be updated with new layers
    :param secondsGranularity: Granularity in seconds
    :param event_time: Requested observation period
    :param service: SOS service instance used to describe the sensors
    """
    i = layer + 1
    points = dict()
    freeCat = 1

    if flags['s']:
        # Set target projection of current LOCATION
        target_crs = grass.read_command('g.proj', flags='fj').rstrip(
            os.linesep)
        target = osr.SpatialReference(target_crs)
        target.ImportFromProj4(target_crs)
        if target_crs == 'XY location (unprojected)':
            grass.fatal("Sorry, XY locations are not supported!")

        # The following is work in progress
        cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'name', 'varchar'),
                (u'description', 'varchar'), (u'keywords', 'varchar'),
                (u'sensor_type', 'varchar'), (u'system_type', 'varchar'),
                (u'crs', 'INTEGER'), (u'x', 'DOUBLE'), (u'y', 'DOUBLE'),
                (u'z', 'DOUBLE')]
        # new = Vector(new)
        if new.is_open() is False:
            new.open('w', tab_name=options['output'], tab_cols=cols)

        offs = [o.id for o in service.offerings]
        off_idx = offs.index(offering)
        outputFormat = service.get_operation_by_name(
            'DescribeSensor').parameters['outputFormat']['values'][0]
        procedures = service.offerings[off_idx].procedures

        for proc in procedures:
            response = service.describe_sensor(procedure=proc,
                                               outputFormat=outputFormat)
            root = SensorML(response)
            system = root.members[0]

            name = system.name
            desc = system.description
            keywords = ','.join(system.keywords)
            sensType = system.classifiers['Sensor Type'].value
            sysType = system.classifiers['System Type'].value
            crs = int(system.location[0].attrib['srsName'].split(':')[1])
            coords = system.location[0][0].text.replace('\n', '')
            sx = float(coords.split(',')[0])
            sy = float(coords.split(',')[1])
            sz = float(coords.split(',')[2])

            # Set source projection from SOS
            source = osr.SpatialReference()
            source.ImportFromEPSG(crs)

            transform = osr.CoordinateTransformation(source, target)
            point = ogr.CreateGeometryFromWkt(
                'POINT ({} {} {})'.format(sx, sy, sz))
            point.Transform(transform)
            x = point.GetX()
            y = point.GetY()
            z = point.GetZ()

            if name not in points.keys():
                points.update({name: freeCat})
                point = Point(x, y, z)
                new.write(point, cat=freeCat, attrs=(
                    u'{}'.format(system.name.decode('utf-8')),
                    system.description,
                    ','.join(system.keywords),
                    system.classifiers['Sensor Type'].value,
                    system.classifiers['System Type'].value,
                    crs,
                    float(coords.split(',')[0]),
                    float(coords.split(',')[1]),
                    float(coords.split(',')[2]),
                ))
                freeCat += 1
        new.table.conn.commit()
        new.close(build=True)
    else:
        timestampPattern = '%Y-%m-%dT%H:%M:%S'  # TODO: Timezone
        startTime = event_time.split('+')[0]
        epochS = int(time.mktime(time.strptime(startTime,
                                               timestampPattern)))
        endTime = event_time.split('+')[1].split('/')[1]
        epochE = int(time.mktime(time.strptime(endTime,
                                               timestampPattern)))

        for key, observation in parsed_obs.iteritems():
            tableName = '{}_{}_{}'.format(options['output'], offering, key)
            if ':' in tableName:
                tableName = '_'.join(tableName.split(':'))
            if '-' in tableName:
                tableName = '_'.join(tableName.split('-'))
            if '.' in tableName:
                tableName = '_'.join(tableName.split('.'))

            data = json.loads(observation)

            intervals = {}
            for secondsStamp in range(epochS, epochE + 1,
                                      secondsGranularity):
                intervals.update({secondsStamp: dict()})

            timestampPattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone

            cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'name', 'VARCHAR')]

            for a in data['features']:
                name = a['properties']['name']
                if name not in points.keys():
                    if new.is_open() is False:
                        new.open('w')
                    points.update({name: freeCat})
                    new.write(Point(*a['geometry']['coordinates']),
                              cat=freeCat)
                    freeCat += 1

                for timestamp, value in a['properties'].iteritems():
                    if timestamp != 'name':
                        observationStartTime = timestamp[:-4]
                        secondsTimestamp = int(time.mktime(
                            time.strptime(observationStartTime,
                                          timestampPattern)))
                        for interval in intervals.keys():
                            if secondsTimestamp >= interval \
                                    and secondsTimestamp < (
                                        interval + secondsGranularity):
                                if name in intervals[interval].keys():
                                    intervals[interval][name].append(
                                        float(value))
                                else:
                                    timestamp2 = \
                                        datetime.datetime.fromtimestamp(
                                            interval).strftime(
                                                't%Y%m%dT%H%M%S')
                                    intervals[interval].update(
                                        {name: [float(value)]})
                                    if (u'%s' % timestamp2,
                                            'DOUBLE') not in cols:
                                        cols.append((u'%s' % timestamp2,
                                                     'DOUBLE'))
                                break

            if len(cols) > 2000:
                grass.warning(
                    'Recommended number of columns is less than 2000, you '
                    'have reached {}\nYou should set an event_time with a '
                    'smaller range or recompile SQLite limits as described '
                    'at https://sqlite.org/limits.html'.format(len(cols)))

            link = Link(
                layer=i, name=tableName, table=tableName, key='cat',
                database='$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/'
                         'sqlite.db',
                driver='sqlite')

            if new.is_open():
                new.close()
            new.open('rw')
            new.dblinks.add(link)
            new.table = new.dblinks[i - 1].table()
            new.table.create(cols)

            inserts = dict()
            for interval in intervals.keys():
                if len(intervals[interval]) != 0:
                    timestamp = datetime.datetime.fromtimestamp(
                        interval).strftime('t%Y%m%dT%H%M%S')
                    for name, values in intervals[interval].iteritems():
                        if options['method'] == 'average':
                            aggregatedValue = sum(values) / len(values)
                        elif options['method'] == 'sum':
                            aggregatedValue = sum(values)

                        if name not in inserts.keys():
                            insert = [None] * len(cols)
                            insert[0] = points[name]
                            insert[1] = name
                            insert[cols.index(
                                (timestamp, 'DOUBLE'))] = aggregatedValue
                            inserts.update({name: insert})
                        else:
                            inserts[name][cols.index(
                                (timestamp, 'DOUBLE'))] = aggregatedValue

            for insert in inserts.values():
                new.table.insert(tuple(insert))
            new.table.conn.commit()

            new.close(build=False)
            run_command('v.build', quiet=True, map=options['output'])
            i += 1