def load(self):
    """Load all info from an existing vector map into the internal structure"""
    # Get the data from an existing vector map
    kvp = vector.vector_info(self.ident)

    # Fill base information
    self.base.set_name(self.ident.split("@")[0])
    self.base.set_mapset(self.ident.split("@")[1])
    self.base.set_creator(str(getpass.getuser()))

    # Fill spatial extent
    self.set_spatial_extent(north=kvp["north"], south=kvp["south"],
                            east=kvp["east"], west=kvp["west"],
                            top=kvp["top"], bottom=kvp["bottom"])
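# Usage sketch (not part of the original file): this load() method appears to
# come from the GRASS temporal framework's VectorDataset class, so a minimal
# call might look like the following. The map name "roads@PERMANENT" is only a
# placeholder and a running GRASS session is assumed.
import grass.temporal as tgis

tgis.init()                                  # initialize the temporal framework
vd = tgis.VectorDataset("roads@PERMANENT")   # ident is "name@mapset"
vd.load()                                    # fills base info and spatial extent
print(vd.base.get_name(), vd.base.get_mapset())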
def main():
    infile = options['input']
    compression_off = flags['c']

    global basedir
    basedir = grass.tempdir()

    # check if the vector map exists
    gfile = grass.find_file(infile, element='vector')
    if not gfile['name']:
        grass.fatal(_("Vector map <%s> not found") % infile)

    # check if the input vector map is in the native format
    if vector.vector_info(gfile['fullname'])['format'] != 'native':
        grass.fatal(_("Unable to pack vector map <%s>. Only native format supported.")
                    % gfile['fullname'])

    # strip the mapset name if present
    if '@' in infile:
        infile = infile.split('@')[0]

    # output name
    if options['output']:
        outfile = options['output']
    else:
        outfile = infile + '.pack'

    # check if the output file already exists
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(_("Pack file <%s> already exists and will be overwritten")
                          % outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))

    # prepare for packing
    grass.verbose(_("Packing <%s>...") % gfile['fullname'])

    # write tar file, optional compression
    if compression_off:
        tar = tarfile.open(name=outfile, mode='w:')
    else:
        tar = tarfile.open(name=outfile, mode='w:gz')
    tar.add(gfile['file'], infile)

    # check if a database connection exists for the vector map
    db_vect = vector.vector_db(gfile['fullname'])
    if not db_vect:
        grass.verbose(_('There is no database connected with vector map <%s>')
                      % gfile['fullname'])
    else:
        # for each layer connection, save the table into one SQLite database
        sqlitedb = os.path.join(basedir, 'db.sqlite')
        for i, dbconn in db_vect.items():
            grass.run_command('db.copy',
                              from_driver=dbconn['driver'],
                              from_database=dbconn['database'],
                              from_table=dbconn['table'],
                              to_driver='sqlite',
                              to_database=sqlitedb,
                              to_table=dbconn['table'])
        tar.add(sqlitedb, 'db.sqlite')

    # add the PROJ files to the tar file so they can be checked when unpacking
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS', 'EPSG']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            tar.add(path, 'PROJ_' + support)

    tar.close()

    grass.message(_("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
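# Quick sanity check (sketch, not part of the original script): list what ended
# up in a pack file using only the Python standard library. The file name
# "roads.pack" is just a placeholder.
import tarfile

with tarfile.open("roads.pack", mode="r:*") as tar:   # "r:*" autodetects gzip
    for member in tar.getnames():
        print(member)   # e.g. the vector directory, db.sqlite, PROJ_INFO, ...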
def main():
    export_shifted = flags['s']
    currmapset = grass.gisenv()['MAPSET']

    # check for v.in.ply, v.out.ply
    if not grass.find_program("v.in.ply", '--help'):
        grass.fatal(_("The GRASS addon v.in.ply was not found, please install it first.\n"))
    if not grass.find_program("v.out.ply", '--help'):
        grass.fatal(_("The GRASS addon v.out.ply was not found, please install it first.\n"))

    # import input PLY file
    infile = options['input']
    if not os.path.exists(infile):
        grass.fatal(_("Unable to read input file <%s>") % infile)
    grass.debug("input file=[%s]" % infile)

    if not infile[-4:].lower() == '.ply':
        grass.fatal(_("Input file must end with .ply or .PLY"))

    gcpfile = infile[:-4] + ".txt"
    srcdir, ply = os.path.split(infile)
    ply = ply[:-4]
    if not os.path.exists(gcpfile):
        gcpfile = infile[:-4] + ".TXT"
        if not os.path.exists(gcpfile):
            grass.fatal(_("Input file with GCPs must be <%s> or <%s>")
                        % (ply + ".txt", ply + ".TXT"))

    if options['output']:
        ply = options['output']

    clist = list()
    plydesc = grass.read_command('v.in.ply', flags='p', input=infile, output=ply)

    # remember column names for vertices
    currname = ''
    currprop = ''
    for line in plydesc.splitlines():
        f = line.split(':')
        if f[0] == 'element name':
            # new element
            currname = f[1].strip()
            currprop = ''
        if f[0] == 'property name':
            currprop = f[1].strip()
            if currname == 'vertex' and currprop not in ['x', 'y', 'z']:
                clist.append(currprop)
    columns = ','.join(clist)

    grass.run_command('v.in.ply', flags='b', input=infile, output=ply)

    # does the imported vector exist?
    found = grass.find_file(ply, element='vector', mapset=currmapset)
    if found['name'] != ply:
        grass.fatal(_('PLY import failed!'))

    # detach table
    table = gvector.vector_layer_db(map=ply, layer=1)['table']
    grass.run_command('v.db.connect', map=ply, layer=1, flags='d')

    # print RMS
    rmsfile = os.path.join(srcdir, ply + "_rms.csv")
    grass.run_command('v.rectify', input=ply, output=ply + '_georef',
                      points=gcpfile, flags='3bor', separator=';',
                      rmsfile=rmsfile)

    # georectify
    ply_georef = ply + '_georef'
    grass.run_command('v.rectify', input=ply, output=ply_georef,
                      points=gcpfile, flags='3bo')

    # does the georeferenced vector exist?
    found = grass.find_file(ply_georef, element='vector', mapset=currmapset)
    if found['name'] != ply_georef:
        grass.run_command('v.db.connect', map=ply, layer=1, table=table, key='cat')
        grass.fatal(_('PLY georectification failed!'))

    grass.run_command('v.db.connect', map=ply_georef, layer=1, table=table, key='cat')

    output = os.path.join(srcdir, ply_georef + '.ply')
    grass.run_command('v.out.ply', input=ply_georef, output=output, columns=columns)
    grass.run_command('v.db.connect', map=ply_georef, layer=1, flags='d')

    if export_shifted:
        vinfo = gvector.vector_info(map=ply_georef)
        north_center = (float(vinfo['north']) + float(vinfo['south'])) / -2.0
        east_center = (float(vinfo['east']) + float(vinfo['west'])) / -2.0
        height_center = (float(vinfo['top']) + float(vinfo['bottom'])) / -2.0

        ply_shifted = ply_georef + '_shifted'
        grass.run_command('v.transform', input=ply_georef, layer=-1,
                          output=ply_shifted,
                          xshift=east_center, yshift=north_center,
                          zshift=height_center,
                          xscale=1.0, yscale=1.0, zscale=1.0,
                          zrot=0.0, flags='b')

        # does the shifted vector exist?
        found = grass.find_file(ply_shifted, element='vector', mapset=currmapset)
        if found['name'] != ply_shifted:
            grass.run_command('v.db.connect', map=ply, layer=1, table=table, key='cat')
            grass.fatal(_('PLY shifting failed!'))

        grass.run_command('v.db.connect', map=ply_shifted, layer=1, table=table, key='cat')

        output = os.path.join(srcdir, ply_shifted + '.ply')
        grass.run_command('v.out.ply', input=ply_shifted, output=output, columns=columns)
        grass.run_command('v.db.connect', map=ply_shifted, layer=1, flags='d')

    grass.run_command('v.db.connect', map=ply, layer=1, table=table, key='cat')

    grass.message(_("Done: Pointcloud '%s' has been successfully imported, georeferenced, and exported") % ply)
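# Worked example (sketch, not part of the original script) of the shift applied
# for the -s (export shifted) flag: the georeferenced cloud is moved so that the
# center of its bounding box lands on the coordinate origin. The extent values
# below are made up; the formula matches the code above.
vinfo = {'north': 220.0, 'south': 200.0, 'east': 310.0, 'west': 290.0,
         'top': 55.0, 'bottom': 45.0}

yshift = (vinfo['north'] + vinfo['south']) / -2.0   # -210.0
xshift = (vinfo['east'] + vinfo['west']) / -2.0     # -300.0
zshift = (vinfo['top'] + vinfo['bottom']) / -2.0    # -50.0

print(xshift, yshift, zshift)   # passing these to v.transform centers the cloud on (0, 0, 0)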
def main():
    infile = options['input']
    compression_off = flags['c']

    global basedir
    basedir = grass.tempdir()

    # check if the vector map exists
    gfile = grass.find_file(infile, element='vector')
    if not gfile['name']:
        grass.fatal(_("Vector map <%s> not found") % infile)

    # check if the input vector map is in the native format
    if vector.vector_info(gfile['fullname'])['format'] != 'native':
        grass.fatal(_("Unable to pack vector map <%s>. Only native format supported.")
                    % gfile['fullname'])

    # strip the mapset name if present
    if '@' in infile:
        infile = infile.split('@')[0]

    # output name
    if options['output']:
        outfile = options['output']
    else:
        outfile = infile + '.pack'

    # check if the output file already exists
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(_("Pack file <%s> already exists and will be overwritten")
                          % outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))

    # prepare for packing
    grass.verbose(_("Packing <%s>...") % gfile['fullname'])

    # write tar file, optional compression
    if compression_off:
        tar = tarfile.open(name=outfile, mode='w:')
    else:
        tar = tarfile.open(name=outfile, mode='w:gz')
    tar.add(gfile['file'], infile)

    # check if a database connection exists for the vector map
    db_vect = vector.vector_db(gfile['fullname'])
    if not db_vect:
        grass.verbose(_('There is no database connected with vector map <%s>')
                      % gfile['fullname'])
    else:
        # for each layer connection, save the table into one SQLite database
        sqlitedb = os.path.join(basedir, 'db.sqlite')
        for i, dbconn in db_vect.items():
            grass.run_command('db.copy',
                              from_driver=dbconn['driver'],
                              from_database=dbconn['database'],
                              from_table=dbconn['table'],
                              to_driver='sqlite',
                              to_database=sqlitedb,
                              to_table=dbconn['table'])
        tar.add(sqlitedb, 'db.sqlite')

    # add the PROJ files to the tar file so they can be checked when unpacking
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            tar.add(path, 'PROJ_' + support)

    tar.close()

    grass.message(_("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
def main():
    options, flags = gcore.parser()
    aspect = options['aspect']
    speed = options['speed']
    probability = options['probability']
    if options['particle_base']:
        particle_base = options['particle_base'] + '_'
    else:
        particle_base = None

    if options['particles']:
        particles = options['particles']
        min_size = float(options['min_size'])
        max_size = float(options['max_size'])
        comet_length = int(options['comet_length'])
    else:
        particles = min_size = max_size = comet_length = None

    try:
        total_time = int(options['total_time'])
        step = int(options['step'])
        age = int(options['age'])
        count = int(options['count'])
    except ValueError:
        gcore.fatal(_("Parameter should be integer"))

    gcore.use_temp_region()

    # create aspect in x and y direction
    aspect_x = 'aspect_x_' + str(os.getpid())
    aspect_y = 'aspect_y_' + str(os.getpid())
    xshift_tmp = 'xshift_tmp_' + str(os.getpid())
    yshift_tmp = 'yshift_tmp_' + str(os.getpid())
    TMP_RAST.append(aspect_x)
    TMP_RAST.append(aspect_y)
    grast.mapcalc(exp="{aspect_x} = cos({aspect})".format(aspect_x=aspect_x,
                                                          aspect=aspect))
    grast.mapcalc(exp="{aspect_y} = sin({aspect})".format(aspect_y=aspect_y,
                                                          aspect=aspect))
    grast.mapcalc(exp="{xshift} = {aspect_x}*{speed}*{t}".format(xshift=xshift_tmp,
                                                                 t=step, speed=speed,
                                                                 aspect_x=aspect_x),
                  overwrite=True)
    grast.mapcalc(exp="{yshift} = {aspect_y}*{speed}*{t}".format(yshift=yshift_tmp,
                                                                 t=step, speed=speed,
                                                                 aspect_y=aspect_y),
                  overwrite=True)

    # initialize
    vector_tmp1 = 'vector_tmp1_' + str(os.getpid())
    vector_tmp2 = 'vector_tmp2_' + str(os.getpid())
    vector_tmp3 = 'vector_tmp3_' + str(os.getpid())
    vector_region = 'vector_region_' + str(os.getpid())
    TMP_VECT.extend([vector_tmp1, vector_tmp2, vector_tmp3, vector_region])
    random_tmp = 'random_tmp_' + str(os.getpid())
    TMP_RAST.extend([xshift_tmp, yshift_tmp, random_tmp])
    gcore.run_command('v.in.region', output=vector_region, type='area')

    loop = 0
    vector_1 = particle_base + "{0:03d}".format(loop)
    generate_points(name=vector_1, probability_map=probability, count=count)
    grast.mapcalc(exp="{random} = int(rand(1, {maxt}))".format(random=random_tmp,
                                                               maxt=age + 1))
    gcore.run_command('v.what.rast', map=vector_1, raster=random_tmp, column='t')
    write_vect_history('v.particles', options, flags, vector_1)

    vector_names = [vector_1, ]
    for time in range(0, total_time + step, step):
        vector_1 = particle_base + "{0:03d}".format(loop)
        vector_2 = particle_base + "{0:03d}".format(loop + 1)
        vector_names.append(vector_2)

        gcore.run_command('v.what.rast', map=vector_1, raster=xshift_tmp,
                          column='xshift')
        gcore.run_command('v.what.rast', map=vector_1, raster=yshift_tmp,
                          column='yshift')
        gcore.run_command('v.transform', layer=1, input=vector_1, output=vector_2,
                          columns='xshift:xshift,yshift:yshift', quiet=True)

        # increase age
        gcore.info("Increasing age...")
        sql = 'UPDATE {table} SET t=t+1;'.format(table=vector_2)
        gcore.run_command('db.execute', sql=sql)

        # remove old points
        gcore.info("Removing old points...")
        gcore.run_command('v.select', overwrite=True, ainput=vector_2, atype='point',
                          binput=vector_region, btype='area', operator='within',
                          output=vector_tmp1)
        gcore.run_command('v.extract', input=vector_tmp1, layer=1, type='point',
                          where="t <= " + str(age) + " AND xshift IS NOT NULL",
                          output=vector_tmp2, overwrite=True)

        # generate new points
        gcore.info("Generating new points...")
        count_to_generate = count - gvect.vector_info(vector_tmp2)['points']
        if count_to_generate > 0:
            generate_points(name=vector_tmp3, probability_map=probability,
                            count=count_to_generate, overwrite=True)
            gcore.info("Patching new and old points...")
            gcore.run_command('v.patch', flags='e', input=[vector_tmp2, vector_tmp3],
                              output=vector_2, overwrite=True)
            sql = 'UPDATE {table} SET t={t} WHERE t IS NULL;'.format(table=vector_2,
                                                                     t=0)
            gcore.run_command('db.execute', sql=sql)

        write_vect_history('v.particles', options, flags, vector_2)

        loop += 1

    # Make sure the temporal database exists
    tgis.init()

    tgis.open_new_space_time_dataset(particle_base[:-1], type='stvds',
                                     temporaltype='relative',
                                     title="title", descr='desc',
                                     semantic='mean', dbif=None,
                                     overwrite=gcore.overwrite())
    # TODO: we must start from 1 because there is a bug in register_maps_in_space_time_dataset
    tgis.register_maps_in_space_time_dataset(
        type='vect', name=particle_base[:-1], maps=','.join(vector_names),
        start=str(1), end=None, unit='seconds', increment=step,
        interval=False, dbif=None)

    # create one vector map with multiple layers
    fd, path = tempfile.mkstemp(text=True)
    tmpfile = open(path, 'w')
    k = 0
    for vector in vector_names:
        k += 1
        layers = [x for x in range(k - comet_length + 1, k + 1) if x > 0]
        categories = list(range(len(layers), 0, -1))
        text = ''
        for layer, cat in zip(layers, categories):
            text += '{l} {c}\n'.format(l=layer, c=cat)
        coords = gcore.read_command('v.to.db', flags='p', quiet=True, map=vector,
                                    type='point', option='coor',
                                    separator=" ").strip()
        for coord in coords.split('\n'):
            coord = coord.split()
            tmpfile.write('P 1 {n_cat}\n{x} {y}\n'.format(n_cat=len(categories),
                                                          x=coord[1], y=coord[2]))
            tmpfile.write(text)
    tmpfile.close()

    gcore.run_command('v.in.ascii', flags='n', overwrite=True, input=path,
                      output=particles, format='standard', separator=" ")
    os.close(fd)
    os.remove(path)

    k = 0
    sql = []
    sizes = get_sizes(max_size, min_size, comet_length)
    temporal_maps = []
    for vector in vector_names:
        k += 1
        table = 't' + str(k)
        gcore.run_command('v.db.addtable', map=particles, table=table, layer=k,
                          column="width double precision")
        temporal_maps.append(particles + ':' + str(k))
        for i in range(comet_length):
            sql.append("UPDATE {table} SET width={w:.1f} WHERE cat={c}".format(
                table=table, w=sizes[i][1], c=sizes[i][0]))
    gcore.write_command('db.execute', input='-', stdin=';\n'.join(sql))

    tgis.open_new_space_time_dataset(particles, type='stvds',
                                     temporaltype='relative',
                                     title="title", descr='desc',
                                     semantic='mean', dbif=None,
                                     overwrite=True)
    # TODO: we must start from 1 because there is a bug in register_maps_in_space_time_dataset
    tgis.register_maps_in_space_time_dataset(
        type='vect', name=particles, maps=','.join(temporal_maps),
        start=str(1), end=None, unit='seconds', increment=step,
        interval=False, dbif=None)

    write_vect_history('v.particles', options, flags, particles)
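# get_sizes() is used above but not shown. Judging only from how its return
# value is indexed (sizes[i][0] as a category, sizes[i][1] as a width), a
# plausible, purely hypothetical sketch would interpolate widths from the comet
# head (category 1, max_size) down to its tail (min_size); the script's real
# helper may differ.
def get_sizes(max_size, min_size, comet_length):
    sizes = []
    for i in range(comet_length):
        if comet_length > 1:
            width = max_size - (max_size - min_size) * i / (comet_length - 1)
        else:
            width = max_size
        sizes.append((i + 1, width))   # category 1 is the newest (head) segment
    return sizes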