def search(self):
    """ Search for Landsat-8 scenes """
    # Convert AOI to GeoJSON
    aoi_file = tempdir() + '/aoi_geojson.geojson'
    Module('v.out.ogr', overwrite=True, input=options['map'],
           format='GeoJSON', output=aoi_file)

    # Reproject to EPSG 4326
    aoi_file_re = tempdir() + '/aoi_reprojected.geojson'
    os.system("ogr2ogr -t_srs epsg:4326 " + aoi_file_re + " " + aoi_file)

    # Search for scenes
    with open(aoi_file_re) as f:
        aoi = json.dumps(json.load(f))
    search = Search(date_from=options['date_from'],
                    date_to=options['date_to'],
                    satellite_name='Landsat-8',
                    intersects=aoi,
                    cloud_from=0,
                    cloud_to=options['clouds'])
    self._scenes = search.scenes()

    os.remove(aoi_file)

def import_file(filename, archive, output, region):
    """Extracts one binary file from its archive and imports it."""
    # open the archive
    with ZipFile(archive, 'r') as a:
        # create temporary file and directory
        tempdir = grass.tempdir()
        tempfile = os.path.join(tempdir, filename)
        # try to inflate and import the layer
        if os.path.isfile(archive):
            try:
                grass.message("Inflating {} ...".format(filename))
                a.extract(filename, tempdir)
                grass.message("Importing {} as {} ..."
                              .format(filename, output))
                grass.run_command('r.in.bin', flags='s', input=tempfile,
                                  output=output, bytes=2, anull=-9999,
                                  **region)
            # if file is not present in the archive
            except KeyError:
                grass.warning("Could not find {} in {}. Skipping"
                              .format(filename, archive))
            # make sure temporary files are cleaned
            finally:
                grass.try_remove(tempfile)
                grass.try_rmdir(tempdir)
        else:
            grass.warning("Could not find file {}. Skipping"
                          .format(archive))

def import_file(filename, archive, output, region):
    """Extracts one binary file from its archive and imports it."""
    # open the archive
    with ZipFile(archive, 'r') as a:
        # create temporary file and directory
        tempdir = grass.tempdir()
        tempfile = os.path.join(tempdir, filename)
        # try to inflate and import the layer
        try:
            grass.message("Inflating '%s' ..." % filename)
            a.extract(filename, tempdir)
            grass.message("Importing '%s' as <%s> ..." % (filename, output))
            grass.run_command('r.in.bin', flags='s', overwrite=True,
                              input=tempfile, output=output,
                              bytes=2, anull=-9999, **region)
        # if file is not present in the archive
        except KeyError:
            grass.fatal("Could not find '%s' in '%s'" % (filename, archive))
        # make sure temporary files are cleaned
        finally:
            grass.try_remove(tempfile)
            grass.try_rmdir(tempdir)

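# A minimal driver sketch for the import_file() variants above. Everything
# here is illustrative: the archive path, the member name and the extents
# are made up; the 'region' keys must match r.in.bin options
# (north/south/east/west/rows/cols) describing the binary grid at hand.
region = dict(north=51.0, south=50.0, east=1.0, west=0.0,
              rows=1201, cols=1201)
import_file('N50E000.hgt', '/tmp/srtm_tile.zip',  # hypothetical inputs
            'srtm_n50e000', region)
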
class TestRasterExport(TestCase):

    tmp = gscript.tempdir()
    float_ = os.path.join(tmp, "geotiffloat")
    int_ = os.path.join(tmp, "geotifint")
    grid = os.path.join(tmp, "grid")
    pack = os.path.join(tmp, "pack")

    @classmethod
    def setUpClass(cls):
        """Initiate the temporal GIS and set the region"""
        cls.use_temp_region()
        cls.runModule("g.gisenv", set="TGIS_USE_CURRENT_MAPSET=1")
        cls.runModule("g.region", s=0, n=80, w=0, e=120, b=0, t=50,
                      res=10, res3=10)
        maps = []
        for i in range(10):
            cls.runModule("r.mapcalc",
                          expression="a_{id_} = rand(0.1,1.0)".format(id_=i),
                          flags="s", overwrite=True)
            maps.append("a_{id_}".format(id_=i))

        cls.runModule("t.create", type="strds", temporaltype="absolute",
                      output="A", title="A test", description="A test",
                      overwrite=True)
        cls.runModule("t.register", flags="i", type="raster", input="A",
                      maps=maps, start="2001-01-01", increment="4 months",
                      overwrite=True)

    @classmethod
    def tearDownClass(cls):
        """Remove the temporary region"""
        cls.del_temp_region()
        cls.runModule("t.remove", flags="rf", inputs="A")

    def test_simple_geotif(self):
        self.assertModule("t.rast.export", input="A", output=self.float_,
                          overwrite=True)
        self.assertFileExists(self.float_)

    def test_simple_geotif_int(self):
        self.assertModule("t.rast.export", input="A", output=self.int_,
                          overwrite=True, type="Int16")
        self.assertFileExists(self.int_)

    def test_simple_grid(self):
        self.assertModule("t.rast.export", input="A", output=self.grid,
                          overwrite=True, format="AAIGrid")
        self.assertFileExists(self.grid)

    def test_simple_pack(self):
        self.assertModule("t.rast.export", input="A", output=self.pack,
                          overwrite=True, format="pack")
        self.assertFileExists(self.pack)

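# gunittest-based test classes like the one above are conventionally run
# through the standard entry point placed at the bottom of the test file
# (the usual GRASS test-suite boilerplate):
if __name__ == '__main__':
    from grass.gunittest.main import test
    test()
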
def create_tempdirs(list_of_directories):
    '''
    Function that creates the needed temporary folders. The names have to
    be saved, as other functions will depend on the names of those folders.
    '''
    return_list = []
    tmp_grass_dir = gscript.tempdir()
    for directory in list_of_directories:
        # Temporary directory for administrative units statistics
        outputdirectory = os.path.join(tmp_grass_dir, directory)
        if not os.path.exists(outputdirectory):
            os.makedirs(outputdirectory)
        return_list.append(outputdirectory)
    # Return paths
    return return_list

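# A usage sketch for create_tempdirs(); the folder names are illustrative
# only. Two sub-folders are created under GRASS's temporary directory and
# their absolute paths are returned in the same order.
stats_dir, plots_dir = create_tempdirs(['admin_stats', 'plots'])
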
def register(mset_pat, rast_pat, datefmt="%Y%m%d_%H",
             mapset_fmt="r%Y_%m", raster_fmt="{base}_{date:%Y%m}",
             sep='|', overwrite=False, **kwargs):
    reg_dir = gscript.tempdir()
    env = os.environ.copy()
    loc = Location()
    temporals = {}
    for mset_name in loc.mapsets(pattern=mset_pat):
        menv = gs.grass_init(gs.GISBASE, loc.gisdbase, loc.name,
                             mset_name, env=env.copy())
        for rname in loc[mset_name].glist("raster", pattern=rast_pat):
            base, date = extract_date(rname, datefmt=datefmt)
            trast = raster_fmt.format(base=base, date=date)
            rasts = temporals.get(trast, [])
            rasts.append((rname,
                          "{date:%Y-%m-%d %H:%M:%S}".format(date=date)))
            temporals[trast] = rasts

    # create
    for tname, trasts in temporals.items():
        gscript.run_command("t.create", type="strds",
                            temporaltype="absolute",
                            output=tname, title=tname,
                            description="COSMO REA6: {tname}".format(
                                tname=tname),
                            semantictype="mean",
                            overwrite=overwrite, env=menv)
        csvfile = os.path.join(reg_dir, "{}.csv".format(tname))
        with open(csvfile, mode="w") as csv:
            for row in trasts:
                csv.write(sep.join(row) + '\n')
        gscript.run_command("t.register", overwrite=overwrite,
                            type="raster", input=tname, file=csvfile,
                            separator=SEP.get(sep, sep), env=menv)

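# A sketch of how register() might be driven, assuming mapsets named like
# 'r2014_01' and rasters like 'T_2M_20140101_00' that fit the default
# datefmt; both glob patterns below are illustrative, not taken from the
# original script.
register('r20*', 'T_2M*', overwrite=True)
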
def _tempdir(self):
    """!Create temp_dir and append list self.temp_dirs_to_cleanup
    with path of file

    @return string path to temp_dir
    """
    self._debug("_tempdir", "started")

    temp_dir = gscript.tempdir()
    if temp_dir is None:
        gscript.fatal(_("Unable to create temporary directory"))

    # list of created tempfiles for destructor
    self.temp_dirs_to_cleanup.append(temp_dir)

    self._debug("_tempdir", "finished")
    return temp_dir

def _tempdir(self):
    """!Create temp_dir and append list self.temp_dirs_to_cleanup
    with path of file

    @return string path to temp_dir
    """
    self._debug("_tempdir", "started")

    temp_dir = g.tempdir()
    if temp_dir is None:
        g.fatal(_("Unable to create temporary directory"))

    # list of created tempfiles for destructor
    self.temp_dirs_to_cleanup.append(temp_dir)

    self._debug("_tempdir", "finished")
    return temp_dir

def _temp(self, directory=False):
    """!Create temp file/dir and append list self.temp_to_cleanup
    with the file/dir path

    @param directory if False create file, if True create directory

    @return string path to temp
    """
    if directory:
        temp = grass.tempdir()
    else:
        temp = grass.tempfile()

    if temp is None:
        grass.fatal(_("Unable to create temporary files"))

    # list of created temps for destructor
    self.temp_to_cleanup.append(temp)

    return temp

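# All three _temp* helpers above assume the owning class keeps a cleanup
# list that is emptied later. A minimal sketch of that pattern (class and
# method names are hypothetical, using the 'grass' alias of the last
# variant):
class TempCleanupMixin:

    def __init__(self):
        self.temp_to_cleanup = []

    def _cleanup(self):
        # directories are removed recursively, plain files individually
        for temp in self.temp_to_cleanup:
            if os.path.isdir(temp):
                grass.try_rmdir(temp)
            else:
                grass.try_remove(temp)
        self.temp_to_cleanup = []
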
def mkLonLatGrid(self):
    # make temporary roi
    roi = 'roi__'
    grass.run_command('v.in.region', output=roi, quiet=True)
    # create temporary lonlat location
    tmpdir = grass.tempdir()
    tmploc = 'lonlat'
    grass.core.create_location(tmpdir, tmploc, epsg=4326)
    grass.run_command('g.mapset', mapset='PERMANENT', location=tmploc,
                      dbase=tmpdir, quiet=True)
    # reproj roi, smax in meters = 200km per degree
    grass.run_command('v.proj', input=roi, mapset=self.env['MAPSET'],
                      location=self.env['LOCATION_NAME'],
                      dbase=self.env['GISDBASE'], quiet=True)
    grass.run_command('g.region', vector=roi, quiet=True)
    llregion = grass.region()
    # bounds to extend to next resolution break
    extent = {c: int(float(llregion[c]) / self.res) * self.res
              for c in ('s', 'w')}
    extent.update({c: int((float(llregion[c]) + self.res) / self.res) * self.res
                   for c in ('n', 'e')})
    # set region
    grass.run_command('g.region', res=self.res, **extent)
    grass.message(('Lon/Lat extent of region:', extent))
    # make grid
    grass.run_command('v.mkgrid', map=self.grid, type='area')
    grass.run_command('v.db.addcolumn', map=self.grid,
                      columns='lon double,lat double')
    grass.run_command('v.to.db', map=self.grid, type='centroid',
                      option='coor', columns='lon,lat', quiet=True)
    # back to original location and reproj
    grass.run_command('g.mapset', mapset=self.env['MAPSET'],
                      location=self.env['LOCATION_NAME'],
                      dbase=self.env['GISDBASE'], quiet=True)
    grass.run_command('v.proj', input=self.grid, mapset='PERMANENT',
                      location=tmploc, dbase=tmpdir, quiet=True,
                      smax=float(self.region['nsres']) + float(self.region['ewres']))
    return 0

def CategoStats(rasterLayer, zone_map, cl_list=[], colpref="",
                prop=True, mode=True, countnullformode=True):
    '''
    Function computing by-segment class proportions from a categorical
    raster.
    'rasterLayer' expects the name of the categorical raster.
    'zone_map' expects the name of the zones/segmentation raster.
    'cl_list' expects a list with the class values expected in the
    categorical raster. If this list is provided, the classes not existing
    in the output of r.stats will be filled with ZERO.
    The computational region should be defined properly before running
    this function.
    '''
    # Check parameters
    if not prop and not mode:
        return "ERROR : At least 'prop' or 'mode' should be TRUE"

    # r.stats
    tmpfile = gscript.tempfile()
    if countnullformode:
        # r.stats with pixel count including 'null'
        gscript.run_command('r.stats', overwrite=True, flags='c',
                            input='%s,%s' % (zone_map, rasterLayer),
                            output=tmpfile, separator=',')
    else:
        # r.stats with pixel count excluding 'null'
        gscript.run_command('r.stats', overwrite=True, flags='cn',
                            input='%s,%s' % (zone_map, rasterLayer),
                            output=tmpfile, separator=',')

    # Open csv file and create a csv reader
    csvfile = open(tmpfile, 'r')
    reader = csv.reader(csvfile, delimiter=',')

    # Total pixels per category per zone
    totals_dict = {}
    for row in reader:
        # Passes the condition only if the current zone ID does not exist
        # in the dictionary yet
        if row[0] not in totals_dict:
            # Declare a new embedded dictionary for the current zone ID
            totals_dict[row[0]] = {}
        totals_dict[row[0]][row[1]] = int(row[2])

    # Delete key '*' in 'totals_dict' that could appear if there are null
    # values on the zone raster
    if "*" in totals_dict:
        del totals_dict["*"]

    ### MODAL VALUE
    # modal class of each zone
    if mode:
        modalclass_dict = {}
        for k in totals_dict:
            # 'modal_value' must not shadow the 'mode' parameter, which is
            # checked again further down.
            # The trick was found here: https://stackoverflow.com/a/268285/8013239
            modal_value = max(totals_dict[k].items(),
                              key=operator.itemgetter(1))[0]
            if modal_value == "*":  # If the mode is NULL values
                modalclass_dict[k] = ""
            else:
                modalclass_dict[k] = modal_value

    ### PROPORTIONS
    if prop:
        # Get list of categories to output
        if cl_list:
            # If a list of classes is provided by the user,
            # make sure it is in string format
            class_dict = {str(a): "" for a in cl_list}
        else:
            class_dict = {}
        # Remove null values (mandatory for the computation of proportions)
        for k in totals_dict:
            if '*' in totals_dict[k].keys():
                del totals_dict[k]["*"]
        # Proportion of each category per zone (excluding nulls)
        percents_dict = {}
        for k in totals_dict:
            percents_dict[k] = {}
            for k2 in totals_dict[k]:
                percents_dict[k][k2] = (float(totals_dict[k][k2])
                                        / sum(totals_dict[k].values()))
                # Add this class to the dictionary of classes
                class_dict[k2] = ""
        # Fill classes not met with zero
        for k in percents_dict:
            for cl in class_dict:
                if cl not in percents_dict[k]:
                    percents_dict[k][cl] = 0
        # Get list of classes sorted by value (arithmetic)
        class_list = [int(k) for k in class_dict.keys()]
        class_list.sort()

    ### OUTPUT
    # Create output file and create a csv writer
    outfile = os.path.join(gscript.tempdir(),
                           'categostats_%s_%s.csv' % (zone_map, rasterLayer))
    csvfile = open(outfile, 'w')
    writer = csv.writer(csvfile, delimiter=',')
    # Write header
    header = ['cat', ]
    if mode:
        if colpref == "":
            header.append('mode')
        else:
            header.append('%s_mode' % colpref)
    if prop:
        if colpref == "":
            [header.append('prop_%s' % cl) for cl in class_list]
        else:
            [header.append('%s_prop_%s' % (colpref, cl)) for cl in class_list]
    writer.writerow(header)
    # Write content
    for k in totals_dict:
        newrow = [k, ]  # Zone ID
        if mode:
            newrow.append(modalclass_dict[k])
        if prop:
            for cl in class_list:
                newrow.append(percents_dict[k]['%s' % cl])
        writer.writerow(newrow)
    csvfile.close()
    return outfile

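# An invocation sketch for CategoStats(); 'landcover' and 'segments' are
# assumed raster names and the class list is made up. The function returns
# the path of the CSV it wrote.
csv_path = CategoStats('landcover', 'segments', cl_list=[1, 2, 3, 4],
                       colpref='lc', prop=True, mode=True)
print(csv_path)  # e.g. .../categostats_segments_landcover.csv
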
def __init__(self, parent, giface, title=_("GRASS GIS Animation tool"),
             rasters=None, timeseries=None):
    wx.Frame.__init__(self, parent, title=title,
                      style=wx.DEFAULT_FRAME_STYLE, size=(800, 600))
    self._giface = giface
    self.SetClientSize(self.GetSize())
    self.iconsize = (16, 16)

    self.SetIcon(
        wx.Icon(
            os.path.join(globalvar.ICONDIR, 'grass_map.ico'),
            wx.BITMAP_TYPE_ICO))

    # Make sure the temporal database exists
    tgis.init()

    # create temporal directory and ensure it's deleted after the program
    # ends (stored in MAPSET/.tmp/)
    global TMP_DIR
    TMP_DIR = gcore.tempdir()

    self.animations = [Animation() for i in range(MAX_COUNT)]
    self.windows = []
    self.animationPanel = AnimationsPanel(
        self, self.windows, initialCount=MAX_COUNT)
    bitmapPool = BitmapPool()
    mapFilesPool = MapFilesPool()

    self._progressDlg = None
    self._progressDlgMax = None

    self.provider = BitmapProvider(
        bitmapPool=bitmapPool, mapFilesPool=mapFilesPool, tempDir=TMP_DIR)
    self.animationSliders = {}
    self.animationSliders['nontemporal'] = SimpleAnimationSlider(self)
    self.animationSliders['temporal'] = TimeAnimationSlider(self)
    self.controller = AnimationController(
        frame=self,
        sliders=self.animationSliders,
        animations=self.animations,
        mapwindows=self.windows,
        provider=self.provider,
        bitmapPool=bitmapPool,
        mapFilesPool=mapFilesPool)
    for win in self.windows:
        win.Bind(wx.EVT_SIZE, self.FrameSizeChanged)
    self.provider.mapsLoaded.connect(lambda: self.SetStatusText(''))
    self.provider.renderingStarted.connect(self._showRenderingProgress)
    self.provider.renderingContinues.connect(self._updateProgress)
    self.provider.renderingFinished.connect(self._closeProgress)
    self.provider.compositionStarted.connect(self._showRenderingProgress)
    self.provider.compositionContinues.connect(self._updateProgress)
    self.provider.compositionFinished.connect(self._closeProgress)

    self.InitStatusbar()

    self._mgr = wx.aui.AuiManager(self)

    # toolbars
    self.toolbars = {}
    tb = ['miscToolbar', 'animationToolbar', 'mainToolbar']
    if sys.platform == 'win32':
        tb.reverse()
    for toolb in tb:
        self._addToolbar(toolb)

    self._addPanes()
    self._mgr.Update()

    self.dialogs = dict()
    self.dialogs['speed'] = None
    self.dialogs['preferences'] = None

    self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)

def main():
    global rm_regions, rm_rasters, rm_vectors, tmpfolder

    # parameters
    s2names = options['s2names'].split(',')
    tmpdirectory = options['directory']

    test_nprocs_memory()

    grass.message(_("Downloading Sentinel scenes ..."))
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.download' module was not found, install it first:") +
                    "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.import', '--help'):
        grass.fatal(_("The 'i.sentinel.import' module was not found, install it first:") +
                    "\n" + "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.parallel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.parallel.download' module was not found, install it first:") +
                    "\n" + "g.extension i.sentinel")

    # create temporary directory to download data
    if tmpdirectory:
        if not os.path.isdir(tmpdirectory):
            try:
                os.makedirs(tmpdirectory)
            except:
                grass.fatal(_("Unable to create temp dir"))
    else:
        tmpdirectory = grass.tempdir()
        tmpfolder = tmpdirectory

    if os.path.isfile(s2names[0]):
        with open(s2names[0], 'r') as f:
            s2namesstr = f.read()
    else:
        s2namesstr = ','.join(s2names)

    grass.run_command('i.sentinel.parallel.download',
                      settings=options['settings'],
                      scene_name=s2namesstr,
                      nprocs=options['nprocs'],
                      output=tmpdirectory,
                      flags="fs", quiet=True)

    grass.message(_("Importing Sentinel scenes ..."))
    env = grass.gisenv()
    start_gisdbase = env['GISDBASE']
    start_location = env['LOCATION_NAME']
    start_cur_mapset = env['MAPSET']

    if len(s2namesstr.split(',')) < int(options['nprocs']):
        procs_import = len(s2namesstr.split(','))
    else:
        procs_import = int(options['nprocs'])

    # save current region
    id = str(os.getpid())
    currentregion = 'tmp_region_' + id
    grass.run_command('g.region', save=currentregion, flags='p')

    queue_import = ParallelModuleQueue(nprocs=procs_import)
    memory_per_proc = round(float(options['memory']) / procs_import)
    mapsetids = []
    importflag = 'r'
    if flags['i']:
        importflag += 'i'
    if flags['c']:
        importflag += 'c'
    json_standard_folder = os.path.join(
        env['GISDBASE'], env['LOCATION_NAME'], env['MAPSET'], 'cell_misc')
    if not os.path.isdir(json_standard_folder):
        os.makedirs(json_standard_folder)
    for idx, subfolder in enumerate(os.listdir(tmpdirectory)):
        if os.path.isdir(os.path.join(tmpdirectory, subfolder)):
            mapsetid = 'S2_import_%s' % (str(idx + 1))
            mapsetids.append(mapsetid)
            directory = os.path.join(tmpdirectory, subfolder)
            i_sentinel_import = Module(
                'i.sentinel.import.worker',
                input=directory,
                mapsetid=mapsetid,
                memory=memory_per_proc,
                pattern=options['pattern'],
                flags=importflag,
                region=currentregion,
                metadata=json_standard_folder,
                run_=False)
            queue_import.put(i_sentinel_import)
    queue_import.wait()
    grass.run_command('g.remove', type='region', name=currentregion,
                      flags='f')

    # verify that switching the mapset worked
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    cur_mapset = env['MAPSET']
    if cur_mapset != start_cur_mapset:
        grass.fatal("New mapset is <%s>, but should be <%s>"
                    % (cur_mapset, start_cur_mapset))

    # copy maps to current mapset
    maplist = []
    cloudlist = []
    for new_mapset in mapsetids:
        for vect in grass.parse_command('g.list', type='vector',
                                        mapset=new_mapset):
            cloudlist.append(vect)
            grass.run_command('g.copy',
                              vector=vect + '@' + new_mapset + ',' + vect)
        for rast in grass.parse_command('g.list', type='raster',
                                        mapset=new_mapset):
            maplist.append(rast)
            grass.run_command('g.copy',
                              raster=rast + '@' + new_mapset + ',' + rast)
            # set nulls
            grass.run_command('i.zero2null', map=rast, quiet=True)
        grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))

    # space time dataset
    grass.message(_("Creating STRDS of Sentinel scenes ..."))
    if options['strds_output']:
        strds = options['strds_output']
        grass.run_command('t.create', output=strds, title="Sentinel-2",
                          desc="Sentinel-2", quiet=True)

        # create register file
        registerfile = grass.tempfile()
        file = open(registerfile, 'w')
        for imp_rast in list(set(maplist)):
            date_str1 = imp_rast.split('_')[1].split('T')[0]
            date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6],
                                      date_str1[6:])
            time_str = imp_rast.split('_')[1].split('T')[1]
            clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4],
                                       time_str[4:])
            file.write("%s|%s %s\n" % (imp_rast, date_str2, clock_str2))
        file.close()
        grass.run_command('t.register', input=strds, file=registerfile,
                          quiet=True)
        # remove registerfile
        grass.try_remove(registerfile)

        if flags['c']:
            stvdsclouds = strds + '_clouds'
            grass.run_command('t.create', output=stvdsclouds,
                              title="Sentinel-2 clouds",
                              desc="Sentinel-2 clouds", quiet=True,
                              type='stvds')
            registerfileclouds = grass.tempfile()
            fileclouds = open(registerfileclouds, 'w')
            for imp_clouds in cloudlist:
                date_str1 = imp_clouds.split('_')[1].split('T')[0]
                date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6],
                                          date_str1[6:])
                time_str = imp_clouds.split('_')[1].split('T')[1]
                clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4],
                                           time_str[4:])
                fileclouds.write("%s|%s %s\n"
                                 % (imp_clouds, date_str2, clock_str2))
            fileclouds.close()
            grass.run_command('t.register', type='vector',
                              input=stvdsclouds,
                              file=registerfileclouds, quiet=True)
            grass.message("<%s> is created" % (stvdsclouds))
            # remove registerfile
            grass.try_remove(registerfileclouds)

        # extract strds for each band
        bands = []
        pattern = options['pattern']
        if "(" in pattern:
            global beforebrackets, afterbrackets
            beforebrackets = re.findall(r"(.*?)\(", pattern)[0]
            inbrackets = re.findall(r"\((.*?)\)", pattern)[0]
            afterbrackets = re.findall(r"\)(.*)", pattern)[0]
            bands = ["%s%s%s" % (beforebrackets, x, afterbrackets)
                     for x in inbrackets.split('|')]
        else:
            bands = pattern.split('|')
        for band in bands:
            if flags['i'] and ('20' in band or '60' in band):
                band = band.replace('20', '10').replace('60', '10')
            grass.run_command('t.rast.extract', input=strds,
                              where="name like '%" + band + "%'",
                              output="%s_%s" % (strds, band), quiet=True)
            grass.message("<%s_%s> is created" % (strds, band))

def main():
    # Initiate GRASS
    init = vtkGRASSInit()
    init.Init("v.fuzzy.select")
    init.ExitOnErrorOn()

    module = vtkGRASSModule()
    module.SetDescription("Select best fitting factors of a fuzzy inference model parameter based on vector data")
    module.AddKeyword("vector")

    input = vtkGRASSOptionFactory().CreateInstance(vtkGRASSOptionFactory.GetVectorInputType())

    feature = vtkGRASSOptionFactory().CreateInstance(vtkGRASSOptionFactory.GetVectorFeatureType())
    feature.SetDefaultOptions("point,centroid,area")
    feature.SetDefaultAnswer("point")
    feature.MultipleOff()

    factors = vtkGRASSOptionFactory().CreateInstance(vtkGRASSOptionFactory.GetDataBaseColumnType(), "factors")
    factors.SetDescription("Names of the table columns of the fuzzy factors")

    target = vtkGRASSOptionFactory().CreateInstance(vtkGRASSOptionFactory.GetDataBaseColumnType(), "target")
    target.SetDescription("Name of the table column of the target variable")

    weightingFactor = vtkGRASSOptionFactory().CreateInstance(vtkGRASSOptionFactory.GetDataBaseColumnType(), "weightfactor")
    weightingFactor.SetDescription("Name of the table column of the weighting variable")
    weightingFactor.RequiredOff()

    iterations = vtkGRASSOption()
    iterations.SetKey("iterations")
    iterations.MultipleOff()
    iterations.RequiredOff()
    iterations.SetDefaultAnswer("5000")
    iterations.SetDescription("The maximum number of iterations")
    iterations.SetTypeToInteger()

    fuzzysets = vtkGRASSOption()
    fuzzysets.SetKey("fuzzysets")
    fuzzysets.MultipleOn()
    fuzzysets.RequiredOn()
    fuzzysets.SetDescription("The number of fuzzy sets to be used for calibration eg.: 2,3")
    fuzzysets.SetTypeToInteger()

    resultlist = vtkGRASSOptionFactory().CreateInstance(vtkGRASSOptionFactory.GetFileOutputType(), "result")
    resultlist.SetDescription("The name of the logfile to store a sorted list of all factor combinations with AIC and ERROR")

    samplingFactor = vtkGRASSOption()
    samplingFactor.SetKey("samplingfactor")
    samplingFactor.MultipleOff()
    samplingFactor.RequiredOff()
    samplingFactor.SetDescription("The name of the column with ids for bootstrap aggregation selection")
    samplingFactor.SetTypeToString()

    weightNum = vtkGRASSOption()
    weightNum.SetKey("weightnum")
    weightNum.MultipleOff()
    weightNum.RequiredOff()
    weightNum.SetDefaultAnswer("6")
    weightNum.SetDescription("The number of weights used for calibration")
    weightNum.SetTypeToInteger()

    null = vtkGRASSOption()
    null.SetKey("null")
    null.MultipleOff()
    null.RequiredOff()
    null.SetDefaultAnswer("9999")
    null.SetDescription("The value used for no data")
    null.SetTypeToDouble()

    breakcrit = vtkGRASSOption()
    breakcrit.SetKey("breakcrit")
    breakcrit.MultipleOff()
    breakcrit.RequiredOff()
    breakcrit.SetDefaultAnswer("0.01")
    breakcrit.SetDescription("The break criteria")
    breakcrit.SetTypeToDouble()

    sdepth = vtkGRASSOption()
    sdepth.SetKey("sdepth")
    sdepth.MultipleOff()
    sdepth.RequiredOff()
    sdepth.SetDefaultAnswer("2")
    sdepth.SetDescription("The maximum number of depths (number of selected factors)")
    sdepth.SetTypeToInteger()

    runs = vtkGRASSOption()
    runs.SetKey("runs")
    runs.MultipleOff()
    runs.RequiredOff()
    runs.SetDefaultAnswer("1")
    runs.SetDescription("The number of runs for each selection analysis")
    runs.SetTypeToInteger()

    treduce = vtkGRASSOption()
    treduce.SetKey("treduce")
    treduce.MultipleOff()
    treduce.RequiredOff()
    treduce.SetDefaultAnswer("1.01")
    treduce.SetDescription("This factor is used to reduce the annealing temperature each step")
    treduce.SetTypeToDouble()

    sdreduce = vtkGRASSOption()
    sdreduce.SetKey("sdreduce")
    sdreduce.MultipleOff()
    sdreduce.RequiredOff()
    sdreduce.SetDefaultAnswer("1.01")
    sdreduce.SetDescription("This factor is used to reduce the standard deviation each step")
    sdreduce.SetTypeToDouble()

    bagging = vtkGRASSFlag()
    bagging.SetDescription("Use bootstrap aggregation (bagging) for input data selection")
    bagging.SetKey('b')

    weighting = vtkGRASSFlag()
    weighting.SetDescription("Use weighting for input data calibration. A weightingfactor and the number of weights must be provided.")
    weighting.SetKey('w')

    parameter = vtkStringArray()
    for arg in sys.argv:
        parameter.InsertNextValue(str(arg))

    if init.Parser(parameter) == False:
        return -1

    messages = vtkGRASSMessagingInterface()

    # Check for weighting support
    if weighting.GetAnswer() == True:
        if not weightingFactor.GetAnswer():
            messages.FatalError("The name of the weighting column must be provided")

    # Create the names for the vector import and the (weighted) fuzzy
    # inference scheme generation
    columns = vtkStringArray()
    factors.GetAnswers(columns)
    setnums = vtkStringArray()
    fuzzysets.GetAnswers(setnums)

    Factors = []
    for i in range(columns.GetNumberOfValues()):
        Factors.append(columns.GetValue(i))

    FuzzySets = []
    for i in range(setnums.GetNumberOfValues()):
        FuzzySets.append(int(setnums.GetValue(i)))

    columns.InsertNextValue(target.GetAnswer())

    Vector = input.GetAnswer()
    Target = target.GetAnswer()
    WeightNum = int(weightNum.GetAnswer())
    WeightFactor = weightingFactor.GetAnswer()
    Iterations = int(iterations.GetAnswer())
    runs = int(runs.GetAnswer())
    searchDepth = int(sdepth.GetAnswer())

    tmpdir = grass.tempdir()

    Count = 0
    CalibrationResultFactors = []
    CalibrationResultFuzzySets = []
    StartFactors = Factors

    if searchDepth == 0:
        searchDepth = len(Factors)

    CalibrationResult = {}
    SelectedCalibration = ""

    while Count < searchDepth:
        factorNames = []
        fuzzySetNums = []
        CalibrationResultCount = len(CalibrationResultFactors)

        # Insert the previously selected factors and fuzzy set numbers
        for i in range(CalibrationResultCount):
            factorNames.append(CalibrationResultFactors[i])
            fuzzySetNums.append(CalibrationResultFuzzySets[i])

        # Allocate the next entry
        factorNames.append("")
        fuzzySetNums.append("")

        # For each factor left
        for factor in StartFactors:
            factorNames[CalibrationResultCount] = factor
            for fuzzySet in FuzzySets:
                fuzzySetNums[CalibrationResultCount] = fuzzySet

                # Create the unique id of the calibration
                id = ""
                for i in range(len(factorNames)):
                    id += str(factorNames[i]) + str(fuzzySetNums[i])

                # Make a copy of the lists, otherwise the references get
                # modified
                a = 1 * factorNames
                b = 1 * fuzzySetNums

                if weighting.GetAnswer():
                    error, BIC, AIC, MAF, Rsquared = StartWeightedCalibration(
                        id, tmpdir, Vector, Target, factorNames,
                        fuzzySetNums, Iterations, runs, WeightNum,
                        WeightFactor, treduce.GetAnswer(),
                        sdreduce.GetAnswer(), breakcrit.GetAnswer(),
                        bagging.GetAnswer(), samplingFactor.GetAnswer())
                    CalibrationResult[id] = {"NAME": a, "FIS": b,
                                             "ERROR": error, "BIC": BIC,
                                             "AIC": AIC, "MAF": MAF,
                                             "Rsquared": Rsquared,
                                             "WEIGHTING": True}
                else:
                    error, BIC, AIC, MAF, Rsquared = StartCalibration(
                        id, tmpdir, Vector, Target, factorNames,
                        fuzzySetNums, Iterations, runs,
                        treduce.GetAnswer(), sdreduce.GetAnswer(),
                        breakcrit.GetAnswer(), bagging.GetAnswer(),
                        samplingFactor.GetAnswer())
                    CalibrationResult[id] = {"NAME": a, "FIS": b,
                                             "ERROR": error, "BIC": BIC,
                                             "AIC": AIC, "MAF": MAF,
                                             "Rsquared": Rsquared,
                                             "WEIGHTING": False}

        # Selection of the best fit model
        minBIC = 99999999
        for key in CalibrationResult.keys():
            BIC = CalibrationResult[key]["BIC"]
            if BIC < minBIC:
                minBIC = BIC

        # Compute the delta BIC and BIC weight and append them to the
        # calibration result
        BICWeigthSum = 0
        for key in CalibrationResult.keys():
            BIC = CalibrationResult[key]["BIC"]
            BICDelta = math.fabs(BIC - minBIC)
            CalibrationResult[key]["DELTA_BIC"] = BICDelta
            CalibrationResult[key]["BIC_WEIGHT"] = math.exp(-1 * BICDelta / 2.0)
            BICWeigthSum = BICWeigthSum + CalibrationResult[key]["BIC_WEIGHT"]

        for key in CalibrationResult.keys():
            BICWeight = CalibrationResult[key]["BIC_WEIGHT"]
            MAF = CalibrationResult[key]["MAF"]
            BICWeight = BICWeight / BICWeigthSum
            CalibrationResult[key]["BIC_WEIGHT"] = BICWeight
            CalibrationResult[key]["BIC_MAF_WEIGHT"] = BICWeight / MAF

        # Select the best result from the CalibrationResult
        bestFitKey = None
        bestBICMAFWeight = 999999
        for key in CalibrationResult.keys():
            BICMAFWeight = CalibrationResult[key]["BIC_MAF_WEIGHT"]
            # keep track of the running minimum, otherwise the last key
            # below the initial threshold wins
            if BICMAFWeight < bestBICMAFWeight:
                bestBICMAFWeight = BICMAFWeight
                bestFitKey = key

        # Copy the best fit factor names and fuzzy sets
        CalibrationResultFactors = CalibrationResult[bestFitKey]["NAME"]
        CalibrationResultFuzzySets = CalibrationResult[bestFitKey]["FIS"]

        # Build new StartFactor list
        StartFactors = []

        print("Factors ", Factors)
        print("CalibrationResultFactors ", CalibrationResultFactors)

        for factor in Factors:
            if factor not in CalibrationResultFactors:
                StartFactors.append(factor)

        print("StartFactors ", StartFactors)

        # Search depth
        Count += 1

    print("Selected best fit model: ")
    for name in CalibrationResult[bestFitKey].keys():
        print(name, ":", CalibrationResult[bestFitKey][name])

    ###########################################################################
    # Write all results into the best fit file
    count = 0
    result = []
    for key in CalibrationResult.keys():
        result.append([str(key), CalibrationResult[key]["BIC_MAF_WEIGHT"]])
        count = count + 1

    # We sort the result based on the delta BIC
    result = sorted(result, key=lambda result: result[1])

    messageout = "RANK|NAME|BIC_MAF_WEIGHT|BIC_WEIGHT|DELTA_BIC|ERROR|BIC|AIC|MAF|Rsquared|WEIGHTING\n"
    file = open(resultlist.GetAnswer(), 'w')
    file.write(messageout)

    for key in range(count - 1, 0 - 1, -1):
        messageout = str(count - key) + "|" + \
            str(result[key][0]) + "|" + \
            str(CalibrationResult[result[key][0]]["BIC_MAF_WEIGHT"]) + "|" + \
            str(CalibrationResult[result[key][0]]["BIC_WEIGHT"]) + "|" + \
            str(CalibrationResult[result[key][0]]["DELTA_BIC"]) + "|" + \
            str(CalibrationResult[result[key][0]]["ERROR"]) + "|" + \
            str(CalibrationResult[result[key][0]]["BIC"]) + "|" + \
            str(CalibrationResult[result[key][0]]["AIC"]) + "|" + \
            str(CalibrationResult[result[key][0]]["MAF"]) + "|" + \
            str(CalibrationResult[result[key][0]]["Rsquared"]) + "|" + \
            str(CalibrationResult[result[key][0]]["WEIGHTING"]) + "\n"
        file.write(messageout)
    file.close()

def main(): """Do the main processing """ # Parse input options: patch_map = options['input'] patches = patch_map.split('@')[0] patches_mapset = patch_map.split('@')[1] if len( patch_map.split('@')) > 1 else None pop_proxy = options['pop_proxy'] layer = options['layer'] costs = options['costs'] cutoff = float(options['cutoff']) border_dist = int(options['border_dist']) conefor_dir = options['conefor_dir'] memory = int(options['memory']) # Parse output options: prefix = options['prefix'] edge_map = '{}_edges'.format(prefix) vertex_map = '{}_vertices'.format(prefix) shortest_paths = '{}_shortest_paths'.format(prefix) # Parse flags: p_flag = flags['p'] t_flag = flags['t'] r_flag = flags['r'] dist_flags = 'kn' if flags['k'] else 'n' lin_cat = 1 zero_dist = None folder = grass.tempdir() if not os.path.exists(folder): os.makedirs(folder) # Setup counter for progress message counter = 0 # Check if location is lat/lon (only in lat/lon geodesic distance # measuring is supported) if grass.locn_is_latlong(): grass.verbose("Location is lat/lon: Geodesic distance \ measure is used") # Check if prefix is legal GRASS name if not grass.legal_name(prefix): grass.fatal('{} is not a legal name for GRASS \ maps.'.format(prefix)) if prefix[0].isdigit(): grass.fatal('Tables names starting with a digit are not SQL \ compliant.'.format(prefix)) # Check if output maps not already exists or could be overwritten for output in [edge_map, vertex_map, shortest_paths]: if grass.db.db_table_exist(output) and not grass.overwrite(): grass.fatal('Vector map <{}> already exists'.format(output)) # Check if input has required attributes in_db_connection = grass.vector.vector_db(patch_map) if not int(layer) in in_db_connection.keys(): grass.fatal('No attribute table connected vector map {} at \ layer {}.'.format(patches, layer)) #Check if cat column exists pcols = grass.vector.vector_columns(patch_map, layer=layer) #Check if cat column exists if not 'cat' in pcols.keys(): grass.fatal('Cannot find the reqired column cat in vector map \ {}.'.format(patches)) #Check if pop_proxy column exists if not pop_proxy in pcols.keys(): grass.fatal('Cannot find column {} in vector map \ {}'.format(pop_proxy, patches)) #Check if pop_proxy column is numeric type if not pcols[pop_proxy]['type'] in ['INTEGER', 'REAL', 'DOUBLE PRECISION']: grass.fatal('Column {} is of type {}. Only numeric types \ (integer or double precision) \ allowed!'.format(pop_proxy, pcols[pop_proxy]['type'])) #Check if pop_proxy column does not contain values <= 0 pop_vals = np.fromstring(grass.read_command('v.db.select', flags='c', map=patches, columns=pop_proxy, nv=-9999).rstrip('\n'), dtype=float, sep='\n') if np.min(pop_vals) <= 0: grass.fatal('Column {} contains values <= 0 or NULL. Neither \ values <= 0 nor NULL allowed!}'.format(pop_proxy)) ############################################## # Use pygrass region instead of grass.parse_command !?! start_reg = grass.parse_command('g.region', flags='ugp') max_n = start_reg['n'] min_s = start_reg['s'] max_e = start_reg['e'] min_w = start_reg['w'] # cost_nsres = reg['nsres'] # cost_ewres = reg['ewres'] # Rasterize patches # http://www.gdal.org/gdal_tutorial.html # http://geoinformaticstutorial.blogspot.no/2012/11/convert- # shapefile-to-raster-with-gdal.html if t_flag: # Rasterize patches with "all-touched" mode using GDAL # Read region-settings (not needed canuse max_n, min_s, max_e, # min_w nsres, ewres... 
prast = os.path.join(folder, 'patches_rast.tif') # Check if GDAL-GRASS plugin is installed if ogr.GetDriverByName('GRASS'): #With GDAL-GRASS plugin #Locate file for patch vector map pfile = grass.parse_command('g.findfile', element='vector', file=patches, mapset=patches_mapset)['file'] pfile = os.path.join(pfile, 'head') else: # Without GDAL-GRASS-plugin grass.warning("Cannot find GDAL-GRASS plugin. Consider \ installing it in order to save time for \ all-touched rasterisation") pfile = os.path.join(folder, 'patches_vect.gpkg') # Export patch vector map to temp-file in a GDAL-readable # format (shp) grass.run_command('v.out.ogr', flags='m', quiet=True, input=patch_map, type='area', layer=layer, output=pfile, lco='GEOMETRY_NAME=geom') # Rasterize vector map with all-touched option os.system('gdal_rasterize -l {} -at -tr {} {} \ -te {} {} {} {} -ot Uint32 -a cat \ {} {} -q'.format(patches, start_reg['ewres'], start_reg['nsres'], start_reg['w'], start_reg['s'], start_reg['e'], start_reg['n'], pfile, prast)) if not ogr.GetDriverByName('GRASS'): # Remove vector temp-file os.remove(os.path.join(folder, 'patches_vect.gpkg')) # Import rasterized patches grass.run_command('r.external', flags='o', quiet=True, input=prast, output='{}_patches_pol'.format(TMP_PREFIX)) else: # Simple rasterisation (only area) # in G 7.6 also with support for 'centroid' if float(grass.version()['version'][:3]) >= 7.6: conv_types = ['area', 'centroid'] else: conv_types = ['area'] grass.run_command('v.to.rast', quiet=True, input=patches, use='cat', type=conv_types, output='{}_patches_pol'.format(TMP_PREFIX)) # Extract boundaries from patch raster map grass.run_command('r.mapcalc', expression='{p}_patches_boundary=if(\ {p}_patches_pol,\ if((\ (isnull({p}_patches_pol[-1,0])||| \ {p}_patches_pol[-1,0]!={p}_patches_pol)||| \ (isnull({p}_patches_pol[0,1])||| \ {p}_patches_pol[0,1]!={p}_patches_pol)||| \ (isnull({p}_patches_pol[1,0])||| \ {p}_patches_pol[1,0]!={p}_patches_pol)||| \ (isnull({p}_patches_pol[0,-1])||| \ {p}_patches_pol[0,-1]!={p}_patches_pol)), \ {p}_patches_pol,null()), null())'.format(p=TMP_PREFIX), quiet=True) rasterized_cats = grass.read_command( 'r.category', separator='newline', map='{p}_patches_boundary'.format(p=TMP_PREFIX)).replace( '\t', '').strip('\n') rasterized_cats = list( map(int, set([x for x in rasterized_cats.split('\n') if x != '']))) #Init output vector maps if they are requested by user network = VectorTopo(edge_map) network_columns = [(u'cat', 'INTEGER PRIMARY KEY'), (u'from_p', 'INTEGER'), (u'to_p', 'INTEGER'), (u'min_dist', 'DOUBLE PRECISION'), (u'dist', 'DOUBLE PRECISION'), (u'max_dist', 'DOUBLE PRECISION')] network.open('w', tab_name=edge_map, tab_cols=network_columns) vertex = VectorTopo(vertex_map) vertex_columns = [ (u'cat', 'INTEGER PRIMARY KEY'), (pop_proxy, 'DOUBLE PRECISION'), ] vertex.open('w', tab_name=vertex_map, tab_cols=vertex_columns) if p_flag: # Init cost paths file for start-patch grass.run_command('v.edit', quiet=True, map=shortest_paths, tool='create') grass.run_command('v.db.addtable', quiet=True, map=shortest_paths, columns="cat integer,\ from_p integer,\ to_p integer,\ dist_min double precision,\ dist double precision,\ dist_max double precision") start_region_bbox = Bbox(north=float(max_n), south=float(min_s), east=float(max_e), west=float(min_w)) vpatches = VectorTopo(patches, mapset=patches_mapset) vpatches.open('r', layer=int(layer)) ###Loop through patches vpatch_ids = np.array(vpatches.features_to_wkb_list( feature_type="centroid", bbox=start_region_bbox), 
dtype=[('vid', 'uint32'), ('cat', 'uint32'), ('geom', '|S10')]) cats = set(vpatch_ids['cat']) n_cats = len(cats) if n_cats < len(vpatch_ids['cat']): grass.verbose('At least one MultiPolygon found in patch map.\n \ Using average coordinates of the centroids for \ visual representation of the patch.') for cat in cats: if cat not in rasterized_cats: grass.warning('Patch {} has not been rasterized and will \ therefore not be treated as part of the \ network. Consider using t-flag or change \ resolution.'.format(cat)) continue grass.verbose("Calculating connectivity-distances for patch \ number {}".format(cat)) # Filter from_vpatch = vpatch_ids[vpatch_ids['cat'] == cat] # Get patch ID if from_vpatch['vid'].size == 1: from_centroid = Centroid(v_id=int(from_vpatch['vid']), c_mapinfo=vpatches.c_mapinfo) from_x = from_centroid.x from_y = from_centroid.y # Get centroid if not from_centroid: continue else: xcoords = [] ycoords = [] for f_p in from_vpatch['vid']: from_centroid = Centroid(v_id=int(f_p), c_mapinfo=vpatches.c_mapinfo) xcoords.append(from_centroid.x) ycoords.append(from_centroid.y) # Get centroid if not from_centroid: continue from_x = np.average(xcoords) from_y = np.average(ycoords) # Get BoundingBox from_bbox = grass.parse_command('v.db.select', map=patch_map, flags='r', where='cat={}'.format(cat)) attr_filter = vpatches.table.filters.select(pop_proxy) attr_filter = attr_filter.where("cat={}".format(cat)) proxy_val = vpatches.table.execute().fetchone() # Prepare start patch start_patch = '{}_patch_{}'.format(TMP_PREFIX, cat) reclass_rule = grass.encode('{} = 1\n* = NULL'.format(cat)) recl = grass.feed_command( 'r.reclass', quiet=True, input='{}_patches_boundary'.format(TMP_PREFIX), output=start_patch, rules='-') recl.stdin.write(reclass_rule) recl.stdin.close() recl.wait() # Check if patch was rasterised (patches smaller raster resolution and close to larger patches may not be rasterised) #start_check = grass.parse_command('r.info', flags='r', map=start_patch) #start_check = grass.parse_command('r.univar', flags='g', map=start_patch) #print(start_check) """if start_check['min'] != '1': grass.warning('Patch {} has not been rasterized and will \ therefore not be treated as part of the \ network. 
Consider using t-flag or change \ resolution.'.format(cat)) grass.run_command('g.remove', flags='f', vector=start_patch, raster=start_patch, quiet=True) grass.del_temp_region() continue""" # Prepare stop patches ############################################ reg = grass.parse_command('g.region', flags='ug', quiet=True, raster=start_patch, n=float(from_bbox['n']) + float(cutoff), s=float(from_bbox['s']) - float(cutoff), e=float(from_bbox['e']) + float(cutoff), w=float(from_bbox['w']) - float(cutoff), align='{}_patches_pol'.format(TMP_PREFIX)) north = reg['n'] if max_n > reg['n'] else max_n south = reg['s'] if min_s < reg['s'] else min_s east = reg['e'] if max_e < reg['e'] else max_e west = reg['w'] if min_w > reg['w'] else min_w # Set region to patch search radius grass.use_temp_region() grass.run_command('g.region', quiet=True, n=north, s=south, e=east, w=west, align='{}_patches_pol'.format(TMP_PREFIX)) # Create buffer around start-patch as a mask # for cost distance analysis grass.run_command('r.buffer', quiet=True, input=start_patch, output='MASK', distances=cutoff) grass.run_command('r.mapcalc', quiet=True, expression='{pf}_patch_{p}_neighbours_contur=\ if({pf}_patches_boundary=={p},\ null(),\ {pf}_patches_boundary)'.format( pf=TMP_PREFIX, p=cat)) grass.run_command('r.mask', flags='r', quiet=True) # Calculate cost distance cost_distance_map = '{}_patch_{}_cost_dist'.format(prefix, cat) grass.run_command('r.cost', flags=dist_flags, quiet=True, overwrite=True, input=costs, output=cost_distance_map, start_rast=start_patch, memory=memory) #grass.run_command('g.region', flags='up') # grass.raster.raster_history(cost_distance_map) cdhist = History(cost_distance_map) cdhist.clear() cdhist.creator = os.environ['USER'] cdhist.write() # History object cannot modify description grass.run_command('r.support', map=cost_distance_map, description='Generated by r.connectivity.distance', history=os.environ['CMDLINE']) # Export distance at boundaries maps = '{0}_patch_{1}_neighbours_contur,{2}_patch_{1}_cost_dist' maps = maps.format(TMP_PREFIX, cat, prefix), connections = grass.encode( grass.read_command('r.stats', flags='1ng', quiet=True, input=maps, separator=';').rstrip('\n')) if connections: con_array = np.genfromtxt(BytesIO(connections), delimiter=';', dtype=None, names=['x', 'y', 'cat', 'dist']) else: grass.warning('No connections for patch {}'.format(cat)) # Write centroid to vertex map vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val) vertex.table.conn.commit() # Remove temporary map data grass.run_command('g.remove', quiet=True, flags='f', type=['raster', 'vector'], pattern="{}*{}*".format(TMP_PREFIX, cat)) grass.del_temp_region() continue #Find closest points on neigbour patches to_cats = set(np.atleast_1d(con_array['cat'])) to_coords = [] for to_cat in to_cats: connection = con_array[con_array['cat'] == to_cat] connection.sort(order=['dist']) pixel = border_dist if len( connection) > border_dist else len(connection) - 1 # closest_points_x = connection['x'][pixel] # closest_points_y = connection['y'][pixel] closest_points_to_cat = to_cat closest_points_min_dist = connection['dist'][0] closest_points_dist = connection['dist'][pixel] closest_points_max_dist = connection['dist'][-1] to_patch_ids = vpatch_ids[vpatch_ids['cat'] == int(to_cat)]['vid'] if len(to_patch_ids) == 1: to_centroid = Centroid(v_id=to_patch_ids, c_mapinfo=vpatches.c_mapinfo) to_x = to_centroid.x to_y = to_centroid.y elif len(to_patch_ids) >= 1: xcoords = [] ycoords = [] for t_p in to_patch_ids: to_centroid = 
Centroid(v_id=int(t_p), c_mapinfo=vpatches.c_mapinfo) xcoords.append(to_centroid.x) ycoords.append(to_centroid.y) # Get centroid if not to_centroid: continue to_x = np.average(xcoords) to_y = np.average(ycoords) to_coords.append('{},{},{},{},{},{}'.format( connection['x'][0], connection['y'][0], to_cat, closest_points_min_dist, closest_points_dist, closest_points_max_dist)) #Save edges to network dataset if closest_points_dist <= 0: zero_dist = 1 # Write data to network network.write(Line([(from_x, from_y), (to_x, to_y)]), cat=lin_cat, attrs=( cat, int(closest_points_to_cat), closest_points_min_dist, closest_points_dist, closest_points_max_dist, )) network.table.conn.commit() lin_cat = lin_cat + 1 # Save closest points and shortest paths through cost raster as # vector map (r.drain limited to 1024 points) if requested if p_flag: grass.verbose('Extracting shortest paths for patch number \ {}...'.format(cat)) points_n = len(to_cats) tiles = int(points_n / 1024.0) rest = points_n % 1024 if not rest == 0: tiles = tiles + 1 tile_n = 0 while tile_n < tiles: tile_n = tile_n + 1 #Import closest points for start-patch in 1000er blocks sp = grass.feed_command('v.in.ascii', flags='nr', overwrite=True, quiet=True, input='-', stderr=subprocess.PIPE, output="{}_{}_cp".format( TMP_PREFIX, cat), separator=",", columns="x double precision,\ y double precision,\ to_p integer,\ dist_min double precision,\ dist double precision,\ dist_max double precision") sp.stdin.write(grass.encode("\n".join(to_coords))) sp.stdin.close() sp.wait() # Extract shortest paths for start-patch in chunks of # 1024 points cost_paths = "{}_{}_cost_paths".format(TMP_PREFIX, cat) start_points = "{}_{}_cp".format(TMP_PREFIX, cat) grass.run_command('r.drain', overwrite=True, quiet=True, input=cost_distance_map, output=cost_paths, drain=cost_paths, start_points=start_points) grass.run_command('v.db.addtable', map=cost_paths, quiet=True, columns="cat integer,\ from_p integer,\ to_p integer,\ dist_min double precision,\ dist double precision,\ dist_max double precision") grass.run_command('v.db.update', map=cost_paths, column='from_p', value=cat, quiet=True) grass.run_command('v.distance', quiet=True, from_=cost_paths, to=start_points, upload='to_attr', column='to_p', to_column='to_p') grass.run_command('v.db.join', quiet=True, map=cost_paths, column='to_p', other_column='to_p', other_table=start_points, subset_columns='dist_min,dist,dist_max') #grass.run_command('v.info', flags='c', # map=cost_paths) grass.run_command('v.patch', flags='ae', overwrite=True, quiet=True, input=cost_paths, output=shortest_paths) # Remove temporary map data grass.run_command('g.remove', quiet=True, flags='f', type=['raster', 'vector'], pattern="{}*{}*".format(TMP_PREFIX, cat)) # Remove temporary map data for patch if r_flag: grass.run_command('g.remove', flags='f', type='raster', name=cost_distance_map, quiet=True) vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val) vertex.table.conn.commit() # Print progress message grass.percent(i=int((float(counter) / n_cats) * 100), n=100, s=3) # Update counter for progress message counter = counter + 1 if zero_dist: grass.warning('Some patches are directly adjacent to others. 
\ Minimum distance set to 0.0000000001') # Close vector maps and build topology network.close() vertex.close() # Add vertex attributes # grass.run_command('v.db.addtable', map=vertex_map) # grass.run_command('v.db.join', map=vertex_map, column='cat', # other_table=in_db_connection[int(layer)]['table'], # other_column='cat', subset_columns=pop_proxy, # quiet=True) # Add history and meta data to produced maps grass.run_command('v.support', flags='h', map=edge_map, person=os.environ['USER'], cmdhist=os.environ['CMDLINE']) grass.run_command('v.support', flags='h', map=vertex_map, person=os.environ['USER'], cmdhist=os.environ['CMDLINE']) if p_flag: grass.run_command('v.support', flags='h', map=shortest_paths, person=os.environ['USER'], cmdhist=os.environ['CMDLINE']) # Output also Conefor files if requested if conefor_dir: query = """SELECT p_from, p_to, avg(dist) FROM (SELECT CASE WHEN from_p > to_p THEN to_p ELSE from_p END AS p_from, CASE WHEN from_p > to_p THEN from_p ELSE to_p END AS p_to, dist FROM {}) AS x GROUP BY p_from, p_to""".format(edge_map) with open(os.path.join(conefor_dir, 'undirected_connection_file'), 'w') as edges: edges.write( grass.read_command('db.select', sql=query, separator=' ')) with open(os.path.join(conefor_dir, 'directed_connection_file'), 'w') as edges: edges.write( grass.read_command('v.db.select', map=edge_map, separator=' ', flags='c')) with open(os.path.join(conefor_dir, 'node_file'), 'w') as nodes: nodes.write( grass.read_command('v.db.select', map=vertex_map, separator=' ', flags='c'))
def main():
    # check if you are in GRASS
    gisbase = os.getenv("GISBASE")
    if not gisbase:
        grass.fatal(_("$GISBASE not defined"))
        return 0
    if flags["l"]:
        try:
            from rmodislib import product
        except ImportError as e:
            grass.fatal("Unable to load i.modis library: {}".format(e))
        prod = product()
        prod.print_prods()
        return 0
    # return an error if q and spectral are set
    if not flags["q"] and options["spectral"] != "":
        grass.warning(
            _('If no QA layer chosen in the "spectral" option'
              " the command will report an error"))
    # return an error if both input and files option are set or not
    if options["input"] == "" and options["files"] == "":
        grass.fatal(_('Choose one of "input" or "files" options'))
        return 0
    elif options["input"] != "" and options["files"] != "":
        grass.fatal(
            _('It is not possible set "input" and "files"'
              " options together"))
        return 0
    # check whether to remove the files or not
    if flags["t"]:
        remove = False
    else:
        remove = True
    if grass.overwrite():
        over = True
    else:
        over = False
    # check whether to run quality check, rescaling and setting of colors
    if flags["q"]:
        analyze = False
    else:
        analyze = True
    # return the number of selected layers from HDF files
    if options["spectral"]:
        count = options["spectral"].strip("(").strip(")").split().count("1")
    else:
        count = 0

    outfile = None
    # check if a file for t.register has to be created
    if options["outfile"]:
        if flags["a"]:
            outfile = open(options["outfile"], "a")
        else:
            outfile = open(options["outfile"], "w")
        if count > 1:
            grass.warning("The spectral subsets are more than one so the"
                          " output file will be renamed")
    elif flags["w"] and not options["outfile"]:
        outfile = tempfile.NamedTemporaryFile(delete=False)

    # check whether to import a simple file or a mosaic
    if flags["m"] and options["input"] != "":
        grass.fatal(
            _("It is not possible to create a mosaic with a single"
              " HDF file"))
        return 0
    elif flags["m"]:
        mosaic(options, remove, analyze, over, outfile)
    else:
        single(options, remove, analyze, over, outfile)
    # if a t.register file is created
    if outfile:
        outfile.close()
        tempdir = grass.tempdir()
        # one layer only
        if count == 1:
            if flags["g"]:
                grass.message(_("file={name}".format(name=outfile.name)))
            else:
                grass.message(
                    _("You can use temporal framework, registering"
                      " the maps using t.register input=your_strds "
                      "'file={name}'".format(name=outfile.name)))
        # for more layers create several files with only a subset for
        # each layer
        elif count > 1:
            tfile = open(outfile.name)
            outfiles = {}
            lines = tfile.readlines()
            # get the codes from only one HDF
            for line in lines[:count]:
                if flags["m"]:
                    code = "_".join(line.split("|")[0].split("_")[2:])
                else:
                    code = line.split("|")[0].split(".")[-1]
                outfiles[code] = open(
                    os.path.join(tempdir, "{co}.txt".format(co=code)), "w")
            # split the lines for each code
            for line in lines:
                if flags["m"]:
                    code = "_".join(line.split("|")[0].split("_")[2:])
                else:
                    code = line.split("|")[0].split(".")[-1]
                outfiles[code].write(line)
            for k, v in outfiles.items():
                v.close()
            if flags["g"]:
                message = ""
            else:
                message = ("You can use temporal framework, registering the "
                           "maps in different temporal datasets using "
                           "t.register and \n")
            tfile.close()
            for fil in outfiles.values():
                message += "'file={name}'\n".format(name=fil.name)
            grass.message(_(message))

def main():
    # check if you are in GRASS
    gisbase = os.getenv('GISBASE')
    if not gisbase:
        grass.fatal(_('$GISBASE not defined'))
        return 0
    if flags['l']:
        try:
            from rmodislib import product
        except ImportError as e:
            grass.fatal("Unable to load i.modis library: {}".format(e))
        prod = product()
        prod.print_prods()
        return 0
    # return an error if q and spectral are set
    if not flags['q'] and options['spectral'] != '':
        grass.warning(
            _('If no QA layer chosen in the "spectral" option'
              ' the command will report an error'))
    # return an error if both input and files option are set or not
    if options['input'] == '' and options['files'] == '':
        grass.fatal(_('Choose one of "input" or "files" options'))
        return 0
    elif options['input'] != '' and options['files'] != '':
        grass.fatal(
            _('It is not possible set "input" and "files"'
              ' options together'))
        return 0
    # check the version
    version = grass.core.version()
    # this would be set automatically
    if version['version'].find('7.') == -1:
        grass.fatal(_('GRASS GIS version 7 required'))
        return 0
    # check whether to remove the files or not
    if flags['t']:
        remove = False
    else:
        remove = True
    if grass.overwrite():
        over = True
    else:
        over = False
    # check whether to run quality check, rescaling and setting of colors
    if flags['q']:
        analyze = False
    else:
        analyze = True
    # return the number of selected layers from HDF files
    if options['spectral']:
        count = options['spectral'].strip('(').strip(')').split().count('1')
    else:
        count = 0

    outfile = None
    # check if a file for t.register has to be created
    if options['outfile']:
        if flags['a']:
            outfile = open(options['outfile'], 'a')
        else:
            outfile = open(options['outfile'], 'w')
        if count > 1:
            grass.warning("The spectral subsets are more than one so the"
                          " output file will be renamed")
    elif flags['w'] and not options['outfile']:
        outfile = tempfile.NamedTemporaryFile(delete=False)

    # check whether to import a simple file or a mosaic
    if flags['m'] and options['input'] != '':
        grass.fatal(
            _('It is not possible to create a mosaic with a single'
              ' HDF file'))
        return 0
    elif flags['m']:
        mosaic(options, remove, analyze, over, outfile)
    else:
        single(options, remove, analyze, over, outfile)
    # if a t.register file is created
    if outfile:
        outfile.close()
        tempdir = grass.tempdir()
        # one layer only
        if count == 1:
            if flags['g']:
                grass.message(_("file={name}".format(name=outfile.name)))
            else:
                grass.message(
                    _("You can use temporal framework, registering"
                      " the maps using t.register input=your_strds "
                      "'file={name}'".format(name=outfile.name)))
        # for more layers create several files with only a subset for
        # each layer
        elif count > 1:
            tfile = open(outfile.name)
            outfiles = {}
            lines = tfile.readlines()
            # get the codes from only one HDF
            for line in lines[:count]:
                if flags['m']:
                    code = '_'.join(line.split('|')[0].split('_')[2:])
                else:
                    code = line.split('|')[0].split('.')[-1]
                outfiles[code] = open(
                    os.path.join(tempdir, "{co}.txt".format(co=code)), 'w')
            # split the lines for each code
            for line in lines:
                if flags['m']:
                    code = '_'.join(line.split('|')[0].split('_')[2:])
                else:
                    code = line.split('|')[0].split('.')[-1]
                outfiles[code].write(line)
            for k, v in outfiles.items():
                v.close()
            if flags['g']:
                message = ""
            else:
                message = ("You can use temporal framework, registering the "
                           "maps in different temporal datasets using "
                           "t.register and \n")
            tfile.close()
            for fil in outfiles.values():
                message += "'file={name}'\n".format(name=fil.name)
            grass.message(_(message))

def main():
    global rm_regions, rm_rasters, rm_vectors, tmpfolder
    # parameters
    if options['s2names']:
        s2names = options['s2names'].split(',')
        if os.path.isfile(s2names[0]):
            with open(s2names[0], 'r') as f:
                s2namesstr = f.read()
        else:
            s2namesstr = ','.join(s2names)
    tmpdirectory = options['directory']
    test_nprocs_memory()

    # make sure all required addons are installed
    if not grass.find_program('i.sentinel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.download' module was not found,"
                      " install it first:") + "\n" +
                    "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.import', '--help'):
        grass.fatal(_("The 'i.sentinel.import' module was not found,"
                      " install it first:") + "\n" +
                    "g.extension i.sentinel")
    if not grass.find_program('i.sentinel.parallel.download', '--help'):
        grass.fatal(_("The 'i.sentinel.parallel.download' module was not"
                      " found, install it first:") + "\n" +
                    "g.extension i.sentinel")
    if not grass.find_program('i.zero2null', '--help'):
        grass.fatal(_("The 'i.zero2null' module was not found,"
                      " install it first:") + "\n" +
                    "g.extension i.zero2null")

    # create a temporary directory for the downloaded data
    if tmpdirectory:
        if not os.path.isdir(tmpdirectory):
            try:
                os.makedirs(tmpdirectory)
            except Exception:
                grass.fatal(_("Unable to create temp dir"))
    else:
        tmpdirectory = grass.tempdir()
        tmpfolder = tmpdirectory

    # make distinct download and sen2cor directories
    download_dir = os.path.join(tmpdirectory,
                                'download_{}'.format(os.getpid()))
    try:
        os.makedirs(download_dir)
    except Exception:
        grass.fatal(_('Unable to create temp dir {}').format(download_dir))

    download_args = {
        'settings': options['settings'],
        'nprocs': options['nprocs'],
        'output': download_dir,
        'datasource': options['datasource'],
        'flags': 'f'
    }
    if options['limit']:
        download_args['limit'] = options['limit']
    if options['s2names']:
        download_args['flags'] += 's'
        download_args['scene_name'] = s2namesstr.strip()
        if options['datasource'] == 'USGS_EE':
            if flags['e']:
                download_args['flags'] += 'e'
            download_args['producttype'] = 'S2MSI1C'
    else:
        download_args['clouds'] = options['clouds']
        download_args['start'] = options['start']
        download_args['end'] = options['end']
        download_args['producttype'] = options['producttype']
    grass.run_command('i.sentinel.parallel.download', **download_args)

    number_of_scenes = len(os.listdir(download_dir))
    nprocs_final = min(number_of_scenes, int(options['nprocs']))

    # run atmospheric correction
    if flags['a']:
        sen2cor_folder = os.path.join(tmpdirectory,
                                      'sen2cor_{}'.format(os.getpid()))
        try:
            os.makedirs(sen2cor_folder)
        except Exception:
            grass.fatal(_("Unable to create temporary sen2cor folder {}")
                        .format(sen2cor_folder))
        grass.message(_('Starting atmospheric correction with sen2cor ...'))
        queue_sen2cor = ParallelModuleQueue(nprocs=nprocs_final)
        for idx, subfolder in enumerate(os.listdir(download_dir)):
            folderpath = os.path.join(download_dir, subfolder)
            for file in os.listdir(folderpath):
                if file.endswith('.SAFE'):
                    filepath = os.path.join(folderpath, file)
                    output_dir = os.path.join(
                        sen2cor_folder, 'sen2cor_result_{}'.format(idx))
                    # all remaining sen2cor parameters are left at their
                    # defaults
                    sen2cor_module = Module(
                        'i.sentinel-2.sen2cor',
                        input_file=filepath,
                        output_dir=output_dir,
                        sen2cor_path=options['sen2cor_path'],
                        nprocs=1,
                        run_=False
                    )
                    queue_sen2cor.put(sen2cor_module)
        queue_sen2cor.wait()
        download_dir = sen2cor_folder

    grass.message(_("Importing Sentinel scenes ..."))
    env = grass.gisenv()
    start_gisdbase = env['GISDBASE']
    start_location = env['LOCATION_NAME']
    start_cur_mapset = env['MAPSET']
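    # The import workers run in their own temporary mapsets, so the current
    # GISDBASE/LOCATION/MAPSET are recorded here; after all workers finish,
    # the code verifies that the session is back in the original mapset and
    # copies the imported maps over from the worker mapsets.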
    # save the current region for the import workers
    id = str(os.getpid())
    currentregion = 'tmp_region_' + id
    grass.run_command('g.region', save=currentregion, flags='p')

    queue_import = ParallelModuleQueue(nprocs=nprocs_final)
    memory_per_proc = round(float(options['memory']) / nprocs_final)
    mapsetids = []
    importflag = 'r'
    if flags['i']:
        importflag += 'i'
    if flags['c']:
        importflag += 'c'
    json_standard_folder = os.path.join(env['GISDBASE'],
                                        env['LOCATION_NAME'],
                                        env['MAPSET'], 'cell_misc')
    if not os.path.isdir(json_standard_folder):
        os.makedirs(json_standard_folder)
    subfolders = []
    for idx, subfolder in enumerate(os.listdir(download_dir)):
        if os.path.isdir(os.path.join(download_dir, subfolder)):
            subfolders.append(subfolder)
            mapsetid = 'S2_import_%s' % (str(idx + 1))
            mapsetids.append(mapsetid)
            directory = os.path.join(download_dir, subfolder)
            i_sentinel_import = Module('i.sentinel.import.worker',
                                       input=directory,
                                       mapsetid=mapsetid,
                                       memory=memory_per_proc,
                                       pattern=options['pattern'],
                                       flags=importflag,
                                       region=currentregion,
                                       metadata=json_standard_folder,
                                       run_=False)
            queue_import.put(i_sentinel_import)
    queue_import.wait()
    grass.run_command('g.remove', type='region', name=currentregion,
                      flags='f')

    # verify that switching the mapset worked
    env = grass.gisenv()
    gisdbase = env['GISDBASE']
    location = env['LOCATION_NAME']
    cur_mapset = env['MAPSET']
    if cur_mapset != start_cur_mapset:
        grass.fatal("New mapset is <%s>, but should be <%s>"
                    % (cur_mapset, start_cur_mapset))

    # copy the imported maps to the current mapset
    maplist = []
    cloudlist = []
    for new_mapset in mapsetids:
        for vect in grass.parse_command('g.list', type='vector',
                                        mapset=new_mapset):
            cloudlist.append(vect)
            grass.run_command('g.copy',
                              vector=vect + '@' + new_mapset + ',' + vect)
        for rast in grass.parse_command('g.list', type='raster',
                                        mapset=new_mapset):
            maplist.append(rast)
            grass.run_command('g.copy',
                              raster=rast + '@' + new_mapset + ',' + rast)
            # set zero cells to null
            grass.run_command('i.zero2null', map=rast, quiet=True)
        grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))

    # create a space time dataset of the imported scenes
    grass.message(_("Creating STRDS of Sentinel scenes ..."))
    if options['strds_output']:
        strds = options['strds_output']
        grass.run_command('t.create', output=strds, title="Sentinel-2",
                          desc="Sentinel-2", quiet=True)

        # create a register file; timestamps are parsed from the second
        # underscore-separated field of the map names
        registerfile = grass.tempfile()
        file = open(registerfile, 'w')
        for imp_rast in list(set(maplist)):
            date_str1 = imp_rast.split('_')[1].split('T')[0]
            date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6],
                                      date_str1[6:])
            time_str = imp_rast.split('_')[1].split('T')[1]
            clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4],
                                       time_str[4:])
            file.write("%s|%s %s\n" % (imp_rast, date_str2, clock_str2))
        file.close()
        grass.run_command('t.register', input=strds, file=registerfile,
                          quiet=True)
        # remove the register file
        grass.try_remove(registerfile)

        if flags['c']:
            stvdsclouds = strds + '_clouds'
            grass.run_command('t.create', output=stvdsclouds,
                              title="Sentinel-2 clouds",
                              desc="Sentinel-2 clouds", quiet=True,
                              type='stvds')
            registerfileclouds = grass.tempfile()
            fileclouds = open(registerfileclouds, 'w')
            for imp_clouds in cloudlist:
                date_str1 = imp_clouds.split('_')[1].split('T')[0]
                date_str2 = "%s-%s-%s" % (date_str1[:4], date_str1[4:6],
                                          date_str1[6:])
                time_str = imp_clouds.split('_')[1].split('T')[1]
                clock_str2 = "%s:%s:%s" % (time_str[:2], time_str[2:4],
                                           time_str[4:])
                fileclouds.write("%s|%s %s\n"
                                 % (imp_clouds, date_str2, clock_str2))
            fileclouds.close()
            grass.run_command('t.register', type='vector',
                              input=stvdsclouds, file=registerfileclouds,
                              quiet=True)
            grass.message("<%s> is created" % stvdsclouds)
            # remove the register file
            grass.try_remove(registerfileclouds)
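        # Each band listed in the import pattern gets its own STRDS below;
        # the 'name like' filter assumes the band identifier (e.g. B04_10m)
        # is embedded in the imported map names.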
        # extract an STRDS for each band
        bands = []
        pattern = options['pattern']
        if "(" in pattern:
            global beforebrackets, afterbrackets
            beforebrackets = re.findall(r"(.*?)\(", pattern)[0]
            inbrackets = re.findall(r"\((.*?)\)", pattern)[0]
            afterbrackets = re.findall(r"\)(.*)", pattern)[0]
            bands = ["%s%s%s" % (beforebrackets, x, afterbrackets)
                     for x in inbrackets.split('|')]
        else:
            bands = pattern.split('|')
        for band in bands:
            if flags['i'] and ('20' in band or '60' in band):
                # with -i, the 20 m and 60 m bands are imported resampled
                # to 10 m, so adjust the band name accordingly
                band = band.replace('20', '10').replace('60', '10')
            grass.run_command('t.rast.extract', input=strds,
                              where="name like '%" + band + "%'",
                              output="%s_%s" % (strds, band), quiet=True)
            grass.message("<%s_%s> is created" % (strds, band))
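
# A standalone sketch of the bracketed-pattern expansion used above: a pattern
# such as "B(02|03|04)_10m" is split into one band name per alternative. The
# sample pattern is hypothetical; only the standard 're' module is needed.
import re

pattern = "B(02|03|04)_10m"
if "(" in pattern:
    beforebrackets = re.findall(r"(.*?)\(", pattern)[0]
    inbrackets = re.findall(r"\((.*?)\)", pattern)[0]
    afterbrackets = re.findall(r"\)(.*)", pattern)[0]
    bands = ["%s%s%s" % (beforebrackets, x, afterbrackets)
             for x in inbrackets.split('|')]
else:
    bands = pattern.split('|')
print(bands)  # ['B02_10m', 'B03_10m', 'B04_10m']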