Example #1
def Cross(gisbase,gisdb, mapset, location="cea", vector_name="grid", point_name="sample", group=None):
    import importlib
    import grass
    importlib.reload(grass)  # reset any state left over from a previous session
    import grass.script as gscript
    import grass.script.setup as gsetup
    gsetup.init(gisbase,
            gisdb, location, mapset)
    suffix=""
    if location=="cea":
        suffix="_ease"
    gscript.run_command('v.out.ogr',input=point_name,output=point_name+suffix,overwrite=True,quiet=True) 
    gscript.run_command('v.out.ogr',input=vector_name,output=vector_name+suffix,overwrite=True,quiet=True,flags='m')
    gscript.run_command('v.db.addcolumn', map=point_name, column='cellid integer', quiet=True)
    gscript.run_command('v.what.vect', map=point_name, column='cellid', query_map=vector_name, query_column='cat', quiet=True)
    if group:
        # only create and fill the group column when a group name was passed
        gscript.run_command('v.db.addcolumn', map=point_name, column=group + ' VARCHAR(10)', quiet=True)
        gscript.run_command('v.what.vect', map=point_name, column=group, query_map=vector_name, query_column=group, quiet=True)
    result = gscript.read_command('db.select', sql='select * from ' + point_name)
    with open('result.csv', 'w') as t:
        t.write(result)
    s = gscript.read_command('db.select', flags='c',
            sql='select cellid from ' + point_name + ' where cellid is not null')
    a = set(s.split('\n'))
    a.discard('')
    b = ','.join(a)
    subname='sub'+vector_name
    gscript.run_command('v.extract',input=vector_name,output=subname, where="cat in (%s)" % b,overwrite=True,quiet=True)
    gscript.run_command('v.out.ogr',input=subname,output=subname+suffix,overwrite=True,quiet=True,flags="m",format="ESRI_Shapefile")
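
A minimal driver for the function above; the GISBASE/GISDBASE paths and the 'species' group column are placeholders, not values from the original project:

if __name__ == '__main__':
    # hypothetical paths; point these at a real GRASS install and database
    Cross('/usr/lib/grass78', '/home/user/grassdata', 'PERMANENT',
          location='cea', vector_name='grid', point_name='sample', group='species')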
Example #2
def hexagrid(gisbase,gisdb, mapset, location="cea",radius=15000):
    import importlib
    import grass
    importlib.reload(grass)  # reset any state left over from a previous session
    import grass.script as gscript
    import grass.script.setup as gsetup
    gsetup.init(gisbase,
            gisdb, location, mapset)
    radius=int(radius)
    if radius==0:
        radius = 10000
    radius2 = radius*2
    gscript.run_command('v.proj',location='ll',input='sample',output='sample',overwrite=True,quiet=True)
    gscript.run_command('g.region',vect='sample',quiet=True)
    gscript.run_command('g.region',n='n+%d' % radius2 ,e='e+%d' % radius2, \
            w='w-%d' % radius2,s='s-%d' % radius2,quiet=True)
    gscript.run_command('v.mkgrid',flags='h',map='grid',box=(radius,radius),overwrite=True,quiet=True)
    gscript.run_command('v.out.ogr',input='sample',output='sample_ease',overwrite=True,quiet=True) 
    gscript.run_command('v.out.ogr',input='grid',output='grid_ease',overwrite=True,quiet=True)
    gscript.run_command('v.db.addcolumn',map='sample',column='cellid integer',quiet=True)
    gscript.run_command('v.what.vect',map='sample',column='cellid',query_map='grid',query_column='cat',quiet=True)
    result = gscript.read_command('db.select',sql='select * from sample') 
    with open('result.csv', 'w') as t:
        t.write(result)
    s = gscript.read_command('db.select', flags='c', \
            sql='select cellid from sample where cellid is not null')
    a = set(s.split('\n'))
    a.discard('')
    b = ','.join(a)
    gscript.run_command('v.extract',input='grid',output='subgrid', where="cat in (%s)" % b,overwrite=True,quiet=True)
    gscript.run_command('v.out.ogr',input='subgrid',output='subgrid_ease',overwrite=True,quiet=True,format="ESRI_Shapefile")
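
A hedged example call; as with Cross above, the installation and database paths are placeholders:

if __name__ == '__main__':
    # build a hexagonal grid of 15 km cells around the 'sample' points
    hexagrid('/usr/lib/grass78', '/home/user/grassdata', 'PERMANENT',
             location='cea', radius=15000)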
Example #3
def rlayerInfo(map):
    raster_info = grass.read_command('r.info', map=map, flags='g').strip().split('\n')
    raster_info = list2dict(raster_info)
    
    map_range = grass.read_command('r.info', map=map, flags='r').strip().split('\n')
    map_range = list2dict(map_range)
    raster_info['range'] = map_range
    return raster_info
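
rlayerInfo relies on a list2dict helper that is not part of this snippet; a minimal sketch, assuming the 'key=value' lines emitted by r.info -g and -r:

def list2dict(lines):
    # turn ['rows=100', 'cols=200', ...] into {'rows': '100', 'cols': '200', ...}
    return dict(line.split('=', 1) for line in lines if '=' in line)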
 def _defineEnvironment(self):
     try:
         gscript.read_command('i.group', flags='g', group=self.group, subgroup=self.group, env=self.env)
     except CalledModuleError:
         gscript.run_command('i.group', group=self.group, subgroup=self.group,
                             input=[self.group + '_' + ext for ext in ('r', 'g', 'b')], env=self.env)
     maps = gscript.read_command('i.group', flags='g', group=self.group, subgroup=self.group).strip()
     if maps:
         self.env = get_environment(raster=maps.splitlines()[0])
    def ReturnXY(self):
        # fetch the coordinates stored in the X column
        Xcoord = grass.read_command('v.db.select', map=self.Map_vect_poits, column='X_Corrd')
        # clean the list: drop the header and blank entries, convert the rest to float
        Xcoord_split = Xcoord.split("\n")
        Xcoord_split.remove("")
        Xcoord_split.remove("X_Corrd")
        self.Xcoord_list = [float(x) for x in Xcoord_split]

        # fetch the coordinates stored in the Y column
        Ycoord = grass.read_command('v.db.select', map=self.Map_vect_poits, column='Y_Corrd')
        Ycoord_split = Ycoord.split("\n")
        Ycoord_split.remove("")
        Ycoord_split.remove("Y_Corrd")
        self.Ycoord_list = [float(y) for y in Ycoord_split]
def length(data):
    feat_osm = int(((grass.read_command("v.info", map=data,flags="t",quiet=True)).split("\n")[2]).split("=")[1])
    if feat_osm>0:
        length_data = grass.read_command("v.to.db",map=data,option="length",flags="p")
        s_data=0 
        l_data = length_data.split("\n")
        for item in l_data[1:-1]:
            s_data+=float(item.split("|")[1])         
    else:
        s_data=0
    return s_data
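
For reference, length() slices off the header and the trailing blank line of the v.to.db -p report and sums the second pipe-separated field; the same arithmetic on made-up output:

sample = "cat|length\n1|1250.7\n2|833.1\n"
total = sum(float(line.split("|")[1]) for line in sample.split("\n")[1:-1])
print(total)  # 2083.8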
Example #7
def createAbsoluteInterval():
    grass.run_command('g.region', s=0, n=80, w=0, e=120, b=0, t=50, res=10, res3=10,
                      flags='p3', quiet=True)

    grass.mapcalc(exp="prec_1 = rand(0, 550)", overwrite=True)
    grass.mapcalc(exp="prec_2 = rand(0, 450)", overwrite=True)
    grass.mapcalc(exp="prec_3 = rand(0, 320)", overwrite=True)
    grass.mapcalc(exp="prec_4 = rand(0, 510)", overwrite=True)
    grass.mapcalc(exp="prec_5 = rand(0, 300)", overwrite=True)
    grass.mapcalc(exp="prec_6 = rand(0, 650)", overwrite=True)

    grass.mapcalc(exp="temp_1 = rand(0, 550)", overwrite=True)
    grass.mapcalc(exp="temp_2 = rand(0, 450)", overwrite=True)
    grass.mapcalc(exp="temp_3 = rand(0, 320)", overwrite=True)
    grass.mapcalc(exp="temp_4 = rand(0, 510)", overwrite=True)
    grass.mapcalc(exp="temp_5 = rand(0, 300)", overwrite=True)
    grass.mapcalc(exp="temp_6 = rand(0, 650)", overwrite=True)

    n1 = grass.read_command("g.tempfile", pid=1, flags='d').strip()
    fd = open(n1, 'w')
    fd.write(
        "prec_1|2001-01-01|2001-02-01\n"
        "prec_2|2001-04-01|2001-05-01\n"
        "prec_3|2001-05-01|2001-09-01\n"
        "prec_4|2001-09-01|2002-01-01\n"
        "prec_5|2002-01-01|2002-05-01\n"
        "prec_6|2002-05-01|2002-07-01\n"
    )
    fd.close()

    n2 = grass.read_command("g.tempfile", pid=2, flags='d').strip()
    fd = open(n2, 'w')
    fd.write(
        "temp_1|2000-10-01|2001-01-01\n"
        "temp_2|2001-04-01|2001-05-01\n"
        "temp_3|2001-05-01|2001-09-01\n"
        "temp_4|2001-09-01|2002-01-01\n"
        "temp_5|2002-01-01|2002-05-01\n"
        "temp_6|2002-05-01|2002-07-01\n"
    )
    fd.close()
    name1 = 'absinterval1'
    name2 = 'absinterval2'
    grass.run_command('t.unregister', type='rast',
                      maps='prec_1,prec_2,prec_3,prec_4,prec_5,prec_6,'
                      'temp_1,temp_2,temp_3,temp_4,temp_5,temp_6')
    for name, fname in zip((name1, name2), (n1, n2)):
        grass.run_command('t.create', overwrite=True, type='strds',
                          temporaltype='absolute', output=name,
                          title="A test with input files", descr="A test with input files")
        grass.run_command('t.register', flags='i', input=name, file=fname, overwrite=True)

    return name1, name2
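
A hedged usage sketch: the returned dataset names can be handed straight to the temporal modules, e.g. to inspect what was registered:

name1, name2 = createAbsoluteInterval()
print(grass.read_command('t.info', type='strds', input=name1))
print(grass.read_command('t.rast.list', input=name2))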
def univar(mapcost):
  # parse mean and standard deviation out of the plain-text r.univar report
  apoio = grass.read_command('r.univar', map=mapcost)
  statslist = apoio.split("\n")

  desvio = statslist[11].replace('standard deviation: ', "")
  media = statslist[9].replace('mean: ', "")
  return desvio, media
def GetCoeff(vect):
    coord_start = grass.read_command("v.to.db", map=vect, option="start", type="line",flags="p").split("\n")[1]
    x_start = float(coord_start.split("|")[1])
    y_start = float(coord_start.split("|")[2])
    coord_end = grass.read_command("v.to.db", map=vect, option="end", type="line",flags="p").split("\n")[1]   
    x_end = float(coord_end.split("|")[1])
    y_end = float(coord_end.split("|")[2])
    if (x_end - x_start) != 0:
        m = (y_end-y_start)/(x_end-x_start)
    else:
        m = 10**9
    return m
Example #10
File: pGrass.py, project: timoco/pgis
 def dsExists(self,inDS):
     ''' check to see if a dataset exists in the grassDB
     INPUT: dataset name (string)
     OUTPUT: exists (boolean)'''
     isRast = grass.read_command(self.__gMList, type='rast', pattern=inDS)
     isVect = grass.read_command(self.__gMList, type='vect', pattern=inDS)
     dsExists = len(isRast) > 0 or len(isVect) > 0
     return dsExists
Example #11
def createRelativeInterval():
    grass.run_command('g.region', s=0, n=80, w=0, e=120, b=0, t=50, res=10, res3=10,
                      flags='p3', quiet=True)

    grass.mapcalc(exp="prec_1 = rand(0, 550)", overwrite=True)
    grass.mapcalc(exp="prec_2 = rand(0, 450)", overwrite=True)
    grass.mapcalc(exp="prec_3 = rand(0, 320)", overwrite=True)
    grass.mapcalc(exp="prec_4 = rand(0, 510)", overwrite=True)
    grass.mapcalc(exp="prec_5 = rand(0, 300)", overwrite=True)
    grass.mapcalc(exp="prec_6 = rand(0, 650)", overwrite=True)

    grass.mapcalc(exp="temp_1 = rand(0, 550)", overwrite=True)
    grass.mapcalc(exp="temp_2 = rand(0, 450)", overwrite=True)
    grass.mapcalc(exp="temp_3 = rand(0, 320)", overwrite=True)
    grass.mapcalc(exp="temp_4 = rand(0, 510)", overwrite=True)
    grass.mapcalc(exp="temp_5 = rand(0, 300)", overwrite=True)
    grass.mapcalc(exp="temp_6 = rand(0, 650)", overwrite=True)

    n1 = grass.read_command("g.tempfile", pid=1, flags='d').strip()
    fd = open(n1, 'w')
    fd.write(
        "prec_1|1|4\n"
        "prec_2|6|7\n"
        "prec_3|7|10\n"
        "prec_4|10|11\n"
        "prec_5|11|14\n"
        "prec_6|14|17\n"
    )
    fd.close()

    n2 = grass.read_command("g.tempfile", pid=2, flags='d').strip()
    fd = open(n2, 'w')
    fd.write(
        "temp_1|5|6\n"
        "temp_2|6|7\n"
        "temp_3|7|10\n"
        "temp_4|10|11\n"
        "temp_5|11|18\n"
        "temp_6|19|22\n"
    )
    fd.close()
    name1 = 'relinterval1'
    name2 = 'relinterval2'
    grass.run_command('t.unregister', type='rast',
                      maps='prec_1,prec_2,prec_3,prec_4,prec_5,prec_6,'
                      'temp_1,temp_2,temp_3,temp_4,temp_5,temp_6')
    for name, fname in zip((name1, name2), (n1, n2)):
        grass.run_command('t.create', overwrite=True, type='strds',
                          temporaltype='relative', output=name,
                          title="A test with input files", descr="A test with input files")
        grass.run_command('t.register', flags='i', input=name, file=fname, unit="years", overwrite=True)
    return name1, name2
Example #12
def test_tp(filename, link=False, outdir=None):
    """!Test VFK file using GRASS GIS

    Print geometry test report to stdout
    
    @return True on success False on failure
    """
    print "\nSimple Features Test (GRASS-OGR %s):" % ("link" if link else "import")
    print '-' * 80
    layers = []
    for row in grass.read_command('v.external', flags='t', dsn=filename, quiet=True, stderr = None).splitlines():
        layers.append(row.split(','))

    if link:
        gmodule = 'v.external'
    else:
        gmodule = 'v.in.ogr'
    
    if not outdir:
        outdir = os.path.dirname(filename)
    
    for layer in layers:
        if layer[1] == 'none':
            continue

        # import or link layer
        ret = grass.read_command(gmodule, dsn=filename, layer=layer[0], overwrite=True, quiet=True, stderr=grass.STDOUT).splitlines()

        # generate graphics output
        image_path = os.path.join(outdir, layer[0] + '.png')
        grass.run_command('d.mon', start = 'cairo', output = image_path, overwrite=True)
        grass.run_command('g.region', vect = layer[0])
        grass.run_command('d.erase')
        grass.run_command('d.vect', map = layer[0], quiet = True)
        grass.run_command('d.text', text = layer[0], at = '1,95', color = 'black')
        grass.run_command('d.mon', stop = 'cairo')
        nempty = 0
        for line in ret:
            if 'without geometry' in line:
                nempty += int(line.split(' ')[1])
        
        vinfo = grass.vector_info_topo(layer[0])
        if layer[1] == 'polygon':
            nfeat = vinfo['areas']
        elif layer[1] == 'linestring':
            nfeat = vinfo['lines']
        else:
            nfeat = vinfo['points']
        
        report_geom(layer[0], layer[1], 0, nempty, nfeat + nempty)
    
    print('-' * 80)
Example #13
def output_headers(river, xsections, outfile):
	""" 
	Prepare the output sdf file, and add header section
	"""
	# Start header section
	ver=grass.read_command('g.version')
	dt=str(datetime.date.today())

	outfile.write("# RAS geometry file create on: "+dt+"\n")
	outfile.write("# exported from GRASS GIS version: "+ver+"\n\n")
	outfile.write("BEGIN HEADER:\n")
	proj=grass.read_command('g.proj',flags="g")
	d=grass.parse_key_val(proj)
	if d['units'] == "metres":
		units="METRIC"
	elif d['units'] == "feet":
		units="US CUSTOMARY"
	else:
		units=""

	outfile.write(" UNITS: "+ units + "\n")
	outfile.write(" DTM TYPE: GRID\n")
	outfile.write(" STREAM LAYER: "+ river +"\n")
	outfile.write(" CROSS-SECTION LAYER: "+ xsections +"\n")
	
	# write out the extents
	info = grass.read_command('v.info', map=river, flags="g")
	d=grass.parse_key_val(info)
	xmin=d['west']
	xmax=d['east']
	ymin=d['south']
	ymax=d['north']
	outfile.write(" BEGIN SPATIALEXTENT: \n")
	outfile.write("   Xmin: "+ xmin +"\n")
	outfile.write("   Xmax: "+ xmax +"\n")
	outfile.write("   Ymin: "+ ymin +"\n")
	outfile.write("   Ymax: "+ ymax +"\n")	
	outfile.write(" END SPATIALEXTENT: \n")

	# write out how many reaches and cross sections
	info = grass.read_command('v.info', map=river, flags="t")
	d = grass.parse_key_val(info)
	num_reaches=d['lines']
	outfile.write(" NUMBER OF REACHES: "+ num_reaches +"\n")
	

	info = grass.read_command('v.info', map=xsections, flags="t")
	d=grass.parse_key_val(info)
	num_xsects=d['lines']
	outfile.write(" NUMBER OF CROSS-SECTIONS: "+ num_xsects +"\n")

	outfile.write("END HEADER:\n\n")
def rulesreclass(mapa, dirs):
  grass.run_command('g.region', rast=mapa)
  x = grass.read_command('r.stats', flags='a', input=mapa)
  y = x.split('\n')
  os.chdir(dirs)
  txtsaida = mapa + '_rules.txt'
  with open(txtsaida, 'w') as txtreclass:
    for i in y:
      if i == '':
        continue
      f = i.split(' ')
      # stop once r.stats reaches the no-data ('*') or leftover ('L') rows
      if '*' in f or 'L' in f:
        break
      ids = int(f[0])
      ha = float(f[1])
      haint = float(round(ha))
      haint2 = haint / 10000 + 1
      txtreclass.write(str(ids) + '=' + str(haint2) + '\n')
  return txtsaida
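
The returned file holds one 'old=new' rule per line; a hedged peek at it (map and directory names are illustrative):

rules = rulesreclass('patches', '/tmp')
with open(rules) as f:
    print(f.read())  # e.g. "1=2.0\n2=5.0\n", category mapped to area in ha (approx.) + 1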
def createtxtED(mapa, dirs):
  x = grass.read_command('r.stats', flags='a', input=mapa)
  y = x.split('\n')
  os.chdir(dirs)
  txtsaida = mapa + 'PCT_Borda.txt'
  with open(mapa + '_EDGE.txt', 'w') as txtreclass:
    txtreclass.write('COD,HA\n')
    for i in y:
      if i == '':
        continue
      f = i.split(' ')
      if '*' in f:
        break
      ids = int(f[0])
      ha = float(f[1])
      haint = ha / 10000 + 1
      txtreclass.write(str(ids) + ',' + str(haint) + '\n')
def escala_frag(mapa, esc):
  esclist = esc.split(',')
  res = grass.read_command('g.region', rast=mapa, flags='m')
  res2 = res.split('\n')
  res3 = float(res2[5].replace('ewres=', ''))
  listasizefinal = []
  listametersfinal = []
  for i in esclist:
    esc = int(i)
    escfinaMeters = esc / 2
    escfina1 = int(round(esc / res3, ndigits=0))
    # moving-window sizes must be odd, so bump even sizes by one cell
    if escfina1 % 2 == 0:
      escfina1 = escfina1 + 1
    listasizefinal.append(escfina1)
    listametersfinal.append(esc)
  return listasizefinal, listametersfinal
Example #17
def setCPRJ(map):
    center = []
    info_region = grass.read_command('g.region', flags = 'ael', rast = '%s' % (map)) 
    dict_region = grass.parse_key_val(info_region, ':')
    lon = dict_region['center longitude']
    lat = dict_region['center latitude']
    lon = str(lon)
    lat = str(lat)
    lon = lon.replace(':', " ")
    lat = lat.replace(':', " ")
    if lat[-1] == 'N':
        signlat = 1
    if lat[-1] == 'S':
        signlat = -1
    if lon[-1] == 'E':
        signlon = 1
    if lon[-1] == 'W':
        signlon = -1
    lat = lat[:-1] 
    lon = lon[:-1]
    lat = [float(i) for i in lat.split()]
    lon = [float(i) for i in lon.split()]
    lat = (lat[0] + (lat[1] / 60) + lat[2] / 3600) * float(signlat)
    lon = (lon[0] + (lon[1] / 60) + lon[2] / 3600) * float(signlon)
    ns = float(dict_region['north-south extent'])
    we = float(dict_region['east-west extent'])
    distance = (ns + we) / 2
    center.append(lat)
    center.append(lon)
    center.append(distance)
    return center
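
A quick sanity check of the DMS-to-decimal conversion applied above (pure Python, no GRASS session needed):

lat = [45.0, 30.0, 0.0]  # parsed from a hypothetical '45:30:00N'
signlat = 1
assert (lat[0] + lat[1] / 60 + lat[2] / 3600) * float(signlat) == 45.5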
Example #18
    def _getRegionParams(self,opt_region):
        """!Get region parameters from region specified or active default region

        @return region_params as a dictionary
        """
        self._debug("_getRegionParameters", "started")

        if opt_region:
            reg_spl = opt_region.strip().split('@', 1)
            reg_mapset = '.'
            if len(reg_spl) > 1:
                reg_mapset = reg_spl[1]

            if not gscript.find_file(name=reg_spl[0], element='windows',
                                     mapset=reg_mapset)['name']:
                gscript.fatal(_("Region <%s> not found") % opt_region)

        if opt_region:
            s = gscript.read_command('g.region',
                                    quiet = True,
                                    flags = 'ug',
                                    region = opt_region)
            region_params = gscript.parse_key_val(s, val_type = float)
            gscript.verbose("Using region parameters for region %s" %opt_region)
        else:
            region_params = gscript.region()
            gscript.verbose("Using current grass region")

        self._debug("_getRegionParameters", "finished")
        return region_params
Example #19
File: ghelp.py, project: GRASS-GIS/grass-ci
    def _pageCitation(self):
        """Citation information"""
        try:
            # import only when needed
            import grass.script as gscript
            text = gscript.read_command('g.version', flags='x')
        except CalledModuleError as error:
            text = _("Unable to provide citation suggestion,"
                     " see GRASS GIS website instead."
                     " The error was: {}").format(error)

        # put text into a scrolling panel
        window = ScrolledPanel(self.aboutNotebook)
        stat_text = wx.TextCtrl(
            window, id=wx.ID_ANY, value=text,
            style=wx.TE_MULTILINE | wx.TE_READONLY)
        window.SetAutoLayout(True)
        window.sizer = wx.BoxSizer(wx.VERTICAL)
        window.sizer.Add(item=stat_text, proportion=1,
                         flag=wx.EXPAND | wx.ALL, border=3)
        window.SetSizer(window.sizer)
        window.Layout()
        window.SetupScrolling()

        return window
Example #20
def create_db(driver, database):
    subst_database = substitute_db(database)
    if driver == 'dbf':
        path = subst_database
        # check if destination directory exists
        if not os.path.isdir(path):
            # create dbf database
            os.makedirs(path)
            return True
        return False
    
    if driver == 'sqlite':
        path = os.path.dirname(subst_database)
        # check if destination directory exists
        if not os.path.isdir(path):
            os.makedirs(path)
    
    if subst_database in grass.read_command('db.databases', quiet = True,
                                      driver = driver).splitlines():
        return False

    grass.info(_("Target database doesn't exist, "
                 "creating a new database using <%s> driver...") % driver)
    try:
        grass.run_command('db.createdb', driver = driver,
                          database = subst_database)
    except CalledModuleError:
        grass.fatal(_("Unable to create database <%s> by driver <%s>") % \
                        (subst_database, driver))
        
    return False
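
A hedged usage sketch; the path below follows the usual GRASS SQLite convention, and substitute_db is assumed to expand the $GISDBASE-style variables:

created = create_db('sqlite', '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db')
# 'created' is True only when a fresh DBF directory had to be made;
# for sqlite the function returns False whether or not db.createdb ran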
def txt(mapa, txtname, folder):
    grass.run_command('g.region', rast=mapa)
    os.chdir(r'E:\data_2015\___john\001.Thalita_p2\__Resultados_metricas_parte1')
    os.chdir(folder)
    x = grass.read_command('r.stats', flags='a', input=mapa)
    y = x.split('\n')

    # keep only lines without the no-data ('*') category
    listapoio = [i for i in y if '*' not in i]
    del listapoio[-1]

    with open(txtname, 'w') as fd:
        fd.write("Cod,AreaM2,Area_ha\n")
        for i in listapoio:
            temp1 = i.split(' ')
            cod = int(temp1[0])
            aream2 = float(temp1[1])
            area_HA = round(aream2 / 10000, 2)
            fd.write(str(cod) + ',' + str(aream2) + ',' + str(area_HA) + '\n')
Example #22
def main():

    # Get the options
    input = options["input"]
    where = options["where"]
    columns = options["columns"]
    tempwhere = options["t_where"]
    layer = options["layer"]
    separator = grass.separator(options["separator"])

    if where == "" or where == " " or where == "\n":
        where = None

    if columns == "" or columns == " " or columns == "\n":
        columns = None

    # Make sure the temporal database exists
    tgis.init()

    sp = tgis.open_old_stds(input, "stvds")

    rows = sp.get_registered_maps("name,layer,mapset,start_time,end_time",
                                  tempwhere, "start_time", None)

    col_names = ""
    if rows:
        for row in rows:
            vector_name = "%s@%s" % (row["name"], row["mapset"])
            # In case a layer is defined in the vector dataset,
            # we override the option layer
            if row["layer"]:
                layer = row["layer"]

            select = grass.read_command("v.db.select", map=vector_name,
                                        layer=layer, columns=columns,
                                        separator="%s" % (separator), where=where)

            if not select:
                grass.fatal(_("Unable to run v.db.select for vector map <%s> "
                              "with layer %s") % (vector_name, layer))
            # The first line contains the column names
            lines = select.split("\n")
            count = 0
            for entry in lines:
                if entry.strip() != "":
                    # print the column names in case they change
                    if count == 0:
                        col_names_new = "start_time%send_time%s%s" % (
                            separator, separator, entry)
                        if col_names != col_names_new:
                            col_names = col_names_new
                            print(col_names)
                    else:
                        if row["end_time"]:
                            print("%s%s%s%s%s" % (row["start_time"], separator,
                                                  row["end_time"], separator, entry))
                        else:
                            print("%s%s%s%s" % (row["start_time"],
                                                separator, separator, entry))
                    count += 1
Example #23
def createRelativePoint():
    grass.run_command('g.region', s=0, n=80, w=0, e=120, b=0, t=50, res=10, res3=10,
                      flags='p3', quiet=True)

    grass.mapcalc(exp="prec_1 = rand(0, 550)", overwrite=True)
    grass.mapcalc(exp="prec_2 = rand(0, 450)", overwrite=True)
    grass.mapcalc(exp="prec_3 = rand(0, 320)", overwrite=True)
    grass.mapcalc(exp="prec_4 = rand(0, 510)", overwrite=True)
    grass.mapcalc(exp="prec_5 = rand(0, 300)", overwrite=True)
    grass.mapcalc(exp="prec_6 = rand(0, 650)", overwrite=True)

    n1 = grass.read_command("g.tempfile", pid=1, flags='d').strip()
    fd = open(n1, 'w')
    fd.write(
        "prec_1|1\n"
        "prec_2|3\n"
        "prec_3|5\n"
        "prec_4|7\n"
        "prec_5|11\n"
        "prec_6|13\n"
    )
    fd.close()
    name = 'relpoint'
    grass.run_command('t.create', overwrite=True, type='strds',
                      temporaltype='relative', output=name,
                      title="A test with input files", descr="A test with input files")

    grass.run_command('t.register', unit="day", input=name, file=n1, overwrite=True)
    return name
def createtxtED(mapa, txtname, folder):
  x = grass.read_command('r.stats', flags='a', input=mapa)
  y = x.split('\n')
  os.chdir(r'C:\_data\talitha\Mapas_classificados_final\saidas_grass\saidas_2015_03_d11')
  os.chdir(folder)
  txtreclass = open(txtname, 'w')
  txtreclass.write('COD,Classe,Area_HA\n')
  nomes = ['Matrix', 'EDGE', 'Core']
  c = 0
  print(y)
  for i in y:
    if i == '':
      continue
    f = i.split(' ')
    if '*' in f:
      break
    ids = int(f[0])
    ha = float(f[1])
    haint = int(ha) // 10000 + 1  # integer division, as in the original
    txtreclass.write(str(ids) + ',' + nomes[c] + ',' + str(haint) + '\n')
    c = c + 1
  txtreclass.close()
Example #25
def filter(method, names, winsize, order, prefix, itercount, fit_up):

    current_mapset = grass.read_command("g.mapset", flags="p")
    current_mapset = current_mapset.strip()

    inputs = init_rasters(names)
    output_names = [prefix + name for name in names]
    outputs = init_rasters(output_names, mapset=current_mapset)
    try:
        open_rasters(outputs, write=True)
        open_rasters(inputs)

        reg = Region()
        for i in range(reg.rows):
            # import ipdb; ipdb.set_trace()
            row_data = np.array([_get_row_or_nan(r, i) for r in inputs])
            filtered_rows = _filter(method, row_data, winsize, order, itercount, fit_up)
            for map_num in range(len(outputs)):
                out_map = outputs[map_num]
                row = filtered_rows[map_num, :]
                buf = Buffer(row.shape, out_map.mtype, row)
                out_map.put_row(i, buf)
    finally:
        close_rasters(outputs)
        close_rasters(inputs)
Example #26
def function(elem):
 mymapset = 'm' + str(elem)
 grass.run_command('g.mapset', mapset=mymapset, loc=MYLOC, flags='c')
 spn0 = str(GRASSDBASE) + '/' + str(MYLOC) + '/' + str(mymapset) + '/WIND'
 print(str(elem) + ' ' + spn0)
 # wait until the WIND file of the new mapset shows up on disk
 while not os.path.isfile(spn0):
  time.sleep(0.1)
 gge = grass.gisenv()
 spn = str(GRASSDBASE) + '/' + str(MYLOC) + '/' + str(mymapset) + '/SEARCH_PATH'
 wb = open(spn, 'a')
 wb.write('PERMANENT\n')
 wb.write('user1\n')
 wb.write(str(mymapset) + '\n')
 wb.close()
 pa0 = 's' + str(elem)
 comm2 = 'cat = ' + str(elem)
 grass.run_command('g.region', rast='elevation')
 grass.run_command('g.region', res=elem)
 varx = grass.read_command('g.region', flags='g').splitlines()
 wb = open('results.txt', 'a')
 wb.write(str(elem) + ' ' + str(gge) + ' ' + str(varx) + '\n')
 wb.close()
def txt(mapa, txtname, folder):
    grass.run_command('g.region', rast=mapa)
    os.chdir(r'C:\_data\talitha\Mapas_classificados_final\saidas_grass\saidas_2015_03_d11')
    os.chdir(folder)
    x = grass.read_command('r.stats', flags='a', input=mapa)
    y = x.split('\n')

    # keep only lines without the no-data ('*') category
    listapoio = [i for i in y if '*' not in i]
    del listapoio[-1]

    with open(txtname, 'w') as fd:
        fd.write("cod,areaM2,Area_ha\n")
        for i in listapoio:
            temp1 = i.split(' ')
            cod = int(temp1[0])
            aream2 = float(temp1[1])
            area_HA = round(aream2 / 10000, 2) + 1
            fd.write(str(cod) + ',' + str(aream2) + ',' + str(area_HA) + '\n')
def createtxtED(mapa):
    pct_edge = 0
    grass.run_command('g.region', rast=mapa)
    x = grass.read_command('r.stats', flags='a', input=mapa)
    y = x.split('\n')
    os.chdir(outputfolder)
    nome = mapa.replace("extracByMask_rast_imgbin_eroED_50m_EDGE_FINAL", '')

    txtreclass = open(nome + 'PCT_EDGE.txt', 'w')
    txtreclass.write('class,COD,A_M2,PCT\n')
    classe = ['Matrix', 'EDGE', 'CORE']
    cont_class = 0
    del y[-1]
    del y[-1]

    # first pass: accumulate the total area
    acumula = 0
    for i in y:
        if i == '':
            continue
        split = float(i.split(' ')[1])
        acumula = acumula + split

    # second pass: write one row per class and compute the Matheron index
    for i in y:
        if i == '':
            continue
        f = i.split(' ')
        if '*' in f:
            break
        ids = int(f[0])
        m2 = float(f[1])
        pct = round(m2 / acumula * 100, 2)
        txtreclass.write(classe[cont_class] + ',' + str(ids) + ',' + str(m2) + ',' + str(pct) + '\n')
        cont_class = cont_class + 1
        if ids == 1:
            pct_edge = round(m2 / acumula * 100, 2)
        if ids == 2:
            pctflt = round(m2 / acumula * 100, 2)
            txt_Matheron = open(nome + '_Matheron.txt', 'w')
            if pct_edge > 0:
                txt_Matheron.write('Matheron\n')
                # pct of edge divided by pct of flt
                Matheron = pct_edge / pctflt
                txt_Matheron.write(str(Matheron))
            txt_Matheron.close()
    txtreclass.close()
Example #29
def AddCol(vect,t):
    list_c = []
    list_col = ((grass.read_command("db.describe",table=vect,flags="c",quiet=True)).split("\n"))[2:-1]
    for c in list_col:
        list_c.append((c.split(":")[1]).lstrip())
    if not "%s"%t in list_c:
        grass.run_command("v.db.addcolumn",map=vect,columns="%s double"%t,quiet=True)
Example #30
File: pGrassRaster.py, project: timoco/pgis
 def rasterReport(self,inRast):
     '''
         Run r.report GRASS function.
         INPUT: inRast 
         OUTPUT: raster report
     '''
     return grass.read_command(self.__rReport, quiet=True, map=inRast)
Example #31
def BuildFileISF(attributes, preferences, decision, outputMap, outputTxt):
    outputTxt = outputTxt + ".isf"
    outf = open(outputTxt, "w")
    outf.write("**ATTRIBUTES\n")
    for i in range(len(attributes)):
        outf.write("+ %s: (continuous)\n" % attributes[i])
    outf.write("+ %s: [" % decision)
    value = grass.read_command("r.describe", flags="1n", map=decision)
    v = value.split()

    for i in range(len(v) - 1):
        outf.write("%s, " % str(v[i]))
    outf.write("%s]\n" % str(v[len(v) - 1]))
    outf.write("decision: %s\n" % decision)

    outf.write("\n**PREFERENCES\n")
    for i in range(len(attributes)):
        if (preferences[i] == ""):
            preferences[i] = "none"
        outf.write("%s: %s\n" % (attributes[i], preferences[i]))
    outf.write("%s: gain\n" % decision)

    if flags['n']:
        for i in range(len(attributes)):
            print("%s - convert null to 0" % str(attributes[i]))
            grass.run_command("r.null", map=attributes[i], null=0)

    outf.write("\n**EXAMPLES\n")
    examples = []
    MATRIX = []

    for i in range(len(attributes)):
        grass.mapcalc("rast=if(isnull(${decision})==0,${attribute},null())",
                      rast="rast",
                      decision=decision,
                      attribute=attributes[i])
        tmp = grass.read_command("r.stats", flags="1n", nv="?", input="rast")
        example = tmp.split()
        examples.append(example)
    tmp = grass.read_command("r.stats", flags="1n", nv="?", input=decision)
    example = tmp.split()
    examples.append(example)

    MATRIX = list(map(list, zip(*examples)))
    MATRIX = [r for r in MATRIX
              if '?' not in r]  # remove all rows with at least one "?"
    MATRIX = [list(i) for i in set(tuple(j)
                                   for j in MATRIX)]  # remove duplicate examples

    for r in range(len(MATRIX)):
        for c in range(len(MATRIX[0])):
            outf.write("%s " % (MATRIX[r][c]))


#			outf.write("%s " % round(float(MATRIX[r][c]), 2))
        outf.write("\n")

    outf.write("**END")
    outf.close()
    return outputTxt
Example #32
def main():
    repeat = int(options.pop('repeat'))
    nprocs = int(options.pop('nprocs'))
    subregions = options['subregions']
    tosplit = flags['d']
    # filter unused optional params
    for key in list(options.keys()):  # copy the keys: we mutate the dict below
        if options[key] == '':
            options.pop(key)
    if tosplit and options['output_series']:
        gscript.fatal(_("Parallelization on subregion level is not supported together with <output_series> option"))

    if not gscript.overwrite() and gscript.list_grouped('raster', pattern=options['output'] + '_run1')[gscript.gisenv()['MAPSET']]:
        gscript.fatal(_("Raster map <{r}> already exists."
                        " To overwrite, use the --overwrite flag").format(r=options['output'] + '_run1'))
    global TMP_RASTERS
    cats = []
    if tosplit:
        gscript.message(_("Splitting subregions"))
        cats = gscript.read_command('r.stats', flags='n', input=subregions).strip().splitlines()
        if len(cats) < 2:
            gscript.fatal(_("Not enough subregions to split computation. Do not use -d flag."))
        mapcalcs = []
        for cat in cats:
            new = PREFIX + cat
            TMP_RASTERS.append(new)
            mapcalcs.append('{new} = if({sub} == {cat}, {sub}, null())'.format(sub=subregions, cat=cat, new=new))
        pool = Pool(nprocs)
        p = pool.map_async(split_subregions, mapcalcs)
        try:
            p.get()
        except (KeyboardInterrupt, CalledModuleError):
            return

    options_list = []
    for i in range(repeat):
        if cats:
            for cat in cats:
                op = options.copy()
                op['random_seed'] = i + 1
                if 'output_series' in op:
                    op['output_series'] += '_run' + str(i + 1) + '_' + cat
                    TMP_RASTERS.append(op['output_series'])
                op['output'] += '_run' + str(i + 1) + '_' + cat
                op['subregions'] = PREFIX + cat
                options_list.append((repeat, i + 1, cat, op))
                TMP_RASTERS.append(op['output'])
        else:
            op = options.copy()
            op['random_seed'] = i + 1
            if 'output_series' in op:
                op['output_series'] += '_run' + str(i + 1)
            op['output'] += '_run' + str(i + 1)
            options_list.append((repeat, i + 1, None, op))

    pool = Pool(nprocs)
    p = pool.map_async(futures_process, options_list)
    try:
        p.get()
    except (KeyboardInterrupt, CalledModuleError):
        return

    if cats:
        gscript.message(_("Patching subregions"))
        for i in range(repeat):
            patch_input = [options['output'] + '_run' + str(i + 1) + '_' + cat for cat in cats]
            gscript.run_command('r.patch', input=patch_input, output=options['output'] + '_run' + str(i + 1))

    return 0
Example #33
def main():

    global allmap
    global trainmap
    global feature_vars
    global training_vars
    global model_output_csv
    global model_output_csvt
    global temptable
    global r_commands
    global reclass_files

    allmap = trainmap = feature_vars = training_vars = None
    model_output_csv = model_output_csvt = temptable = r_commands = None
    reclass_files = None

    voting_function = "voting <- function (x, w) {\n"
    voting_function += "res <- tapply(w, x, sum, simplify = TRUE)\n"
    voting_function += "maj_class <- as.numeric(names(res)[which.max(res)])\n"
    voting_function += "prob <- as.numeric(res[which.max(res)])\n"
    voting_function += "return(list(maj_class=maj_class, prob=prob))\n}"

    weighting_functions = {}
    weighting_functions[
        'smv'] = "weights <- rep(1/length(weighting_base), length(weighting_base))"
    weighting_functions[
        'swv'] = "weights <- weighting_base/sum(weighting_base)"
    weighting_functions[
        'bwwv'] = "weights <- 1-(max(weighting_base) - weighting_base)/(max(weighting_base) - min(weighting_base))"
    weighting_functions[
        'qbwwv'] = "weights <- ((min(weighting_base) - weighting_base)/(max(weighting_base) - min(weighting_base)))**2"

    packages = {
        'svmRadial': ['kernlab'],
        'svmLinear': ['kernlab'],
        'svmPoly': ['kernlab'],
        'rf': ['randomForest'],
        'ranger': ['ranger', 'dplyr'],
        'rpart': ['rpart'],
        'C5.0': ['C50'],
        'xgbTree': ['xgboost', 'plyr']
    }

    install_package = "if(!is.element('%s', installed.packages()[,1])){\n"
    install_package += "cat('\\n\\nInstalling %s package from CRAN')\n"
    install_package += "if(!file.exists(Sys.getenv('R_LIBS_USER'))){\n"
    install_package += "dir.create(Sys.getenv('R_LIBS_USER'), recursive=TRUE)\n"
    install_package += ".libPaths(Sys.getenv('R_LIBS_USER'))}\n"
    install_package += "chooseCRANmirror(ind=1)\n"
    install_package += "install.packages('%s', dependencies=TRUE)}"

    if options['segments_map']:
        allfeatures = options['segments_map']
        segments_layer = options['segments_layer']
        allmap = True
    else:
        allfeatures = options['segments_file']
        allmap = False

    if options['training_map']:
        training = options['training_map']
        training_layer = options['training_layer']
        trainmap = True
    else:
        training = options['training_file']
        trainmap = False

    classcol = None
    if options['train_class_column']:
        classcol = options['train_class_column']
    output_classcol = options['output_class_column']
    output_probcol = None
    if options['output_prob_column']:
        output_probcol = options['output_prob_column']
    classifiers = options['classifiers'].split(',')
    weighting_modes = options['weighting_modes'].split(',')
    weighting_metric = options['weighting_metric']
    if len(classifiers) == 1:
        gscript.message('Only one classifier, so no voting applied')

    processes = int(options['processes'])
    folds = options['folds']
    partitions = options['partitions']
    tunelength = options['tunelength']
    separator = gscript.separator(options['separator'])
    tunegrids = literal_eval(
        options['tunegrids']) if options['tunegrids'] else {}

    max_features = None
    if options['max_features']:
        max_features = int(options['max_features'])

    training_sample_size = None
    if options['training_sample_size']:
        training_sample_size = options['training_sample_size']

    tuning_sample_size = None
    if options['tuning_sample_size']:
        tuning_sample_size = options['tuning_sample_size']

    output_model_file = None
    if options['output_model_file']:
        output_model_file = options['output_model_file'].replace("\\", "/")

    input_model_file = None
    if options['input_model_file']:
        input_model_file = options['input_model_file'].replace("\\", "/")

    classification_results = None
    if options['classification_results']:
        classification_results = options['classification_results'].replace(
            "\\", "/")

    probabilities = flags['p']

    model_details = None
    if options['model_details']:
        model_details = options['model_details'].replace("\\", "/")

    raster_segments_map = None
    if options['raster_segments_map']:
        raster_segments_map = options['raster_segments_map']

    classified_map = None
    if options['classified_map']:
        classified_map = options['classified_map']

    r_script_file = None
    if options['r_script_file']:
        r_script_file = options['r_script_file']

    variable_importance_file = None
    if options['variable_importance_file']:
        variable_importance_file = options['variable_importance_file'].replace(
            "\\", "/")

    accuracy_file = None
    if options['accuracy_file']:
        accuracy_file = options['accuracy_file'].replace("\\", "/")

    bw_plot_file = None
    if options['bw_plot_file']:
        bw_plot_file = options['bw_plot_file'].replace("\\", "/")

    if allmap:
        feature_vars = gscript.tempfile().replace("\\", "/")
        gscript.run_command('v.db.select',
                            map_=allfeatures,
                            file_=feature_vars,
                            layer=segments_layer,
                            quiet=True,
                            overwrite=True)
    else:
        feature_vars = allfeatures.replace("\\", "/")

    if trainmap:
        training_vars = gscript.tempfile().replace("\\", "/")
        gscript.run_command('v.db.select',
                            map_=training,
                            file_=training_vars,
                            layer=training_layer,
                            quiet=True,
                            overwrite=True)
    else:
        training_vars = training.replace("\\", "/")

    r_commands = gscript.tempfile().replace("\\", "/")

    r_file = open(r_commands, 'w')

    if processes > 1:
        install = install_package % ('doParallel', 'doParallel', 'doParallel')
        r_file.write(install)
        r_file.write("\n")

    # automatic installation of missing R packages
    install = install_package % ('caret', 'caret', 'caret')
    r_file.write(install)
    r_file.write("\n")
    install = install_package % ('e1071', 'e1071', 'e1071')
    r_file.write(install)
    r_file.write("\n")
    install = install_package % ('data.table', 'data.table', 'data.table')
    r_file.write(install)
    r_file.write("\n")
    for classifier in classifiers:
        if classifier in packages:
            for package in packages[classifier]:
                install = install_package % (package, package, package)
                r_file.write(install)
                r_file.write("\n")
    r_file.write("\n")
    r_file.write('library(caret)')
    r_file.write("\n")
    r_file.write('library(data.table)')
    r_file.write("\n")

    if processes > 1:
        r_file.write("library(doParallel)")
        r_file.write("\n")
        r_file.write("registerDoParallel(cores = %d)" % processes)
        r_file.write("\n")

    if not flags['t']:
        r_file.write(
            "features <- data.frame(fread('%s', sep='%s', header=TRUE, blank.lines.skip=TRUE, showProgress=FALSE), row.names=1)"
            % (feature_vars, separator))
        r_file.write("\n")
        if classcol:
            r_file.write(
                "if('%s' %%in%% names(features)) {features <- subset(features, select=-%s)}"
                % (classcol, classcol))
            r_file.write("\n")

    if input_model_file:
        r_file.write("finalModels <- readRDS('%s')" % input_model_file)
        r_file.write("\n")
        for classifier in classifiers:
            for package in packages[classifier]:
                r_file.write("library(%s)" % package)
                r_file.write("\n")
    else:
        r_file.write(
            "training <- data.frame(fread('%s', sep='%s', header=TRUE, blank.lines.skip=TRUE, showProgress=FALSE))"
            % (training_vars, separator))
        r_file.write("\n")
        # We have to make sure that class variable values start with a letter as
        # they will be used as variables in the probabilities calculation
        r_file.write("origclassnames <- training$%s" % classcol)
        r_file.write("\n")
        r_file.write(
            "training$%s <- as.factor(paste('class', training$%s, sep='_'))" %
            (classcol, classcol))
        r_file.write("\n")
        if tuning_sample_size:
            r_file.write(
                "rndid <- with(training, ave(training[,1], %s, FUN=function(x) {sample.int(length(x))}))"
                % classcol)
            r_file.write("\n")
            r_file.write("tuning_data <- training[rndid<=%s,]" %
                         tuning_sample_size)
            r_file.write("\n")
        else:
            r_file.write("tuning_data <- training")
            r_file.write("\n")
        # If a max_features value is set, then proceed to feature selection.
        # Currently, feature selection uses random forest. TODO: specific feature selection for each classifier.
        if max_features:
            r_file.write(
                "RfeControl <- rfeControl(functions=rfFuncs, method='cv', number=10, returnResamp = 'all')"
            )
            r_file.write("\n")
            r_file.write(
                "RfeResults <- rfe(subset(tuning_data, select=-%s), tuning_data$%s, sizes=c(1:%i), rfeControl=RfeControl)"
                % (classcol, classcol, max_features))
            r_file.write("\n")
            r_file.write("if(length(predictors(RfeResults))>%s)" %
                         max_features)
            r_file.write("\n")
            r_file.write(
                "{if((RfeResults$results$Accuracy[%s+1] - RfeResults$results$Accuracy[%s])/RfeResults$results$Accuracy[%s] < 0.03)"
                % (max_features, max_features, max_features))
            r_file.write("\n")
            r_file.write(
                "{RfeUpdate <- update(RfeResults, subset(tuning_data, select=-%s), tuning_data$%s, size=%s)"
                % (classcol, classcol, max_features))
            r_file.write("\n")
            r_file.write("bestPredictors <- RfeUpdate$bestVar}}")
            r_file.write(" else {")
            r_file.write("\n")
            r_file.write("bestPredictors <- predictors(RfeResults)}")
            r_file.write("\n")
            r_file.write(
                "tuning_data <- tuning_data[,c('%s', bestPredictors)]" %
                classcol)
            r_file.write("\n")
            r_file.write("training <- training[,c('%s', bestPredictors)]" %
                         classcol)
            r_file.write("\n")
            if not flags['t']:
                r_file.write("features <- features[,bestPredictors]")
                r_file.write("\n")
        if probabilities:
            r_file.write(
                "MyControl.cv <- trainControl(method='repeatedcv', number=%s, repeats=%s, classProbs=TRUE, sampling='down')"
                % (folds, partitions))
        else:
            r_file.write(
                "MyControl.cv <- trainControl(method='repeatedcv', number=%s, repeats=%s, sampling='down')"
                % (folds, partitions))
        r_file.write("\n")
        r_file.write("fmla <- %s ~ ." % classcol)
        r_file.write("\n")
        r_file.write("models.cv <- list()")
        r_file.write("\n")
        r_file.write("finalModels <- list()")
        r_file.write("\n")
        r_file.write("variableImportance <- list()")
        r_file.write("\n")
        if training_sample_size:
            r_file.write(
                "rndid <- with(training, ave(training[,2], %s, FUN=function(x) {sample.int(length(x))}))"
                % classcol)
            r_file.write("\n")
            r_file.write("training_data <- training[rndid<=%s,]" %
                         training_sample_size)
            r_file.write("\n")
        else:
            r_file.write("training_data <- training")
            r_file.write("\n")
        for classifier in classifiers:
            if classifier in tunegrids:
                r_file.write("Grid <- expand.grid(%s)" % tunegrids[classifier])
                r_file.write("\n")
                r_file.write(
                    "%sModel.cv <- train(fmla, tuning_data, method='%s', trControl=MyControl.cv, tuneGrid=Grid"
                    % (classifier, classifier))
            else:
                r_file.write(
                    "%sModel.cv <- train(fmla, tuning_data, method='%s', trControl=MyControl.cv, tuneLength=%s"
                    % (classifier, classifier, tunelength))
            if flags['n']:
                r_file.write(", preprocess=c('center', 'scale')")
            r_file.write(")")
            r_file.write("\n")
            r_file.write("models.cv$%s <- %sModel.cv" %
                         (classifier, classifier))
            r_file.write("\n")
            r_file.write(
                "finalControl <- trainControl(method = 'none', classProbs = TRUE)"
            )
            r_file.write("\n")

            r_file.write(
                "finalModel <- train(fmla, training_data, method='%s', trControl=finalControl, tuneGrid=%sModel.cv$bestTune"
                % (classifier, classifier))
            if flags['n']:
                r_file.write(", preprocess=c('center', 'scale')")
            r_file.write(")")
            r_file.write("\n")
            r_file.write("finalModels$%s <- finalModel" % classifier)
            r_file.write("\n")
            r_file.write("variableImportance$%s <- varImp(finalModel)" %
                         classifier)
            r_file.write("\n")
        if len(classifiers) > 1:
            r_file.write("resamps.cv <- resamples(models.cv)")
            r_file.write("\n")
            r_file.write(
                "accuracy_means <- as.vector(apply(resamps.cv$values[seq(2,length(resamps.cv$values), by=2)], 2, mean))"
            )
            r_file.write("\n")
            r_file.write(
                "kappa_means <- as.vector(apply(resamps.cv$values[seq(3,length(resamps.cv$values), by=2)], 2, mean))"
            )
            r_file.write("\n")
        else:
            r_file.write("resamps.cv <- models.cv[[1]]$resample")
            r_file.write("\n")
            r_file.write("accuracy_means <- mean(resamps.cv$Accuracy)")
            r_file.write("\n")
            r_file.write("kappa_means <- mean(resamps.cv$Kappa)")
            r_file.write("\n")

        if output_model_file:
            r_file.write("saveRDS(finalModels, '%s')" % (output_model_file))
            r_file.write("\n")

    if not flags['t']:
        r_file.write("predicted <- data.frame(predict(finalModels, features))")
        r_file.write("\n")
        # Now erase the 'class_' prefix again in order to get original class values
        r_file.write(
            "predicted <- data.frame(sapply(predicted, function (x) {gsub('class_', '', x)}))"
        )
        r_file.write("\n")
        if probabilities:
            r_file.write(
                "probabilities <- data.frame(predict(finalModels, features, type='prob'))"
            )
            r_file.write("\n")
            r_file.write(
                "colnames(probabilities) <- gsub('.c', '_prob_c', colnames(probabilities))"
            )
            r_file.write("\n")
        r_file.write("ids <- rownames(features)")
        r_file.write("\n")
        # We try to liberate memory space as soon as possible, so erasing non necessary data
        r_file.write("rm(features)")
        r_file.write("\n")
        if flags['i'] or len(classifiers) == 1:
            r_file.write("resultsdf <- data.frame(id=ids, predicted)")
        else:
            r_file.write("resultsdf <- data.frame(id=ids)")
        r_file.write("\n")

        if len(classifiers) > 1:
            r_file.write(voting_function)
            r_file.write("\n")

            if weighting_metric == 'kappa':
                r_file.write("weighting_base <- kappa_means")
            else:
                r_file.write("weighting_base <- accuracy_means")
            r_file.write("\n")
            for weighting_mode in weighting_modes:
                r_file.write(weighting_functions[weighting_mode])
                r_file.write("\n")
                r_file.write("weights <- weights / sum(weights)")
                r_file.write("\n")
                r_file.write("vote <- apply(predicted, 1, voting, w=weights)")
                r_file.write("\n")
                r_file.write(
                    "vote <- as.data.frame(matrix(unlist(vote), ncol=2, byrow=TRUE))"
                )
                r_file.write("\n")
                r_file.write("resultsdf$%s_%s <- vote$V1" %
                             (output_classcol, weighting_mode))
                r_file.write("\n")
                r_file.write("resultsdf$%s_%s <- vote$V2" %
                             (output_probcol, weighting_mode))
                r_file.write("\n")

        r_file.write("rm(predicted)")
        r_file.write("\n")

        if allmap and not flags['f']:
            model_output = gscript.tempfile().replace("\\", "/")
            model_output_csv = model_output + '.csv'
            write_string = "write.csv(resultsdf, '%s'," % model_output_csv
            write_string += " row.names=FALSE, quote=FALSE)"
            r_file.write(write_string)
            r_file.write("\n")

        if classified_map:
            reclass_files = {}
            if len(classifiers) > 1:
                if flags['i']:
                    for classifier in classifiers:
                        tmpfilename = gscript.tempfile()
                        reclass_files[classifier] = tmpfilename.replace(
                            "\\", "/")
                        r_file.write(
                            "tempdf <- data.frame(resultsdf$id, resultsdf$%s)"
                            % (classifier))
                        r_file.write("\n")
                        r_file.write(
                            "reclass <- data.frame(out=apply(tempdf, 1, function(x) paste(x[1],'=', x[2])))"
                        )
                        r_file.write("\n")
                        r_file.write(
                            "write.table(reclass$out, '%s', col.names=FALSE, row.names=FALSE, quote=FALSE)"
                            % reclass_files[classifier])
                        r_file.write("\n")
                for weighting_mode in weighting_modes:
                    tmpfilename = gscript.tempfile()
                    reclass_files[weighting_mode] = tmpfilename.replace(
                        "\\", "/")
                    r_file.write(
                        "tempdf <- data.frame(resultsdf$id, resultsdf$%s_%s)" %
                        (output_classcol, weighting_mode))
                    r_file.write("\n")
                    r_file.write(
                        "reclass <- data.frame(out=apply(tempdf, 1, function(x) paste(x[1],'=', x[2])))"
                    )
                    r_file.write("\n")
                    r_file.write(
                        "write.table(reclass$out, '%s', col.names=FALSE, row.names=FALSE, quote=FALSE)"
                        % reclass_files[weighting_mode])
                    r_file.write("\n")
            else:
                tmpfilename = gscript.tempfile()
                reclass_files[classifiers[0]] = tmpfilename.replace("\\", "/")
                r_file.write(
                    "reclass <- data.frame(out=apply(resultsdf, 1, function(x) paste(x[1],'=', x[2])))"
                )
                r_file.write("\n")
                r_file.write(
                    "write.table(reclass$out, '%s', col.names=FALSE, row.names=FALSE, quote=FALSE)"
                    % reclass_files[classifiers[0]])
                r_file.write("\n")

        if classification_results:
            if probabilities:
                r_file.write("resultsdf <- cbind(resultsdf, probabilities)")
                r_file.write("\n")
                r_file.write("rm(probabilities)")
                r_file.write("\n")
            r_file.write(
                "write.csv(resultsdf, '%s', row.names=FALSE, quote=FALSE)" %
                classification_results)
            r_file.write("\n")
            r_file.write("rm(resultsdf)")
            r_file.write("\n")
        r_file.write("\n")

    if accuracy_file:
        r_file.write(
            "df_means <- data.frame(method=names(models.cv),accuracy=accuracy_means, kappa=kappa_means)"
        )
        r_file.write("\n")
        r_file.write(
            "write.csv(df_means, '%s', row.names=FALSE, quote=FALSE)" %
            accuracy_file)
        r_file.write("\n")
    if variable_importance_file:
        r_file.write("sink('%s')" % variable_importance_file)
        r_file.write("\n")
        for classifier in classifiers:
            r_file.write("cat('Classifier: %s')" % classifier)
            r_file.write("\n")
            r_file.write("cat('******************************')")
            r_file.write("\n")
            r_file.write(
                "variableImportance$%s$importance[order(variableImportance$%s$importance$Overall, decreasing=TRUE),, drop=FALSE]"
                % (classifier, classifier)
            )
            r_file.write("\n")
        r_file.write("sink()")
        r_file.write("\n")
    if model_details:
        r_file.write("sink('%s')" % model_details)
        r_file.write("\n")
        r_file.write("cat('BEST TUNING VALUES')")
        r_file.write("\n")
        r_file.write("cat('******************************')")
        r_file.write("\n")
        r_file.write("\n")
        r_file.write("lapply(models.cv, function(x) x$best)")
        r_file.write("\n")
        r_file.write("cat('\\n\\n')")
        r_file.write("\n")
        r_file.write("cat('SUMMARY OF RESAMPLING RESULTS')")
        r_file.write("\n")
        r_file.write("cat('******************************')")
        r_file.write("\n")
        r_file.write("cat('\\n\\n')")
        r_file.write("\n")
        r_file.write("summary(resamps.cv)")
        r_file.write("\n")
        r_file.write("cat('\\n')")
        r_file.write("\n")
        r_file.write("cat('\\nRESAMPLED CONFUSION MATRICES')")
        r_file.write("\n")
        r_file.write("cat('******************************')")
        r_file.write("\n")
        r_file.write("cat('\\n\\n')")
        r_file.write("\n")
        r_file.write(
            "conf.mat.cv <- lapply(models.cv, function(x) confusionMatrix(x))")
        r_file.write("\n")
        r_file.write("print(conf.mat.cv)")
        r_file.write("\n")
        r_file.write("cat('DETAILED CV RESULTS')")
        r_file.write("\n")
        r_file.write("cat('\\n\\n')")
        r_file.write("\n")
        r_file.write("cat('******************************')")
        r_file.write("\n")
        r_file.write("cat('\\n\\n')")
        r_file.write("\n")
        r_file.write("lapply(models.cv, function(x) x$results)")
        r_file.write("\n")
        r_file.write("sink()")
        r_file.write("\n")

    if bw_plot_file and len(classifiers) > 1:
        r_file.write("png('%s.png')" % bw_plot_file)
        r_file.write("\n")
        r_file.write("print(bwplot(resamps.cv))")
        r_file.write("\n")
        r_file.write("dev.off()")
        r_file.write("\n")

    r_file.close()

    if r_script_file:
        shutil.copy(r_commands, r_script_file)

    gscript.message("Running R now. Following output is R output.")
    try:
        subprocess.check_call(
            ['Rscript', r_commands],
            stderr=subprocess.STDOUT,
        )
    except subprocess.CalledProcessError:
        gscript.fatal(
            "There was an error in the execution of the R script.\nPlease check the R output."
        )

    gscript.message("Finished running R.")

    if allmap and not flags['f']:

        model_output_csvt = model_output + '.csvt'
        temptable = 'classif_tmp_table_%d' % os.getpid()

        f = open(model_output_csvt, 'w')
        header_string = '"Integer"'
        if flags['i']:
            for classifier in classifiers:
                header_string += ',"Integer"'
        if len(classifiers) > 1:
            for weighting_mode in weighting_modes:
                header_string += ',"Integer"'
                header_string += ',"Real"'
        else:
            header_string += ',"Integer"'

        f.write(header_string)
        f.close()

        gscript.message("Loading results into attribute table")
        gscript.run_command('db.in.ogr',
                            input_=model_output_csv,
                            output=temptable,
                            overwrite=True,
                            quiet=True)
        index_creation = "CREATE INDEX idx_%s_cat" % temptable
        index_creation += " ON %s (id)" % temptable
        gscript.run_command('db.execute', sql=index_creation, quiet=True)
        columns = gscript.read_command('db.columns',
                                       table=temptable).splitlines()[1:]
        orig_cat = gscript.vector_db(allfeatures)[int(segments_layer)]['key']
        gscript.run_command('v.db.join',
                            map_=allfeatures,
                            column=orig_cat,
                            otable=temptable,
                            ocolumn='id',
                            subset_columns=columns,
                            quiet=True)

    if classified_map:
        for classification, reclass_file in reclass_files.items():
            output_map = classified_map + '_' + classification
            gscript.run_command('r.reclass',
                                input=raster_segments_map,
                                output=output_map,
                                rules=reclass_file,
                                quiet=True)
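
The core pattern in the example above is generating an R script from Python and executing it with Rscript via subprocess. A minimal, self-contained sketch of that pattern (the file name and the R body are illustrative only, and it assumes Rscript is on the PATH):

import subprocess
import tempfile

# write a throwaway R script (the R body here is purely illustrative)
with tempfile.NamedTemporaryFile('w', suffix='.R', delete=False) as r_file:
    r_file.write("df <- data.frame(x=c(1, 2, 3))\n")
    r_file.write("write.csv(df, 'out.csv', row.names=FALSE, quote=FALSE)\n")
    script_path = r_file.name

# run it and fail loudly on a non-zero exit code, as the module above does
subprocess.check_call(['Rscript', script_path], stderr=subprocess.STDOUT)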
Example #34
def main():
    gscript.set_raise_on_error(False)
    options, flags = gscript.parser()

    import wx

    from grass.script.setup import set_gui_path
    set_gui_path()

    from core.settings import UserSettings
    from core.giface import StandaloneGrassInterface
    from iclass.frame import IClassMapFrame

    group_name = subgroup_name = map_name = trainingmap_name = None

    if options['group']:
        if not options['subgroup']:
            gscript.fatal(_("Name of subgroup required"))
        group_name = gscript.find_file(name=options['group'],
                                       element='group')['name']
        if not group_name:
            gscript.fatal(_("Group <%s> not found") % options['group'])
        subgroups = gscript.read_command('i.group',
                                         group=group_name,
                                         flags='sg').splitlines()
        if options['subgroup'] not in subgroups:
            gscript.fatal(_("Subgroup <%s> not found") % options['subgroup'])
        subgroup_name = options['subgroup']

    if options['map']:
        map_name = gscript.find_file(name=options['map'],
                                     element='cell')['fullname']
        if not map_name:
            gscript.fatal(_("Raster map <%s> not found") % options['map'])

    if options['trainingmap']:
        trainingmap_name = gscript.find_file(name=options['trainingmap'],
                                             element='vector')['fullname']
        if not trainingmap_name:
            gscript.fatal(
                _("Vector map <%s> not found") % options['trainingmap'])

    # define display driver
    driver = UserSettings.Get(group='display', key='driver', subkey='type')
    if driver == 'png':
        os.environ['GRASS_RENDER_IMMEDIATE'] = 'png'
    else:
        os.environ['GRASS_RENDER_IMMEDIATE'] = 'cairo'

    # launch application
    app = wx.App()

    # show main frame
    giface = StandaloneGrassInterface()
    frame = IClassMapFrame(parent=None, giface=giface)
    if not flags['m']:
        frame.CenterOnScreen()
    if group_name:
        frame.SetGroup(group_name, subgroup_name)
    if map_name:
        giface.WriteLog(_("Loading raster map <%s>...") % map_name)
        frame.trainingMapManager.AddLayer(map_name)
    if trainingmap_name:
        giface.WriteLog(_("Loading training map <%s>...") % trainingmap_name)
        frame.ImportAreas(trainingmap_name)

    frame.Show()
    if flags['m']:
        frame.Maximize()
    app.MainLoop()
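
This main() appears to be the entry point of the g.gui.iclass wrapper. Assuming an initialized GRASS session, it could also be launched programmatically; a sketch (the group, map, and training map names are hypothetical):

import grass.script as gscript

# launch the supervised classification GUI on a hypothetical imagery group;
# option names mirror the parser options used in main() above
gscript.run_command('g.gui.iclass',
                    group='landsat',
                    subgroup='landsat_bands',
                    map='lsat7_2002_10',
                    trainingmap='training_areas')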
Example #35
def get_location_proj_string(env=None):
    """Returns projection of environment in PROJ.4 format"""
    out = gs.read_command("g.proj", flags="jf", env=env)
    return out.strip()
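
A hypothetical call, assuming an initialized session and the gs alias used above:

# prints e.g. '+proj=longlat +datum=wgs84 +no_defs ...' in a lat-long location
print(get_location_proj_string())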
Example #36
def main():
    stats = grass.read_command('r.stats', input = options['map'], \
                                          sep = 'space', \
                                          nv = '*', \
                                          nsteps = '255', \
                                          flags = 'Anc').split('\n')[:-1]

    # res = cellsize
    res = grass.region()['nsres']

    zn = np.zeros((len(stats), 4), float)
    kl = np.zeros((len(stats), 2), float)
    prc = np.zeros((9, 2), float)

    for i in range(len(stats)):
        zn[i, 0], zn[i, 1] = map(float, stats[i].split(' '))
        if i == 0:
            zn[i, 2] = zn[i, 1] * res
        else:
            zn[i, 2] = zn[i, 1] + zn[i - 1, 2]
            zn[i, 3] = zn[i, 1] * (res**2)

    totcell = sum(zn[:, 1])
    print("Tot. cells %s" % (totcell))
    totarea = totcell * (res**2)
    print("Tot. area %s" % (totarea))
    maxdist = max(zn[:, 0])
    print("Max distance %s" % (maxdist))

    for i in range(len(stats)):
        kl[i, 0] = zn[i, 0]
        kl[i, 1] = zn[i, 2] / totcell

    # quantiles
    prc[0, 0], prc[0, 1] = findint(kl, 0.05), 0.05
    prc[1, 0], prc[1, 1] = findint(kl, 0.15), 0.15
    prc[2, 0], prc[2, 1] = findint(kl, 0.3), 0.3
    prc[3, 0], prc[3, 1] = findint(kl, 0.4), 0.4
    prc[4, 0], prc[4, 1] = findint(kl, 0.5), 0.5
    prc[5, 0], prc[5, 1] = findint(kl, 0.6), 0.6
    prc[6, 0], prc[6, 1] = findint(kl, 0.7), 0.7
    prc[7, 0], prc[7, 1] = findint(kl, 0.85), 0.85
    prc[8, 0], prc[8, 1] = findint(kl, 0.95), 0.95

    # plot
    plotImage(zn[:, 0], zn[:, 3], options['image'] + '_width_function.png',
              '-', 'x', 'W(x)', 'Width Function')

    print("===========================")
    print("Width Function | quantiles")
    print("===========================")
    for i in range(prc.shape[0]):
        print('%.0f | %s' % (prc[i, 0], prc[i, 1]))
    print('\n')
    print('Done!')
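
findint() is not shown in this example; it presumably looks up, in the cumulative table kl, the distance at which a given fraction of cells is reached. A minimal sketch under that assumption (the original helper may interpolate rather than take the nearest row):

import numpy as np

def findint(kl, value):
    # hypothetical reimplementation: return the distance (column 0) whose
    # cumulative fraction (column 1) is closest to the requested quantile
    idx = int(np.argmin(np.abs(kl[:, 1] - value)))
    return kl[idx, 0]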
Example #37
def main():
    # The following declarations may be used in the future.
    global GISDBASE, LAYERCOUNT, LASTFILE

    # Check if ImageMagick is available since it is essential
    if os.name == "nt":
        if grass.find_program("magick", "-version"):
            grass.verbose(_("printws: ImageMagick is available: OK!"))
        else:
            grass.fatal(
                "ImageMagick is not accessible. See documentation of m.printws module for details."
            )
    else:
        if grass.find_program("convert", "-version"):
            grass.verbose(_("printws: ImageMagick is available: OK!"))
        else:
            grass.fatal(
                "ImageMagick is not accessible. See documentation of m.printws module for details."
            )

    textmacros = {}
    # %NAME% style macros are kept for backward compatibility
    textmacros["%TIME24%"] = time.strftime("%H:%M:%S")
    textmacros["%DATEYMD%"] = time.strftime("%Y.%m.%d")
    textmacros["%DATEMDY%"] = time.strftime("%m/%d/%Y")
    if not hasPwd:
        textmacros["%USERNAME%"] = "(user unknown)"
    else:
        textmacros["%USERNAME%"] = pwd.getpwuid(os.getuid())[0]
    # using $ for macros in the future. New items should be created
    # exclusively as $macros later on
    textmacros["\$TIME24"] = textmacros["%TIME24%"]
    textmacros["\$DATEYMD"] = textmacros["%DATEYMD%"]
    textmacros["\$DATEMDY"] = textmacros["%DATEMDY%"]
    textmacros["\$USERNAME"] = textmacros["%USERNAME%"]

    textmacros["\$SPC"] = "\\u00A0"  # ?? d.text won't display this at string end hmmm

    # saves region for restoring at end
    # doing with official method:
    grass.use_temp_region()

    # getting/setting screen/print dpi ratio

    if len(options["dpi"]) > 0:
        dpioption = float(options["dpi"])
    else:
        dpioption = 150.0

    if len(options["screendpi"]) > 0:
        screendpioption = float(options["screendpi"])
    else:
        screendpioption = 100.0

    global UPSIZE
    UPSIZE = float(dpioption) / float(screendpioption)

    if len(options["input"]) > 0:
        displays = readworkspace(options["input"])
    else:
        quit()

    textmacros["%GXW%"] = options["input"]
    textmacros["\$GXW"] = textmacros["%GXW%"]

    displaycounter = 0

    # there could be multiple displays in a workspace so we loop them
    # each display is a whole and independent file assembly
    for key in displays:
        textmacros["%DISPLAY%"] = key
        textmacros["\$DISPLAY"] = key
        grass.verbose(_("printws: rendering display: " + key))
        displaycounter = displaycounter + 1
        layers = copy.deepcopy(displays[key])

        # extract extent information from the layers dict, then remove that item
        # extents[0-5]: w s e n minz maxz ;  extents[6-9]: window x y w h
        extents = layers[0]
        grass.verbose(
            "m.printws: EXTENTS from workspace:" + str(extents)
        )  # was debug message
        del layers[0]

        regionmode = ""
        if len(options["region"]) > 0:
            grass.run_command("g.region", region=options["region"])
            regionmode = "region"
        else:
            grass.run_command(
                "g.region", w=extents[0], s=extents[1], e=extents[2], n=extents[3]
            )
            regionmode = "window"

        # setting GRASS rendering environment

        # a dummy render file name is defined up front, because otherwise
        # switching on the cairo driver below would create an empty
        # map.png in the current directory
        os.environ["GRASS_RENDER_FILE"] = os.path.join(
            TMPDIR, str(os.getpid()) + "_DIS_" + str(00) + "_GEN_" + str(00) + ".png"
        )
        os.environ["GRASS_RENDER_IMMEDIATE"] = "cairo"
        os.environ["GRASS_RENDER_FILE_READ"] = "TRUE"
        os.environ["GRASS_RENDER_TRANSPARENT"] = "TRUE"
        os.environ["GRASS_RENDER_FILE_COMPRESSION"] = "0"
        os.environ["GRASS_RENDER_FILE_MAPPED"] = "TRUE"

        # reading further options and setting defaults

        if len(options["page"]) > 0:
            pageoption = options["page"]
        else:
            pageoption = "A4landscape"

        # parsing titles, etc.
        if len(options["font"]) > 0:
            isAsterisk = options["font"].find("*")
            if isAsterisk > 0:
                titlefont = getfontbypattern(options["font"].replace("*", ""))
            else:
                titlefont = options["font"]
        else:
            titlefont = getfontbypattern("Open")  # try to find something UTF-8
        grass.verbose(_("printws: titlefont: " + titlefont))

        if len(options["titlecolor"]) > 0:
            titlecolor = options["titlecolor"]
        else:
            titlecolor = "black"

        if len(options["maintitlesize"]) > 0:
            maintitlesize = converttommfrom(
                float(options["maintitlesize"]), options["layunits"]
            )
        else:
            maintitlesize = 10.0

        if len(options["subtitlesize"]) > 0:
            subtitlesize = converttommfrom(
                float(options["subtitlesize"]), options["layunits"]
            )
        else:
            subtitlesize = 7.0

        if len(options["pssize"]) > 0:
            pssize = converttommfrom(float(options["pssize"]), options["layunits"])
        else:
            pssize = 5.0

        # Please fasten your seatbelts :) Calculations start here.
        # -------------------------------------------------------------------

        pagesizes = getpagesizes(pageoption)
        pagesizesindots = dictodots(pagesizes, dpioption)

        # Leave space for titles up and ps down - still in mm !!
        upperspace = 0
        subtitletop = 0
        titletop = 0
        if len(options["maintitle"]) > 0:
            titletop = 0.4 * maintitlesize
            upperspace = upperspace + titletop + maintitlesize
        if len(options["subtitle"]) > 0:
            subtitletop = upperspace + 0.4 * subtitlesize
            upperspace = subtitletop + subtitlesize + 1
        lowerspace = 0
        if (
            (len(options["psundercentral"]) > 0)
            or (len(options["psunderright"]) > 0)
            or (len(options["psunderleft"]) > 0)
        ):
            lowerspace = lowerspace + pssize + 2

        os.environ["GRASS_RENDER_WIDTH"] = str(pagesizesindots["w"])
        os.environ["GRASS_RENDER_HEIGHT"] = str(pagesizesindots["h"])

        pagemargins = getpagemargins(options["pagemargin"], options["layunits"])
        pagemarginsindots = dictodots(pagemargins, dpioption)

        # Getting max drawing area in dots
        mxfd = getmaxframeindots(pagemarginsindots, pagesizesindots)
        maxframe = (
            str(mxfd["t"])
            + ","
            + str(mxfd["b"])
            + ","
            + str(mxfd["l"])
            + ","
            + str(mxfd["r"])
        )

        # convert font size in mm to percentage for d.text
        mxfmm = dictomm(mxfd, dpioption)
        maintitlesize = float(maintitlesize) / (mxfmm["b"] - mxfmm["t"]) * 100.0
        subtitlesize = float(subtitlesize) / (mxfmm["b"] - mxfmm["t"]) * 100.0

        pssize = float(pssize) / (mxfmm["r"] - mxfmm["l"]) * 100.0
        # subtitle location is another issue
        subtitletoppercent = 100.0 - subtitletop / (mxfmm["b"] - mxfmm["t"]) * 100.0
        titletoppercent = 100.0 - titletop / (mxfmm["b"] - mxfmm["t"]) * 100.0

        mapul = getmapUL(options["mapupperleft"], options["layunits"])
        mapulindots = dictodots(mapul, dpioption)

        mapsizes = getmapsizes(options["mapsize"], options["layunits"])
        mapsizesindots = dictodots(mapsizes, dpioption)

        # Correcting map area ratio to ratio of region edges
        # OR screen window edges depending on "regionmode"
        # for later:     grass.use_temp_region()
        ISLATLONG = False
        s = grass.read_command("g.region", flags="p")
        kv = grass.parse_key_val(s, sep=":")
        regioncols = float(kv["cols"].strip())
        regionrows = float(kv["rows"].strip())
        ewrestemp = kv["ewres"].strip()
        nsrestemp = kv["nsres"].strip()
        if ewrestemp.find(":") > 0:
            ISLATLONG = True
            ewrestemp = ewrestemp.split(":")
            ewres = (
                float(ewrestemp[0])
                + float(ewrestemp[1]) / 60.0
                + float(ewrestemp[2]) / 3600.0
            )
            nsrestemp = nsrestemp.split(":")
            nsres = (
                float(nsrestemp[0])
                + float(nsrestemp[1]) / 60.0
                + float(nsrestemp[2]) / 3600.0
            )
        else:
            ewres = float(ewrestemp)
            nsres = float(nsrestemp)

        sizex = regioncols * ewres
        sizey = regionrows * nsres

        grass.verbose(_("printws: sizex " + str(sizex)))
        grass.verbose(_("printws: sizey " + str(sizey)))

        if regionmode == "region":
            hregionratio = float(sizex) / float(sizey)
            grass.verbose(_("printws: REGION MODE -> region "))
        else:  # surprisingly doing the SAME
            # using screen window ratio for map area
            # next line was a test for this but didn't help on gadgets positioning
            # hregionratio = float(extents[8]) / float(extents[9])
            hregionratio = float(sizex) / float(sizey)
            grass.verbose(_("printws: REGION MODE -> window"))
        hmapratio = mapsizes["w"] / mapsizes["h"]

        grass.verbose(_("printws: raw mapsizes: " + str(mapsizesindots)))
        grass.verbose(_("printws: hr: " + str(hregionratio)))
        grass.verbose(_("printws: hm: " + str(hmapratio)))
        if hregionratio > hmapratio:
            grass.verbose(
                _("printws: Map area height correction / " + str(hregionratio))
            )
            mapsizes["h"] = mapsizes["w"] / hregionratio
        elif hregionratio < hmapratio:
            grass.verbose(
                _("printws: Map area width correction * " + str(hregionratio))
            )
            mapsizes["w"] = mapsizes["h"] * hregionratio
        mapsizesindots = dictodots(mapsizes, dpioption)

        # changing region resolution to match print resolution
        # to eliminate unnecessary CPU heating/data transfer
        # so as to make it faster
        # with only invisible detail loss.
        colsregiontomap = float(mapsizesindots["w"]) / regioncols
        rowsregiontomap = float(mapsizesindots["h"]) / regionrows

        newewres = ewres
        newnsres = nsres

        # if colsregiontomap < 1:
        # CHANGE: also enables raising of resolution to prevent
        # pixelation because of low resolution setting...
        newewres = ewres / colsregiontomap
        # if rowsregiontomap < 1:
        newnsres = nsres / rowsregiontomap

        # Note: it is not necessary to convert back to DMS for nsres / ewres
        # if ISLATLONG:
        #    newewresstr=decdeg2dms(newewres)
        #    newnsresstr=decdeg2dms(newnsres)
        # else:
        newewresstr = str(newewres)
        newnsresstr = str(newnsres)

        grass.run_command("g.region", ewres=newewresstr, nsres=newnsresstr)

        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # it seems that d.wms uses the GRASS_REGION from region info
        # others may also do so we set it
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        kv2 = {}
        kv2["e"] = kv["east"]
        kv2["n"] = kv["north"]
        kv2["s"] = kv["south"]
        kv2["w"] = kv["west"]
        kv2["ewres"] = newewresstr
        kv2["nsres"] = newnsresstr
        # kv2['rows']    #- autocalculated to resolution - no need to set explicitly
        # kv2['cols']    #- autocalculated to resolution - no need to set explicitly
        # grass.message(str(kv2))
        # grass.message(grass.region_env(**kv2))
        # grass.message(s)
        os.environ["GRASS_REGION"] = grass.region_env(**kv2)

        # Getting mapping area in dots
        # Correcting mxfd to leave space for title and subscript
        pagemarginstitles = copy.deepcopy(pagemargins)
        pagemarginstitles["t"] = pagemarginstitles["t"] + upperspace
        pagemarginstitles["b"] = pagemarginstitles["b"] + lowerspace
        pagemarginsindotstitles = dictodots(pagemarginstitles, dpioption)
        mxfdtitles = getmaxframeindots(pagemarginsindotstitles, pagesizesindots)

        mpfd = getmapframeindots(mapulindots, mapsizesindots, mxfdtitles)
        if pageoption == "Flexi":
            # For 'Flexi' page we modify the setup to create
            # a page containing only the map without margins
            grass.verbose(_("printws: pre Flexi mapframe: " + str(mpfd)))
            mpfd["b"] = mpfd["b"] - mpfd["t"]
            mpfd["t"] = 0
            mpfd["r"] = mpfd["r"] - mpfd["l"]
            mpfd["l"] = 0
            os.environ["GRASS_RENDER_WIDTH"] = str(mpfd["r"])
            os.environ["GRASS_RENDER_HEIGHT"] = str(mpfd["b"])
            grass.verbose(_("printws: post Flexi mapframe: " + str(mpfd)))
        mapframe = (
            str(mpfd["t"])
            + ","
            + str(mpfd["b"])
            + ","
            + str(mpfd["l"])
            + ","
            + str(mpfd["r"])
        )

        grass.verbose(_("printws: DOT VALUES ARE:"))
        grass.verbose(_("printws: maxframe: " + str(mxfd)))
        grass.verbose(_("printws: maxframe: " + maxframe))
        grass.verbose(_("printws: mapframe: " + str(mpfd)))
        grass.verbose(_("printws: mapframe: " + mapframe))
        grass.verbose(_("printws: page: " + str(pagesizesindots)))
        grass.verbose(_("printws: margins: " + str(pagemarginsindots)))
        grass.verbose(_("printws: mapUL: " + str(mapulindots)))
        grass.verbose(_("printws: mapsizes (corrected): " + str(mapsizesindots)))
        grass.verbose(_("printws: ewres (corrected): " + str(newewres)))
        grass.verbose(_("printws: nsres (corrected): " + str(newnsres)))

        # quit()

        # ------------------- INMAP -------------------

        # Do not limit -map. It was: -limit map 720000000 before...
        # So we can grow on disk as long as it lasts
        imcommand = (
            "convert  -limit memory 720000000 -units PixelsPerInch -density "
            + str(int(dpioption))
            + " "
        )

        if os.name == "nt":
            imcommand = "magick " + imcommand

        os.environ["GRASS_RENDER_FRAME"] = mapframe

        grass.verbose(_("printws: Rendering: the following layers: "))
        lastopacity = "-1"

        for lay in layers:
            grass.verbose(_(lay[1] + " at: " + lay[0] + " opacity"))
            if lay[0] == "1":
                if lastopacity != "1":
                    LASTFILE = os.path.join(
                        TMPDIR,
                        str(os.getpid())
                        + "_DIS_"
                        + str(displaycounter)
                        + "_GEN_"
                        + str(LAYERCOUNT)
                        + "."
                        + TMPFORMAT,
                    )
                    os.environ["GRASS_RENDER_FILE"] = LASTFILE
                    LAYERCOUNT = LAYERCOUNT + 2
                    imcommand = imcommand + " " + LASTFILE
                    lastopacity = "1"
                render(lay[1], lay[2], lay[3])
            else:
                lastopacity = lay[0]
                LASTFILE = os.path.join(
                    TMPDIR,
                    str(os.getpid())
                    + "_DIS_"
                    + str(displaycounter)
                    + "_GEN_"
                    + str(LAYERCOUNT)
                    + "."
                    + TMPFORMAT,
                )
                LAYERCOUNT = LAYERCOUNT + 2
                os.environ["GRASS_RENDER_FILE"] = LASTFILE
                grass.verbose("LAY: " + str(lay))
                render(lay[1], lay[2], lay[3])
                imcommand = (
                    imcommand
                    + " \\( "
                    + LASTFILE
                    + " -channel a -evaluate multiply "
                    + lay[0]
                    + " +channel \\)"
                )

        # setting resolution back to pre-script state since map rendering is
        # finished
        # CHANGE: not necessary anymore since we use temp_region now
        # However, since we did set GRASS_REGION, let's redo it here

        os.environ.pop("GRASS_REGION")

        # ------------------- OUTSIDE MAP texts, etc -------------------
        if pageoption == "Flexi":
            grass.verbose(
                _("m.printws: WARNING! Felxi mode, will not create titles, etc...")
            )
        else:
            os.environ["GRASS_RENDER_FRAME"] = maxframe

            # named dict avoids shadowing the built-in "dict"
            textdict = {}
            textdict["task"] = "d.text"
            textdict["color"] = titlecolor
            textdict["font"] = titlefont
            textdict["charset"] = "UTF-8"

            if len(options["maintitle"]) > 1:
                textdict["text"] = decodetextmacros(options["maintitle"], textmacros)
                textdict["at"] = "50," + str(titletoppercent)
                textdict["align"] = "uc"
                textdict["size"] = str(maintitlesize)
                render(str(textdict), textdict, {})

            if len(options["subtitle"]) > 1:
                textdict["text"] = decodetextmacros(options["subtitle"], textmacros)
                textdict["at"] = "50," + str(subtitletoppercent)
                textdict["align"] = "uc"
                textdict["size"] = str(subtitlesize)
                render(str(textdict), textdict, {})

            textdict["size"] = str(pssize)

            if len(options["psundercentral"]) > 1:
                textdict["text"] = decodetextmacros(options["psundercentral"], textmacros)
                textdict["at"] = "50,1"
                textdict["align"] = "lc"
                render(str(textdict), textdict, {})
            if len(options["psunderleft"]) > 1:
                textdict["text"] = decodetextmacros(options["psunderleft"], textmacros)
                textdict["at"] = "0,1"
                textdict["align"] = "ll"
                render(str(textdict), textdict, {})
            if len(options["psunderright"]) > 1:
                textdict["text"] = decodetextmacros(options["psunderright"], textmacros)
                textdict["at"] = "100,1"
                textdict["align"] = "lr"
                render(str(textdict), textdict, {})

        # ------------------- GENERATING OUTPUT FILE -------------------

        if len(options["output"]) > 1:
            output = options["output"]
        else:
            output = "map_" + str(os.getpid())

        # remove extension AND display number and naming if any
        output = os.path.splitext(output)[0]
        output = re.sub("_DISPLAY_[0-9]+_.*", "", output)

        if len(options["format"]) > 1:
            extension = options["format"]
        else:
            extension = "pdf"

        displaypart = ""
        if len(displays) > 1:
            displaypart = "_DISPLAY_" + str(displaycounter) + "_" + key

        pagedata = getpagedata(pageoption)
        # params= ' -extent '+str(pagesizesindots['w'])+'x'+str(pagesizesindots['h'])+' -gravity center -compress jpeg -page '+pagedata['page']+' '+pagedata['parameters']+' -units PixelsPerInch -density '+str(dpioption)+'x'+str(dpioption)+' '
        params = (
            " -compress jpeg -quality 92 "
            + pagedata["parameters"]
            + " -units PixelsPerInch -density "
            + str(int(dpioption))
            + " "
        )

        imcommand = (
            imcommand
            + " -layers flatten "
            + params
            + '"'
            + output
            + displaypart
            + "."
            + extension
            + '"'
        )

        grass.verbose(_("printws: And the imagemagick command is... " + imcommand))
        os.system(imcommand)

    if not flags["d"]:
        grass.verbose(_("printws: Doing graceful cleanup..."))
        os.system("rm " + os.path.join(TMPDIR, str(os.getpid()) + "*_GEN_*"))
        if REMOVE_TMPDIR:
            try_rmdir(TMPDIR)
        else:
            grass.message(
                "\n%s\n" % _("printws: Temp dir was not removed. Do it yourself, please:")
            )
            sys.stderr.write("%s <---- this\n" % TMPDIR)

    # restoring pre-script region
    # - not necessary as we are using grass.use_temp_region() in the future

    return 0
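
Several helpers used above (dictodots, dictomm) are not shown; they convert layout dictionaries between millimetres and printer dots at the chosen DPI. A minimal sketch under the assumption of 25.4 mm per inch (the real m.printws helpers may round differently):

def dictodots(d, dpi):
    # hypothetical sketch: convert each millimetre value to whole printer dots
    return {k: int(round(v / 25.4 * dpi)) for k, v in d.items()}

def dictomm(d, dpi):
    # hypothetical sketch: convert each dot value back to millimetres
    return {k: v * 25.4 / dpi for k, v in d.items()}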
Example #38
def main():
    # Hard-coded parameters needed for USGS datasets
    usgs_product_dict = {
        "ned": {
            'product': 'National Elevation Dataset (NED)',
            'dataset': {
                'ned1sec': (1. / 3600, 30, 100),
                'ned13sec': (1. / 3600 / 3, 10, 30),
                'ned19sec': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': ['1 x 1 degree', '15 x 15 minute'],
            'format': 'IMG',
            'extension': 'img',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'bilinear',
            'url_split': '/'
        },
        "nlcd": {
            'product': 'National Land Cover Database (NLCD)',
            'dataset': {
                'National Land Cover Database (NLCD) - 2001':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2006':
                (1. / 3600, 30, 100),
                'National Land Cover Database (NLCD) - 2011':
                (1. / 3600, 30, 100)
            },
            'subset': {
                'Percent Developed Imperviousness', 'Percent Tree Canopy',
                'Land Cover'
            },
            'extent': ['3 x 3 degree'],
            'format': 'GeoTIFF',
            'extension': 'tif',
            'zip': True,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "naip": {
            'product': 'USDA National Agriculture Imagery Program (NAIP)',
            'dataset': {
                'Imagery - 1 meter (NAIP)': (1. / 3600 / 27, 1, 3)
            },
            'subset': {},
            'extent': [
                '3.75 x 3.75 minute',
            ],
            'format': 'JPEG2000',
            'extension': 'jp2',
            'zip': False,
            'srs': 'wgs84',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        },
        "lidar": {
            'product': 'Lidar Point Cloud (LPC)',
            'dataset': {
                'Lidar Point Cloud (LPC)': (1. / 3600 / 9, 3, 10)
            },
            'subset': {},
            'extent': [''],
            'format': 'LAS,LAZ',
            'extension': 'las,laz',
            'zip': True,
            'srs': '',
            'srs_proj4': "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
            'interpolation': 'nearest',
            'url_split': '/'
        }
    }

    # Set GRASS GUI options and flags to python variables
    gui_product = options['product']

    # Variable assigned from USGS product dictionary
    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extensions = tuple(nav_string['extension'].split(','))
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    has_pdal = gscript.find_program(pgm='v.in.pdal')
    if gui_product == 'lidar':
        gui_dataset = 'Lidar Point Cloud (LPC)'
        product_tag = nav_string['product']
        if not has_pdal:
            gscript.warning(
                _("Module v.in.pdal is missing,"
                  " any downloaded data will not be processed."))
    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']
    memory = options['memory']
    nprocs = options['nprocs']

    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not os.path.isdir(work_dir):
        gscript.fatal(
            _("Directory <{}> does not exist."
              " Please create it.").format(work_dir))

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == 'lidar' and options['resolution']:
        product_resolution = float(options['resolution'])

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(
            _("The default resampling method for product {product} is {res}").
            format(product=gui_product, res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    min_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['w'], gregion['s']),
                                      proj_out=wgs84,
                                      separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['e'], gregion['n']),
                                      proj_out=wgs84,
                                      separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://tnmaccess.nationalmap.gov/api/v1/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_message = _(
        "Possibly, the query has timed out. Check network configuration and try again."
    )
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(
            _("HTTP(S) error from USGS TNM API:"
              " {code}: {reason} ({instructions})").format(
                  reason=error.reason,
                  code=error.code,
                  instructions=try_again_message))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(
            _("Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_message))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == 'lidar' and options['title_filter']:
            return_JSON['items'] = [
                item for item in return_JSON['items']
                if options['title_filter'] in item['title']
            ]
            return_JSON['total'] = len(return_JSON['items'])

    except Exception:
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    # TODO: Make the tolerance configurable.
    # Some combinations produce >10 byte differences.
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir,
                                              ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # If file exists, do not download,
            # but if incomplete (e.g. interrupted download), redownload.
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size -
                       TNM_file_size) > size_diff_tolerance:
                    gscript.verbose(
                        _("Size of local file {filename} ({local_size}) differs"
                          " from a file size specified in the API ({api_size})"
                          " by {difference} bytes"
                          " which is more than tolerance ({tolerance})."
                          " It will be downloaded again.").format(
                              filename=local_file_path,
                              local_size=existing_local_file_size,
                              api_size=TNM_file_size,
                              difference=abs(existing_local_file_size -
                                             TNM_file_size),
                              tolerance=size_diff_tolerance,
                          ))
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(
            _("TNM API ERROR or Zero tiles available for given input parameters."
              ))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)

    # formats JSON size from bytes into needed units for combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        if total_size >= 1e9:
            total_size_str = "{0:.2f} GB".format(total_size * 1e-9)
        elif total_size >= 1e6:
            total_size_str = "{0:.2f} MB".format(total_size * 1e-6)
        else:
            total_size_str = "{0} bytes".format(total_size)
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(
                        gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(
            _("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(
                _("USGS download request has timed out. Network or formatting error."
                  ))
        except Exception:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported;
    # pre-extraction stats are unreliable, so stats are collected during extraction
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count == 0:
            pass
        else:
            gscript.message("Extracting data...")
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, extract again
                                    if os.path.getmtime(
                                            extracted_tile) < os.path.getmtime(
                                                z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(
                    _("Unable to locate or extract IMG file '{filename}'"
                      " from ZIP archive '{zipname}': {error}").format(
                          filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(
            _("Extracted {extracted} new tiles and"
              " used {used} existing tiles").format(
                  used=used_existing_extracted_tiles_num,
                  extracted=extracted_tiles_num))
        if old_extracted_tiles_num:
            gscript.verbose(
                _("Found {removed} existing tiles older"
                  " than the corresponding downloaded archive").format(
                      removed=old_extracted_tiles_num))
        if removed_extracted_tiles_num:
            gscript.verbose(
                _("Removed {removed} existing tiles").format(
                    removed=removed_extracted_tiles_num))

    if gui_product == 'lidar' and not has_pdal:
        gscript.fatal(
            _("Module v.in.pdal is missing,"
              " cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is the naming of the maps (it is a smaller
    # issue for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists(
                    "raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _("Importing and reprojecting {name}"
                            " ({count} out of {total})...").format(
                                name=LT_file_name,
                                count=i + 1,
                                total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != 'lidar':
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i,
                                                      LT_layer_name),
                        target=run_file_import,
                        kwargs=dict(identifier=i,
                                    results=results,
                                    input=t,
                                    output=LT_layer_name,
                                    resolution='value',
                                    resolution_value=product_resolution,
                                    extent="region",
                                    resample=product_interpolation,
                                    memory=memory))
                else:
                    srs = options['input_srs']
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i,
                                                      LT_layer_name),
                        target=run_lidar_import,
                        kwargs=dict(identifier=i,
                                    results=results,
                                    input=t,
                                    output=LT_layer_name,
                                    input_srs=srs if srs else None))
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(
                            _("Parallel import and reprojection failed."
                              " Try running with nprocs=1."))
                    else:
                        gscript.fatal(
                            _("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count

    gscript.verbose(
        _("Imported {imported} new tiles and"
          " used {used} existing tiles").format(
              used=used_existing_imported_tiles_num,
              imported=imported_tiles_num))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
    rst_params = dict(tension=25, smooth=0.1, npmin=100)
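    # (tension, smooth and npmin are passed to v.surf.rst below: spline
    # tension, smoothing, and minimum number of points for the approximation)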

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region',
                                        res=product_resolution,
                                        flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [
                            name + '.' + i for name in patch_names
                        ]
                        output = gui_output_layer + '.' + i
                        gscript.run_command('r.patch',
                                            input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == 'lidar':
                    gscript.run_command('v.patch',
                                        flags='nzb',
                                        input=patch_names,
                                        output=gui_output_layer)
                    gscript.run_command('v.surf.rst',
                                        input=gui_output_layer,
                                        elevation=gui_output_layer,
                                        nprocs=nprocs,
                                        **rst_params)
                else:
                    gscript.run_command('r.patch',
                                        input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added"
                            ).format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove the imported tiles unless they should be preserved (-k flag)
                if not preserve_imported_tiles:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [
                                name + '.' + i for name in patch_names
                            ]
                            gscript.run_command('g.remove',
                                                type='raster',
                                                name=patch_names_i,
                                                flags='f')
                    elif gui_product == 'lidar':
                        gscript.run_command('g.remove',
                                            type='vector',
                                            name=patch_names +
                                            [gui_output_layer],
                                            flags='f')
                    else:
                        gscript.run_command('g.remove',
                                            type='raster',
                                            name=patch_names,
                                            flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename',
                                        raster=(patch_names[0] + '.' + i,
                                                gui_output_layer + '.' + i))
            elif gui_product == 'lidar':
                if product_resolution:
                    gscript.run_command('g.region',
                                        res=product_resolution,
                                        flags='a')
                gscript.run_command('v.surf.rst',
                                    input=patch_names[0],
                                    elevation=gui_output_layer,
                                    nprocs=nprocs,
                                    **rst_params)
                if not preserve_imported_tiles:
                    gscript.run_command('g.remove',
                                        type='vector',
                                        name=patch_names[0],
                                        flags='f')
            else:
                gscript.run_command('g.rename',
                                    raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(
                _("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(
            _("Error in getting or importing the data (see above). Please retry."
              ))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = (
            "<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors',
                            map=gui_output_layer,
                            color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite',
                            red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2',
                            blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
Example #39
def get_current_mapset():
    """Get curret mapset name as a string"""
    return gscript.read_command('g.mapset', flags='p').strip()
Example #40
def main():
    # Get user inputs
    friction_original = options['friction']  # Input friction map
    out = options['out']  # Output totalcost raster
    maxcost = options['maxcost']  # Max cost distance in cost units
    knight = "k" if flags[
        "k"] else ""  # Use Knight's move in r.cost instead Queen's move (a bit slower, but more accurate)
    mempercent = int(
        options['mempercent']
    )  # Percent of map to keep in memory in r.cost calculation

    # Error if no valid friction surface is given
    if not grass.find_file(friction_original)['name']:
        grass.message(_("Friction surface <%s> not found") % friction_original)
        sys.exit()

    # Calculate cost distances / edge effect distances from the friction map. Result is in map units
    info = grass.raster_info(friction_original)  # Read and get raster info
    # Minimum cost distance / edge effect distance
    edgeeffect_min = float(maxcost) / float(info['max'])
    # Maximum cost distance / edge effect distance
    edgeeffect_max = float(maxcost) / float(info['min'])
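    # For illustration (hypothetical numbers): maxcost=1000 with friction
    # values ranging from 2 (min) to 10 (max) gives edge effect distances
    # between 1000/10 = 100 and 1000/2 = 500 map units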

    # If "Only calculate edge effect" is selected
    if flags['e']:
        grass.message("Minimum distance / edge effect: " + str(edgeeffect_min))
        grass.message("Maximum distance / edge effect: " + str(edgeeffect_max))
        sys.exit()

    # If output file exists, but overwrite option isn't selected
    if not grass.overwrite():
        if grass.find_file(out)['name']:
            grass.message(_("Output raster map <%s> already exists") % out)
            sys.exit()

    # Get raster calculation region information
    regiondata = grass.read_command("g.region", flags='p')
    regvalues = grass.parse_key_val(regiondata, sep=':')
    # Assign variables for necessary region info bits
    nsres = float(regvalues['nsres'])
    ewres = float(regvalues['ewres'])
    # Calculate the mean resolution
    meanres = (nsres + ewres) / 2.0

    # Create a list holding cell coordinates
    coordinatelist = []  # will be populated with coordinates below
    # Read input raster coordinates and values
    rasterdata = grass.read_command('r.stats', flags="1gn",
                                    input=friction_original)
    # Split the r.stats output into list entries
    rastervalues = rasterdata.split()
    # The rastervalues list is structured as
    # [x1, y1, value1, x2, y2, value2, ..., xn, yn, valuen],
    # so iterate through it with a step of 3 and build a new list that
    # holds each coordinate pair as a string: ["x1,y1", "x2,y2", ..., "xn,yn"]
    for val in range(0, len(rastervalues), 3):
        coordinatelist.append(rastervalues[val] + "," + rastervalues[val + 1])
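    # For illustration (hypothetical values): an r.stats -1gn output of
    # "636000 222000 5.2 636030 222000 4.8" yields
    # coordinatelist == ["636000,222000", "636030,222000"]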

    # This is the number of cells (and hence cost surfaces) to be used
    n_coords = len(coordinatelist)

    # Create temporary filenames with unique process id in their name. Add each name to the tmp_layers list.
    pid = os.getpid()
    cost1 = str("tmp_totalcost_cost1_%d" % pid)
    tmp_layers.append(cost1)
    cost2 = str("tmp_totalcost_cost2_%d" % pid)
    tmp_layers.append(cost2)
    cost3 = str("tmp_totalcost_cost3_%d" % pid)
    tmp_layers.append(cost3)
    cost4 = str("tmp_totalcost_cost4_%d" % pid)
    tmp_layers.append(cost4)
    friction = str("tmp_friction_%d" % pid)
    tmp_layers.append(friction)
    calctemp = str("tmp_calctemp_%d" % pid)
    tmp_layers.append(calctemp)

    # Assuming the friction values are per map unit (not per cell), the
    # raster should be multiplied by the region resolution. This is because
    # r.cost just uses cell values and adds them - a slightly different
    # approach compared to ArcGIS, which compensates for the resolution
    # automatically. The result is then divided by maxcost so that the
    # r.cost max_cost value can be fixed to 1 (it doesn't accept floating
    # point values, hence the workaround).
    grass.mapcalc("$outmap = $inmap * $res / $mcost",
                  outmap=friction,
                  inmap=friction_original,
                  res=meanres,
                  mcost=maxcost)
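    # For illustration (hypothetical numbers): with meanres=30, maxcost=1000
    # and a friction value of 0.5 per map unit, each cell contributes
    # 0.5 * 30 / 1000 = 0.015, so an accumulated r.cost value of 1
    # corresponds to the full maxcost distance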

    # Do the main loop
    # Iterate through the cells with a step of 4
    for c in range(0, n_coords, 4):

        # Start four r.cost processes with different coordinates. The first
        # process (costproc1) is always started, but the other three require
        # that a successive coordinate exists in the list, because of the
        # step of 4 used in the loop. In case there are no coordinates left,
        # assign the redundant cost outputs null() values so they won't be
        # included in the map calc.
        try:
            costproc1 = grass.start_command(
                'r.cost',
                overwrite=True,
                flags=knight,
                input=friction,
                output=cost1,
                start_coordinates=coordinatelist[c],
                max_cost=1,
                percent_memory=mempercent)
            if c + 1 < n_coords:
                costproc2 = grass.start_command(
                    'r.cost',
                    overwrite=True,
                    flags=knight,
                    input=friction,
                    output=cost2,
                    start_coordinates=coordinatelist[c + 1],
                    max_cost=1,
                    percent_memory=mempercent)
            else:
                cost2 = "null()"
            if c + 2 < n_coords:
                costproc3 = grass.start_command(
                    'r.cost',
                    overwrite=True,
                    flags=knight,
                    input=friction,
                    output=cost3,
                    start_coordinates=coordinatelist[c + 2],
                    max_cost=1,
                    percent_memory=mempercent)
            else:
                cost3 = "null()"
            if c + 3 < n_coords:
                costproc4 = grass.start_command(
                    'r.cost',
                    overwrite=True,
                    flags=knight,
                    input=friction,
                    output=cost4,
                    start_coordinates=coordinatelist[c + 3],
                    max_cost=1,
                    percent_memory=mempercent)
            else:
                cost4 = "null()"
        except:
            grass.message("Error with r.cost: " + str(sys.exc_info()[0]))
            sys.exit()

        # For the very first iteration just add those first r.cost results together
        if c == 0:
            # Wait for the r.cost processes to stop before moving on
            costproc1.wait()
            costproc2.wait()
            costproc3.wait()
            costproc4.wait()
            # Do the map algebra: each output cell counts how many of the
            # four cost surfaces reach it (each non-null cost cell adds 1)
            try:
                grass.mapcalc(
                    "$outmap = if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1) + if(isnull($tempmap3),0,1) + if(isnull($tempmap4),0,1)",
                    outmap=out,
                    tempmap1=cost1,
                    tempmap2=cost2,
                    tempmap3=cost3,
                    tempmap4=cost4,
                    overwrite=True)
            except:
                grass.message("Error with mapcalc: " + str(sys.exc_info()[0]))
                sys.exit()
        # If it's not the first iteration...
        else:
            # Rename the output of the previous mapcalc iteration so that it
            # can be used in the next mapcalc expression (the "x = x + y"
            # logic apparently doesn't work with the same map on both sides)
            try:
                # If pygrass gets fixed, replace g.rename with those commented out pygrass-based lines as they seem to be a bit faster (are they really?)
                #map = pygrass.raster.RasterRow(out)
                #map.name = calctemp
                grass.run_command('g.rename',
                                  overwrite=True,
                                  rast=out + "," + calctemp)
            except:
                grass.message("Error: " + str(sys.exc_info()[0]))
                sys.exit()
            # Wait for the r.cost processes to stop before moving on
            costproc1.wait()
            costproc2.wait()
            costproc3.wait()
            costproc4.wait()
            # Merge the r.cost results and the cumulative map from previous iteration
            try:
                grass.mapcalc(
                    "$outmap = if(isnull($inmap),0,$inmap) + if(isnull($tempmap1),0,1) + if(isnull($tempmap2),0,1) + if(isnull($tempmap3),0,1) + if(isnull($tempmap4),0,1)",
                    inmap=calctemp,
                    outmap=out,
                    tempmap1=cost1,
                    tempmap2=cost2,
                    tempmap3=cost3,
                    tempmap4=cost4,
                    overwrite=True)
            except:
                grass.message("Error with mapcalc: " + str(sys.exc_info()[0]))
                sys.exit()

    # Finally print the edge effect values
    grass.message("---------------------------------------------")
    grass.message("Minimum distance / edge effect: " + str(edgeeffect_min))
    grass.message("Maximum distance / edge effect: " + str(edgeeffect_max))
Example #41
def main():
    vector = options['map']
    table = options['table']
    layer = options['layer']
    columns = options['columns']
    key = options['key']

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    if not grass.find_file(vector, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    map_name = vector.split('@')[0]

    if not table:
        if layer == '1':
            grass.verbose(
                _("Using vector map name as table name: <%s>") % map_name)
            table = map_name
        else:
            # to avoid tables with identical names on higher layers
            table = "%s_%s" % (map_name, layer)
            grass.verbose(
                _("Using vector map name extended by layer number as table name: <%s>"
                  ) % table)
    else:
        grass.verbose(_("Using user specified table name: %s") % table)

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags='c', quiet=True)
    grass.verbose(
        _("Creating new DB connection based on default mapset settings..."))
    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    schema = kv['schema']

    database2 = database.replace('$MAP/', map_name + '/')

    # maybe there is already a table linked to the selected layer?
    nuldev = open(os.devnull, 'w')
    try:
        grass.vector_db(map_name, stderr=nuldev)[int(layer)]
        grass.fatal(_("There is already a table linked to layer <%s>") % layer)
    except KeyError:
        pass

    # maybe there is already a table with that name?
    tables = grass.read_command('db.tables',
                                flags='p',
                                database=database2,
                                driver=driver,
                                stderr=nuldev)
    tables = decode(tables)

    if table not in tables.splitlines():
        colnames = []
        column_def = []
        if columns:
            column_def = []
            for x in ' '.join(columns.split()).split(','):
                colname = x.lower().split()[0]
                if colname in colnames:
                    grass.fatal(
                        _("Duplicate column name '%s' not allowed") % colname)
                colnames.append(colname)
                column_def.append(x)

        # if the key column is not among them, create it:
        if key not in colnames:
            column_def.insert(0, "%s integer" % key)
        column_def = ','.join(column_def)
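        # For illustration (hypothetical input): columns="name varchar(20),
        # value double precision" with key="cat" gives
        # column_def == "cat integer,name varchar(20),value double precision"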

        grass.verbose(_("Creating table with columns (%s)...") % column_def)

        sql = "CREATE TABLE %s (%s)" % (table, column_def)
        try:
            grass.run_command('db.execute',
                              database=database2,
                              driver=driver,
                              sql=sql)
        except CalledModuleError:
            grass.fatal(_("Unable to create table <%s>") % table)

    # connect the map to the DB:
    if schema:
        table = '{schema}.{table}'.format(schema=schema, table=table)
    grass.verbose(_("Connecting new table to vector map <%s>...") % map_name)
    grass.run_command('v.db.connect',
                      quiet=True,
                      map=map_name,
                      database=database,
                      driver=driver,
                      layer=layer,
                      table=table,
                      key=key)

    # finally we have to add cats into the attribute DB to make
    # modules such as v.what.rast happy: (creates new row for each
    # vector line):
    try:
        grass.run_command('v.to.db',
                          map=map_name,
                          layer=layer,
                          option='cat',
                          column=key,
                          qlayer=layer)
    except CalledModuleError:
        # remove link
        grass.run_command('v.db.connect',
                          quiet=True,
                          flags='d',
                          map=map_name,
                          layer=layer)
        return 1

    grass.verbose(_("Current attribute table links:"))
    if grass.verbosity() > 2:
        grass.run_command('v.db.connect', flags='p', map=map_name)

    # write cmd history:
    grass.vector_history(map_name)

    return 0
Example #42
#! written by ADW on 16 Jan 2015
#  Released to the public domain

### NOTE! This file was in the iceWGS84 directory when it was run
### Well, actually it was all just copy/paste-run through ipython

# Note: before writing or using this script, I had:
# (1) Renamed the Dyke et al. (2003) files
# (2) Imported them into a GRASS GIS location with a projection given by the original files
# (3) Projected these to this location

# The land and water boxes were just really messy in their projection to
# WGS84 (well, their unprojection) and they weren't so necessary. So I
# remove them here after projecting and leave only the ice and the lakes

icenames = grass.read_command('g.list', type='vect',
                              pattern='ice??????').split('\n')[:-1]

for icename in icenames:
    orig_new_name = 'ice_with_land_ocean_' + icename[-6:]
    grass.run_command('g.rename', vector=icename + ',' + orig_new_name)

for icename in icenames:
    orig_new_name = 'ice_with_land_ocean_' + icename[-6:]
    grass.run_command('v.extract',
                      input=orig_new_name,
                      output=icename,
                      where="SYMB='LAKE' OR SYMB='ICE'",
                      overwrite=True)

# Some seams have appeared at particular lines of latitude. Hm. Well, let's
# just dissolve these boundaries between contiguous areas with the same
Example #43
def main():
    import operator

    import matplotlib  # required by Windows
    matplotlib.use('wxAGG')  # required by Windows
    import matplotlib.pyplot as plt
    import numpy as np  # used for the group medians below

    # input
    vector = options['map']
    column = options['column']
    group_by = options['group_by'] if options['group_by'] else None
    output = options['plot_output'] if options['plot_output'] else None
    where = options['where'] + " AND " + column + " IS NOT NULL" \
            if options['where'] else column + " IS NOT NULL"
    sort = options['order'] if options['order'] else None
    if sort == 'descending':
        reverse = True
    elif sort == 'ascending':
        reverse = False
    else:
        reverse = None
    # list() so the filter result can be passed as a module option (Python 3)
    cols = list(filter(None, [group_by, column]))
    flag_h = not flags['h']
    flag_o = not flags['o']
    flag_n = flags['n']
    flag_r = flags['r']
    
    # Get data with where clause
    if where:
        df=[x for x in gscript.read_command('v.db.select',
                                            map_=vector,
                                            column=cols,
                                            where=where,
                                            flags='c').splitlines()]
    # Get all column data
    else:
        df=[x for x in gscript.read_command('v.db.select',
                                            map_=vector,
                                            column=cols,
                                            flags='c').splitlines()]
    # for grouped boxplot
    if group_by:
        # Split columns and create list with data and with labels
        df=[x.split('|') for x in df]
        vals = [float(i[1]) for i in df]
        groups = [i[0] for i in df] 
        uid = list(set(groups))
        data = []
        sf = []
        for i,m in enumerate(uid):
            a = [ j for j, grp in enumerate(groups) if grp == m]
            data.append([vals[i] for i in a])
            sf.append([m, np.median([vals[i] for i in a])])
        
        # Order boxes by group median
        if sort:
            sf.sort(key=operator.itemgetter(1), reverse=reverse)
        sf = [i[0] for i in sf]
        # map each group label to its position in the sorted order
        ii = {e: i for i, e in enumerate(sf)}
        # box positions, in the same order as the uid/data lists
        sfo = [ii[e] for i, e in enumerate(uid) if e in ii]
      
        # Draw boxplot
        plt.boxplot(data, notch=flag_n, sym='gD', labels=uid, vert=flag_h,
                        showfliers=flag_o, positions=sfo)
    else:
        data=[float(x) for x in df]
        plt.boxplot(data, notch=flag_n, sym='gD', vert=flag_h,
                        showfliers=flag_o)
    if flag_r:
        plt.xticks(rotation=90)
    plt.tight_layout()
    if output:
        plt.savefig(output)
    else:
        plt.show()
Example #44
def main():
    global tile, tmpdir, in_temp

    in_temp = False
    
    # to support SRTM water body
    swbd = False

    input = options['input']
    output = options['output']
    one = flags['1']

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)
    if '+proj' not in kv or kv['+proj'] != 'longlat':
        grass.fatal(_("This module only operates in LatLong locations"))

    # use these from now on:
    infile = input
    while infile[-4:].lower() in ['.hgt', '.zip', '.raw']:
        infile = infile[:-4]
    (fdir, tile) = os.path.split(infile)

    if not output:
        tileout = tile
    else:
        tileout = output

    if '.hgt' in input:
        suff = '.hgt'
    else:
        suff = '.raw'
        swbd = True

    zipfile = "{im}{su}.zip".format(im=infile, su=suff)
    hgtfile = "{im}{su}".format(im=infile, su=suff)

    if os.path.isfile(zipfile):
        # really a ZIP file?
        if not zfile.is_zipfile(zipfile):
            grass.fatal(_("'%s' does not appear to be a valid zip file.") % zipfile)

        is_zip = True
    elif os.path.isfile(hgtfile):
        # try and see if it's already unzipped
        is_zip = False
    else:
        grass.fatal(_("File '%s' or '%s' not found") % (zipfile, hgtfile))

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    if is_zip:
        shutil.copyfile(zipfile, os.path.join(tmpdir,
                                              "{im}{su}.zip".format(im=tile,
                                                                    su=suff)))
    else:
        shutil.copyfile(hgtfile, os.path.join(tmpdir,
                                              "{im}{su}".format(im=tile[:7],
                                                                su=suff)))
    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True


    zipfile = "{im}{su}.zip".format(im=tile, su=suff)
    hgtfile = "{im}{su}".format(im=tile[:7], su=suff)

    bilfile = tile + ".bil"

    if is_zip:
        # unzip & rename data file:
        grass.message(_("Extracting '%s'...") % infile)
        try:
            zf=zfile.ZipFile(zipfile)
            zf.extractall()
        except:
            grass.fatal(_("Unable to unzip file."))

    grass.message(_("Converting input file to BIL..."))
    os.rename(hgtfile, bilfile)

    north = tile[0]
    ll_latitude = int(tile[1:3])
    east = tile[3]
    ll_longitude = int(tile[4:7])

    # are we on the southern hemisphere? If yes, make LATITUDE negative.
    if north == "S":
        ll_latitude *= -1

    # are we west of Greenwich? If yes, make LONGITUDE negative.
    if east == "W":
        ll_longitude *= -1

    # Calculate Upper Left from Lower Left
    ulxmap = "%.1f" % ll_longitude
    # SRTM90 tile size is 1 deg:
    ulymap = "%.1f" % (ll_latitude + 1)
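    # For illustration: tile "N37W120" gives ll_latitude=37 and
    # ll_longitude=-120, hence ulxmap="-120.0" and ulymap="38.0"
    # (the upper-left corner of the 1-degree tile)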

    if not one:
        tmpl = tmpl3sec
    elif swbd:
        grass.message(_("Attempting to import 1-arcsec SWBD data"))
        tmpl = swbd1sec
    else:
        grass.message(_("Attempting to import 1-arcsec data"))
        tmpl = tmpl1sec

    header = tmpl % (ulxmap, ulymap)
    hdrfile = tile + '.hdr'
    outf = open(hdrfile, 'w')
    outf.write(header)
    outf.close()

    # create prj file: To be precise, we would need EGM96! But who really cares...
    prjfile = tile + '.prj'
    outf = open(prjfile, 'w')
    outf.write(proj)
    outf.close()

    try:
        grass.run_command('r.in.gdal', input=bilfile, out=tileout)
    except:
        grass.fatal(_("Unable to import data"))

    # nice color table
    if not swbd:
        grass.run_command('r.colors', map=tileout, color='srtm')

    # write cmd history:
    grass.raster_history(tileout)

    grass.message(_("Done: generated map ") + tileout)
    grass.message(_("(Note: Holes in the data can be closed with 'r.fillnulls' using splines)"))
Example #45
########### DATA
# Set GISDBASE environment variable
os.environ['GISDBASE'] = gisdb
 
# import GRASS Python bindings (see also pygrass)
import grass.script as gscript
import grass.script.setup as gsetup
#from grass.pygrass.modules.shortcuts import raster as r
 
###########
# launch session
gsetup.init(gisbase,
            gisdb, location, mapset)
 
#gscript.message('Current GRASS GIS 7 environment:')
#print gscript.gisenv()

MIN = str(sys.argv[3])
MAX = str(sys.argv[4])

print(gscript.read_command('v.in.ogr', output='sectors_group_modified', input=DATAPATH + '/pracovni', layer='sektory_group', snap=1, overwrite=True, flags="o"))
print(gscript.read_command('r.mapcalc', expression='distances_costed_cum_selected = if(distances_costed_cum<=' + MIN + '||distances_costed_cum>=' + MAX + ', null(), 1)', overwrite=True))
print(gscript.read_command('r.to.vect', input='distances_costed_cum_selected', output='distances_costed_cum_selected', type='area', overwrite=True))
print(gscript.read_command('v.select', ainput='sectors_group_modified', binput='distances_costed_cum_selected', output='sektory_group_selected', overwrite=True))
# Linux
#print(gscript.read_command('v.out.ogr', input='sektory_group_selected', output=DATAPATH + '/pracovni/', overwrite=True))
# Windows
print(gscript.read_command('v.out.ogr', format='ESRI_Shapefile', input='sektory_group_selected', output=DATAPATH + '/pracovni/sektory_group_selected.shp', overwrite=True))
##print(gscript.read_command('v.out.ogr', input='sektory_group_selected', output='C:/TEMP/sektory_group_selected.shp', overwrite=True))
Example #46
def main():

    global nuldev, tmp
    nuldev = open(os.devnull, 'w')  # file() was removed in Python 3
    tmp = "v_tin_to_rast_%d" % os.getpid()

    input = options['input']
    output = options['output']

    # initialize GRASS library
    G_gisinit('')

    # check if vector map exists
    mapset = G_find_vector2(input, "")
    if not mapset:
        grass.fatal("Vector map <%s> not found" % input)

    # define map structure
    map_info = pointer(Map_info())

    # set vector topology to level 2
    Vect_set_open_level(2)

    # opens the vector map
    Vect_open_old(map_info, input, mapset)

    Vect_maptype_info(map_info, input, mapset)

    # check if vector map is 3D
    if Vect_is_3d(map_info):
        grass.message("Vector map <%s> is 3D" % input)
    else:
        grass.fatal("Vector map <%s> is not 3D" % input)

    # allocation of the output buffer using the values of the current region
    window = pointer(Cell_head())
    G_get_window(window)
    nrows = window.contents.rows
    ncols = window.contents.cols
    xref = window.contents.west
    yref = window.contents.south
    xres = window.contents.ew_res
    yres = window.contents.ns_res

    outrast = []
    for i in range(nrows):
        outrast[i:] = [Rast_allocate_d_buf()]

    # create new raster
    outfd = Rast_open_new(output, DCELL_TYPE)
    if outfd < 0:
        grass.fatal("Unable to create raster map <%s>" % output)

    # insert null values in cells
    grass.message(_("Step 1/4: Inserting null values in cells..."))
    for i in range(nrows):
        Rast_set_d_null_value(outrast[i], ncols)
        G_percent(i, nrows, 2)

    #####  main work #####
    grass.message(_("Step 2/4: TIN preprocessing..."))
    z = c_double()
    G_percent(0, nrows, 2)
    Vect_tin_get_z(map_info, xref, yref, byref(z), None, None)

    grass.message(_("Step 3/4: Converting TIN to raster..."))
    for i in range(nrows):
        for j in range(ncols):
            x = xref + j * xres
            y = yref + i * yres
            Vect_tin_get_z(map_info, x, y, byref(z), None, None)
            outrast[i][j] = z
        G_percent(i, nrows, 2)

    grass.message(_("Step 4/4: Writing raster map..."))

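    # The buffer rows were filled from south to north (y = yref + i * yres),
    # while Rast_put_d_row() writes rows starting from the northern edge,
    # hence the reversed loop below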
    for i in range(nrows - 1, -1, -1):
        Rast_put_d_row(outfd, outrast[i])
        G_percent(nrows - i, nrows, 2)

    # clear buffer
    for i in range(nrows):
        G_free(outrast[i])

    # close raster
    Rast_close(outfd)

    # close vector
    Vect_close(map_info)

    # cut output raster to TIN vertical range
    vtop = grass.read_command('v.info', flags='g',
                              map=input).rsplit()[4].split('=')[1]
    vbottom = grass.read_command('v.info', flags='g',
                                 map=input).rsplit()[5].split('=')[1]

    tmp = "v_tin_to_rast_%d" % os.getpid()
    grass.mapcalc(
        "$tmp = if($vbottom < $output && $output < $vtop, $output, null())",
        tmp=tmp,
        output=output,
        vbottom=vbottom,
        vtop=vtop,
        quiet=True,
        stderr=nuldev)

    grass.parse_command('g.rename',
                        rast=(tmp, output),
                        quiet=True,
                        stderr=nuldev)

    # write cmd history:
    grass.run_command('r.support',
                      map=output,
                      title="%s" % output,
                      history="",
                      description="generated by v.tin.to.rast")
    grass.raster_history(output)

    grass.message(_("Done."))
Example #47
def main():
    stats = grass.read_command('r.stats',
                               input=options['map'],
                               fs='space',
                               nv='*',
                               nsteps='255',
                               flags='inc').split('\n')[:-1]

    # res = cellsize
    res = float(
        grass.read_command('g.region', rast=options['map'],
                           flags='m').strip().split('\n')[4].split('=')[1])
    zn = np.zeros((len(stats), 6), float)
    kl = np.zeros((len(stats), 2), float)
    prc = np.zeros((9, 2), float)

    for i in range(len(stats)):
        if i == 0:
            zn[i, 0], zn[i, 1] = map(float, stats[i].split(' '))
            zn[i, 2] = zn[i, 1]
        else:
            zn[i, 0], zn[i, 1] = map(float, stats[i].split(' '))
            zn[i, 2] = zn[i, 1] + zn[i - 1, 2]

    totcell = sum(zn[:, 1])
    print "Tot. cells", totcell

    for i in range(len(stats)):
        zn[i, 3] = 1 - (zn[i, 2] / sum(zn[:, 1]))
        zn[i, 4] = zn[i, 3] * (((res**2) / 1000000) * sum(zn[:, 1]))
        zn[i, 5] = (zn[i, 0] - min(zn[:, 0])) / (max(zn[:, 0]) - min(zn[:, 0]))
        kl[i, 0] = zn[i, 0]
        kl[i, 1] = 1 - (zn[i, 2] / totcell)

    # quantiles
    prc[0, 0], prc[0, 1] = findint(kl, 0.025), 0.025
    prc[1, 0], prc[1, 1] = findint(kl, 0.05), 0.05
    prc[2, 0], prc[2, 1] = findint(kl, 0.1), 0.1
    prc[3, 0], prc[3, 1] = findint(kl, 0.25), 0.25
    prc[4, 0], prc[4, 1] = findint(kl, 0.5), 0.5
    prc[5, 0], prc[5, 1] = findint(kl, 0.75), 0.75
    prc[6, 0], prc[6, 1] = findint(kl, 0.9), 0.9
    prc[7, 0], prc[7, 1] = findint(kl, 0.95), 0.95
    prc[8, 0], prc[8, 1] = findint(kl, 0.975), 0.975

    # Managing flag & plot
    if flags['a']:
        plotImage(zn[:, 3], zn[:, 5], options['image'] + '_Ipsometric.png',
                  '-', 'A(i) / A', 'Z(i) / Zmax', 'Ipsometric Curve')
    if flags['b']:
        plotImage(zn[:, 4], zn[:, 0], options['image'] + '_Ipsographic.png',
                  '-', 'A [km^2]', 'Z [m.slm]', 'Ipsographic Curve')

    print "==========================="
    print "Ipsometric | quantiles"
    print "==========================="
    print '%.0f' % findint(kl, 0.025), "|", 0.025
    print '%.0f' % findint(kl, 0.05), "|", 0.05
    print '%.0f' % findint(kl, 0.1), "|", 0.1
    print '%.0f' % findint(kl, 0.25), "|", 0.25
    print '%.0f' % findint(kl, 0.5), "|", 0.5
    print '%.0f' % findint(kl, 0.75), "|", 0.75
    print '%.0f' % findint(kl, 0.7), "|", 0.7
    print '%.0f' % findint(kl, 0.9), "|", 0.9
    print '%.0f' % findint(kl, 0.975), "|", 0.975
    print '\n'
    print 'Done!'
Example #48
def run_road(real_elev, scanned_elev, eventHandler, env, **kwargs):
    env2 = get_environment(raster=real_elev)
    before = 'scan_saved'
    analyses.change_detection(before=before,
                              after=scanned_elev,
                              change='change',
                              height_threshold=[12, 80],
                              cells_threshold=[3, 70],
                              add=True,
                              max_detected=1,
                              debug=True,
                              env=env)
    point = gscript.read_command('v.out.ascii',
                                 input='change',
                                 type='point',
                                 format='point',
                                 env=env).strip()

    conn = 'transfer_connection'
    drain = 'transfer_drain'
    resulting = "transfer_slopedir"
    if point:
        x, y, cat = point.split('|')
        gscript.run_command('r.drain',
                            input='transfer_cost',
                            direction='transfer_costdir',
                            output=conn,
                            start_points='change',
                            drain=conn,
                            flags='d',
                            env=env2)

        gscript.run_command('v.to.rast',
                            input=conn,
                            type='line',
                            output=conn + '_dir',
                            use='dir',
                            env=env2)
        gscript.mapcalc(
            "slope_dir = abs(atan(tan({slope}) * cos({aspect} - {line_dir})))".
            format(slope='transfer_slope',
                   aspect='transfer_aspect',
                   line_dir=conn + '_dir'),
            env=env2)
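        # slope along the connection line, using the standard directional
        # slope relation: tan(slope_dir) = tan(slope) * cos(aspect - direction)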
        # set new color table
        colors = [
            '0 green', '5 green', '5 yellow', '12 yellow', '12 red', '90 red'
        ]
        gscript.write_command('r.colors',
                              map='slope_dir',
                              rules='-',
                              stdin='\n'.join(colors),
                              env=env2)
        # increase thickness

        gscript.run_command('r.grow',
                            input='slope_dir',
                            radius=1.8,
                            output=resulting,
                            env=env2)

        # drain
        gscript.run_command('r.drain',
                            input=real_elev,
                            output=drain,
                            start_points='change',
                            drain=drain,
                            env=env2)

        gscript.run_command('r.viewshed',
                            input=real_elev,
                            output='transfer_viewshed',
                            observer_elevation=67,
                            coordinates=[x, y],
                            flags='b',
                            env=env2)
        gscript.write_command('r.colors',
                              map='transfer_viewshed',
                              rules='-',
                              stdin='0 black',
                              env=env2)

        env3 = get_environment(raster='transfer_road')
        gscript.mapcalc(
            'visible_road = if(transfer_viewshed == 1 && ! isnull(transfer_road), 1, null())',
            env=env3)
        #road_full = float(gscript.parse_command('r.univar', map='transfer_road', flags='g', env=env3)['n'])
        road_full = 500  # number of road cells
        try:
            road_v = float(
                gscript.parse_command('r.univar',
                                      map='visible_road',
                                      flags='g',
                                      env=env3)['n'])
        except KeyError:
            road_v = 0
        event = updateDisplay(value=int(100 * road_v / road_full))

        with VectorTopo(conn, mode='r') as v:
            try:
                line = v.read(1)
                event2 = updateProfile(
                    points=[(line[0].x, line[0].y), (line[-1].x, line[-1].y)])
            except IndexError:
                event2 = updateProfile(points=[])
    else:
        gscript.run_command('v.edit', map=conn, tool='create', env=env)
        gscript.run_command('v.edit', map=drain, tool='create', env=env)
        gscript.mapcalc('{} = null()'.format(resulting), env=env)
        gscript.mapcalc('{} = null()'.format('transfer_viewshed'), env=env)
        event = updateDisplay(value=None)
        event2 = updateProfile(points=[])

    # update viewshed score
    eventHandler.postEvent(receiver=eventHandler.activities_panel, event=event)
    eventHandler.postEvent(receiver=eventHandler.activities_panel,
                           event=event2)

    # copy results
    if point:
        postfix = datetime.now().strftime('%H_%M_%S')
        prefix = 'transfer1'
        gscript.run_command(
            'g.copy',
            vector=['change', '{}_change_{}'.format(prefix, postfix)],
            raster=[
                'visible_road', '{}_visible_road_{}'.format(prefix, postfix)
            ],
            env=env)
        gscript.run_command(
            'g.copy',
            raster=['slope_dir', '{}_slope_dir_{}'.format(prefix, postfix)],
            env=env)
Example #49
def main():
    global temp_dist, temp_src

    input = options['input']
    output = options['output']
    distances = options['distances']
    units = options['units']
    zero = flags['z']

    tmp = str(os.getpid())
    temp_dist = "r.buffer.tmp.%s.dist" % tmp
    temp_src = "r.buffer.tmp.%s.src" % tmp

    # check if input file exists
    if not grass.find_file(input)['file']:
        grass.fatal(_("Raster map <%s> not found") % input)

    scale = scales[units]

    distances = distances.split(',')
    distances1 = [scale * float(d) for d in distances]
    distances2 = [d * d for d in distances1]

    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)
    if kv['+proj'] == 'longlat':
        metric = 'geodesic'
    else:
        metric = 'squared'
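    # In a lat-long location geodesic distances (meters) are compared against
    # distances1; otherwise r.grow.distance writes *squared* distances, so the
    # squared thresholds in distances2 avoid a per-cell square root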

    grass.run_command('r.grow.distance',
                      input=input,
                      metric=metric,
                      distance=temp_dist,
                      flags='m')

    if zero:
        exp = "$temp_src = if($input == 0,null(),1)"
    else:
        exp = "$temp_src = if(isnull($input),null(),1)"

    grass.message(_("Extracting buffers (1/2)..."))
    grass.mapcalc(exp, temp_src=temp_src, input=input)

    exp = "$output = if(!isnull($input),$input,%s)"
    if metric == 'squared':
        for n, dist2 in enumerate(distances2):
            exp %= "if($dist <= %f,%d,%%s)" % (dist2, n + 2)
    else:
        for n, dist2 in enumerate(distances1):
            exp %= "if($dist <= %f,%d,%%s)" % (dist2, n + 2)
    exp %= "null()"
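    # For illustration (hypothetical thresholds d1 < d2), the nested
    # substitutions above expand to:
    # $output = if(!isnull($input),$input,
    #              if($dist <= d1,2,if($dist <= d2,3,null())))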

    grass.message(_("Extracting buffers (2/2)..."))
    grass.mapcalc(exp, output=output, input=temp_src, dist=temp_dist)

    p = grass.feed_command('r.category', map=output, separator=':', rules='-')
    msg = "1:distances calculated from these locations\n"
    p.stdin.write(encode(msg))
    d0 = "0"
    for n, d in enumerate(distances):
        msg = "%d:%s-%s %s\n" % (n + 2, d0, d, units)
        p.stdin.write(encode(msg))
        d0 = d
    p.stdin.close()
    p.wait()

    grass.run_command('r.colors', map=output, color='rainbow')

    # write cmd history:
    grass.raster_history(output)
Example #50
gisrc += 'GUI: text'

grass_gisrc = open('/tmp/gisrc', 'w')
grass_gisrc.write(gisrc)
grass_gisrc.close()
os.environ['GISRC'] = '/tmp/gisrc'

os.environ['PATH'] = ('/usr/sbin:/bin:/usr/bin:%s/bin:%s/scripts'
                      ':/home/epi/.grass7/addons/bin'
                      ':/home/epi/.grass7/addons/scripts'
                      ':/usr/local/opt/gdal2/bin/'
                      ':/Users/epi/.grass7/addons/bin:$PATH') % (GISBASE, GISBASE)

import grass.script as grass
from g2g import Grass2img

outputmaps = grass.read_command('g.list',
                                type='raster',
                                pattern='clustery_*',
                                exclude='*rej2').decode().strip().split('\n')
all = Grass2img(outputmaps, tmpdir='clustery').makeimg(html=True)

template = ''
for i in list(all.keys()):
    image = all[i]['raster']
    clat, clon = all[i]['C']
    ll_lat, ll_lon = all[i]['LL']
    ur_lat, ur_lon = all[i]['UR']
    tpl = """
    %s_imageBounds = [[%s, %s], [%s, %s]];
    var %s = L.imageOverlay('../%s', %s_imageBounds);
    """ % (i, ll_lat, ll_lon, ur_lat, ur_lon, i, image, i)
    template += tpl
Example #51
def run_patches(real_elev, scanned_elev, scanned_color, blender_path, eventHandler, env, **kwargs):
    topo = 'topo_saved'

    # detect patches
    patches = 'patches'
    analyses.classify_colors(new=patches, group=scanned_color, compactness=2,
                             threshold=0.3, minsize=10, useSuperPixels=True, env=env)
    gscript.run_command('r.to.vect', flags='svt', input=patches, output=patches, type='area', env=env)
    
    base_cat = [3, 6, 7]
    #
    # r.liPATH = '/run/user/1000/gvfs/smb-share:server=192.168.0.2,share=coupling/Watch/'
    #
    indices = 'index_'
    # write config file if it doesn't exist
    rlipath = os.path.join(expanduser("~"), ".grass7", "r.li")
    if not os.path.exists(rlipath):
        os.makedirs(rlipath)
    configpath = os.path.join(rlipath, "patches")
    outputpath = os.path.join(rlipath, "output")    

    if not os.path.exists(configpath):
        with open(configpath, 'w') as f:
            f.write('SAMPLINGFRAME 0|0|1|1\n')
            f.write('SAMPLEAREA 0.0|0.0|1|1')
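        # r.li configuration: sample the whole region; the sampling frame and
        # sample area are given as relative (0-1) fractions of the region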
    
    results = {}
    results_list = []
    # TODO: scaling values
    gscript.mapcalc('{p2} = if({p} != {cl1} && {p} != {cl2} && {p} != {cl3}, int({p}), null())'.format(p2=patches + '2', p=patches,
                    cl1=base_cat[0], cl2=base_cat[1], cl3=base_cat[2]), env=env)
    rliindices = ['patchnum', 'richness', 'mps', 'shannon', 'shape']
    for index in rliindices:
        gscript.run_command('r.li.' + index, input=patches + '2', output=indices + index, config=configpath, env=env)
        with open(os.path.join(outputpath, indices + index), 'r') as f:
            r = f.readlines()[0].strip().split('|')[-1]
            if index == 'patchnum' and float(r) == 0:
                results_list = [0] * len(rliindices)
                break

            results[index] = float(r)
            if index == 'mps':
                results[index] *= 10
            results_list.append(results[index])
    
    # remediation
    gscript.run_command('r.grow', flags='m', input='waterall', output='waterallg', radius=30, new=1, env=env)
    
    gscript.mapcalc('{new} = if({w} && {p} == 5, 1, null())'.format(new='remed', w='waterallg', p=patches + '2'), env=env)
    univar = gscript.parse_command('r.univar', map='remed', flags='g', env=env)
    remed_size = waterall = 0
    if univar and 'n' in univar:
        remed_size = float(univar['n'])
    univar = gscript.parse_command('r.univar', map='waterall', flags='g', env=env)
    if univar and 'n' in univar:
        waterall = int(univar['n'])
        
    perc = 0
    if waterall:
        perc = 100* remed_size/waterall
    results_list.insert(0, perc)
    
    # update dashboard
    event = updateDisplay(value=results_list)
    eventHandler.postEvent(receiver=eventHandler.activities_panel, event=event)        
    
    # export patches
    gscript.mapcalc('scanned_scan_int = int({})'.format(topo), env=env)
    
    # set color for patches
    gscript.run_command('r.to.vect', flags='svt', input=patches + '2', output=patches + '2', type='area', env=env)
    gscript.run_command('v.generalize', input=patches + '2', type='area', output=patches + '2gen', method='snakes', threshold=100, env=env)
    
    # creates color table as temporary file
    # this is the training map from where the colors are taken
    training = 'training_areas'
    color_path = '/tmp/patch_colors.txt'
    if not os.path.exists(color_path):
        color_table = gscript.read_command('r.colors.out', map=training, env=env).strip()
        with open(color_path, 'w') as f:
            for line in color_table.splitlines():
                if line.startswith('nv') or line.startswith('default'):
                    continue
                elif int(line.split(' ')[0]) in base_cat:
                    continue
                else:
                    f.write(line)
                    f.write('\n')
    try:
        gscript.run_command('v.colors', map=patches + '2gen', rules=color_path, env=env)
    except CalledModuleError:
        return

    try:
        gscript.run_command('r.mask', flags='r', env=env)
    except:
        pass
    cats = gscript.read_command('r.describe', flags='1ni', map=patches, env=env).strip()
    cats = [int(cat) for cat in cats.splitlines()]
    toexport = []   
    for cat in cats:
        if cat in base_cat:
            continue
        gscript.mapcalc('patch_' + trees[cat] + ' = if(isnull({p}), 0, if({p} == {c}, {c}, 0))'.format(p=patches, c=cat), env=env)
        gscript.run_command('r.colors', map='patch_' + trees[cat], color='grey', flags='n', env=env)
        toexport.append('patch_' + trees[cat])
    blender_send_file('empty.txt', path=blender_path)

    for png in toexport:
        blender_export_PNG(png, name=png, time_suffix=True, path=blender_path, env=env)
Example #52
def main():

    pan = options["pan"]
    msxlst = options["msx"].split(",")
    outputsuffix = options["suffix"]
    custom_ratio = options["ratio"]
    center = options["center"]
    center2 = options["center2"]
    modulation = options["modulation"]
    modulation2 = options["modulation2"]

    if options["trim"]:
        trimming_factor = float(options["trim"])
    else:
        trimming_factor = False

    histogram_match = flags["l"]
    second_pass = flags["2"]
    color_match = flags["c"]

    #    # Check & warn user about "ns == ew" resolution of current region ======
    #    region = grass.region()
    #    nsr = region['nsres']
    #    ewr = region['ewres']
    #
    #    if nsr != ewr:
    #        msg = ('>>> Region's North:South ({ns}) and East:West ({ew}) '
    #               'resolutions do not match!')
    #        msg = msg.format(ns=nsr, ew=ewr)
    #        grass.message(msg, flag='w')

    mapset = grass.gisenv()["MAPSET"]  # Current Mapset?
    region = grass.region()  # and region settings

    # List images and their properties

    # pygrass.raster.abstract.Info can not cope with
    # Info(name@mapset, mapset)
    # -> fully qualified names and input images from other mapsets are
    # not supported
    # -> use r.info via raster_info

    imglst = [pan]
    imglst.extend(msxlst)  # List of input imagery

    images = {}
    for img in imglst:  # Retrieving Image Info
        # images[img] = Info(img, mapset)
        # images[img].read()
        try:
            images[img] = grass.raster_info(img)
        except:
            grass.fatal(_("msx input not found"))

    panres = images[pan]["nsres"]  # Panchromatic resolution

    grass.use_temp_region()  # to safely modify the region
    if flags["a"]:
        run("g.region", align=pan)  # Respect extent, change resolution
    else:
        run("g.region", res=panres)  # Respect extent, change resolution
        grass.message(
            "|! Region's resolution matched to Pan's ({p})".format(p=panres))

    # Loop Algorithm over Multi-Spectral images

    for msx in msxlst:
        grass.message("\nProcessing image: {m}".format(m=msx))

        # Tracking command history -- Why don't do this all r.* modules?
        cmd_history = []

        #
        # 1. Compute Ratio
        #

        grass.message("\n|1 Determining ratio of low to high resolution")

        # Custom Ratio? Skip standard computation method.
        if custom_ratio:
            ratio = float(custom_ratio)
            grass.warning("Using custom ratio, overriding standard method!")

        # Multi-Spectral resolution(s), multiple
        else:
            # Image resolutions
            grass.message("   > Retrieving image resolutions")

            msxres = images[msx]["nsres"]

            # check
            if panres == msxres:
                msg = ("The Panchromatic's image resolution ({pr}) "
                       "equals to the Multi-Spectral's one ({mr}). "
                       "Something is probably not right! "
                       "Please check your input images.")
                msg = msg.format(pr=panres, mr=msxres)
                grass.fatal(_(msg))

            # compute ratio
            ratio = msxres / panres
            msg_ratio = ("   >> Resolution ratio "
                         "low ({m:.{dec}f}) to high ({p:.{dec}f}): {r:.1f}")
            msg_ratio = msg_ratio.format(m=msxres, p=panres, r=ratio, dec=3)
            grass.message(msg_ratio)

        # 2nd Pass requested, yet Ratio < 5.5
        if second_pass and ratio < 5.5:
            grass.message(
                "   >>> Resolution ratio < 5.5, skipping 2nd pass.\n"
                "   >>> If you insist, force it via the <ratio> option!",
                flag="i",
            )
            second_pass = False

        #
        # 2. High Pass Filtering
        #

        grass.message("\n|2 High Pass Filtering the Panchromatic Image")

        tmpfile = grass.tempfile()  # Temporary file - replace with os.getpid?
        tmp = "tmp." + grass.basename(tmpfile)  # use its basename
        tmp_pan_hpf = "{tmp}_pan_hpf".format(tmp=tmp)  # HPF image
        tmp_msx_blnr = "{tmp}_msx_blnr".format(tmp=tmp)  # Upsampled MSx
        tmp_msx_hpf = "{tmp}_msx_hpf".format(tmp=tmp)  # Fused image
        tmp_msx_mapcalc = tmp_msx_hpf + "_mapcalc"
        tmp_hpf_matrix = grass.tempfile()  # ASCII filter

        # Construct and apply Filter
        hpf = get_high_pass_filter(ratio, center)
        hpf_ascii(center, hpf, tmp_hpf_matrix, second_pass)
        run(
            "r.mfilter",
            input=pan,
            filter=tmp_hpf_matrix,
            output=tmp_pan_hpf,
            title="High Pass Filtered Panchromatic image",
            overwrite=True,
        )
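        # For orientation, the ASCII filter written above follows
        # r.mfilter's matrix file format; a 5x5 high-pass kernel could
        # look like the sketch below (the center value is illustrative,
        # it depends on the ratio and the <center> level):
        #
        #   MATRIX 5
        #   -1 -1 -1 -1 -1
        #   -1 -1 -1 -1 -1
        #   -1 -1 24 -1 -1
        #   -1 -1 -1 -1 -1
        #   -1 -1 -1 -1 -1
        #   DIVISOR 1
        #   TYPE P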

        # 2nd pass
        if second_pass and ratio > 5.5:
            # Temporary files
            # 2nd Pass HPF image
            tmp_pan_hpf_2 = "{tmp}_pan_hpf_2".format(tmp=tmp)
            # 2nd Pass ASCII filter
            tmp_hpf_matrix_2 = grass.tempfile()
            # Construct and apply 2nd Filter
            hpf_2 = get_high_pass_filter(ratio, center2)
            hpf_ascii(center2, hpf_2, tmp_hpf_matrix_2, second_pass)
            run(
                "r.mfilter",
                input=pan,
                filter=tmp_hpf_matrix_2,
                output=tmp_pan_hpf_2,
                title="2-High-Pass Filtered Panchromatic Image",
                overwrite=True,
            )

        #
        # 3. Upsampling low resolution image
        #

        grass.message("\n|3 Upsampling (bilinearly) low resolution image")

        run(
            "r.resamp.interp",
            method="bilinear",
            input=msx,
            output=tmp_msx_blnr,
            overwrite=True,
        )

        #
        # 4. Weighting the High Pass Filtered image(s)
        #

        grass.message("\n|4 Weighting the High-Pass-Filtered image (HPFi)")

        # Compute (1st Pass) Weighting
        msg_w = "   > Weighting = StdDev(MSx) / StdDev(HPFi) * " "Modulating Factor"
        grass.message(msg_w)

        # StdDev of Multi-Spectral Image(s)
        msx_avg = avg(msx)
        msx_sd = stddev(msx)
        grass.message("   >> StdDev of <{m}>: {sd:.3f}".format(m=msx,
                                                               sd=msx_sd))

        # StdDev of HPF Image
        hpf_sd = stddev(tmp_pan_hpf)
        grass.message("   >> StdDev of HPFi: {sd:.3f}".format(sd=hpf_sd))

        # Modulating factor
        modulator = get_modulator_factor(modulation, ratio)
        grass.message("   >> Modulating Factor: {m:.2f}".format(m=modulator))

        # weighting HPFi
        weighting = hpf_weight(msx_sd, hpf_sd, modulator, 1)
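        # Numeric illustration (values made up), following the formula in
        # the message above: with StdDev(MSx) = 12.5, StdDev(HPFi) = 4.2
        # and a modulating factor of 0.35, the weighting would be
        # 12.5 / 4.2 * 0.35 ~= 1.04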

        #
        # 5. Adding weighted HPF image to upsampled Multi-Spectral band
        #

        grass.message("\n|5 Adding weighted HPFi to upsampled image")
        fusion = "{hpf} = {msx} + {pan} * {wgt}"
        fusion = fusion.format(hpf=tmp_msx_hpf,
                               msx=tmp_msx_blnr,
                               pan=tmp_pan_hpf,
                               wgt=weighting)
        grass.mapcalc(fusion)

        # command history
        hst = "Weigthing applied: {msd:.3f} / {hsd:.3f} * {mod:.3f}"
        cmd_history.append(hst.format(msd=msx_sd, hsd=hpf_sd, mod=modulator))

        if second_pass and ratio > 5.5:

            #
            # 4+ 2nd Pass Weighting the High Pass Filtered image
            #

            grass.message("\n|4+ 2nd Pass Weighting the HPFi")

            # StdDev of HPF Image #2
            hpf_2_sd = stddev(tmp_pan_hpf_2)
            grass.message(
                "   >> StdDev of 2nd HPFi: {h:.3f}".format(h=hpf_2_sd))

            # Modulating factor #2
            modulator_2 = get_modulator_factor2(modulation2)
            msg = "   >> 2nd Pass Modulating Factor: {m:.2f}"
            grass.message(msg.format(m=modulator_2))

            # 2nd Pass weighting
            weighting_2 = hpf_weight(msx_sd, hpf_2_sd, modulator_2, 2)

            #
            # 5+ Adding weighted HPF image to upsampled Multi-Spectral band
            #

            grass.message("\n|5+ Adding small-kernel-based weighted "
                          "2nd HPFi back to fused image")

            add_back = "{final} = {msx_hpf} + {pan_hpf} * {wgt}"
            # r.mapcalc: do not use input as output
            add_back = add_back.format(
                final=tmp_msx_mapcalc,
                msx_hpf=tmp_msx_hpf,
                pan_hpf=tmp_pan_hpf_2,
                wgt=weighting_2,
            )
            grass.mapcalc(add_back)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # 2nd Pass history entry
            hst = "2nd Pass Weighting: {m:.3f} / {h:.3f} * {mod:.3f}"
            cmd_history.append(
                hst.format(m=msx_sd, h=hpf_2_sd, mod=modulator_2))

        #
        # 6. Stretching linearly the HPF-Sharpened image(s) to match the Mean
        #     and Standard Deviation of the input Multi-Spectral image(s)
        #

        if histogram_match:

            # adapt output StdDev and Mean to the input(ted) ones
            # technically, this is not histogram matching but
            # normalizing to the input's mean + stddev
            grass.message("\n|+ Matching histogram of Pansharpened image "
                          "to %s" % (msx))

            # Collect stats for linear histogram matching
            msx_hpf_avg = avg(tmp_msx_hpf)
            msx_hpf_sd = stddev(tmp_msx_hpf)

            msx_info = images[msx]
            outfn = "round"
            if msx_info["datatype"] == "FCELL":
                outfn = "float"
            elif msx_info["datatype"] == "DCELL":
                outfn = "double"

            # expression for mapcalc
            lhm = ("{out} = {outfn}(double({hpf} - {hpfavg}) / {hpfsd} * "
                   "{msxsd} + {msxavg})")
            # r.mapcalc: do not use input as output
            lhm = lhm.format(
                out=tmp_msx_mapcalc,
                outfn=outfn,
                hpf=tmp_msx_hpf,
                hpfavg=msx_hpf_avg,
                hpfsd=msx_hpf_sd,
                msxsd=msx_sd,
                msxavg=msx_avg,
            )

            # compute
            grass.mapcalc(lhm, quiet=True, overwrite=True)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # snap outliers to input range
            snapout = ("{out} = {outfn}(if({hpf} < {oldmin}, {oldmin}, "
                       "if({hpf} > {oldmax}, {oldmax}, {hpf})))")
            snapout = snapout.format(
                out=tmp_msx_mapcalc,
                outfn=outfn,
                hpf=tmp_msx_hpf,
                oldmin=msx_info["min"],
                oldmax=msx_info["max"],
            )

            grass.mapcalc(snapout, quiet=True, overwrite=True)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # update history string
            cmd_history.append("Linear Histogram Matching: %s" % lhm)
        else:
            # scale result to input using quantiles
            grass.message("\n|+ Quantile scaling of Pansharpened image "
                          "to %s" % (msx))

            msx_info = images[msx]
            outfn = "round"
            if msx_info["datatype"] == "FCELL":
                outfn = "float"
            elif msx_info["datatype"] == "DCELL":
                outfn = "double"

            # quantile scaling
            percentiles = "10,50,90"
            allq = grass.read_command("r.quantile",
                                      input=msx,
                                      percentiles=percentiles,
                                      quiet=True)
            allq = allq.splitlines()
            msx_plo = float(allq[0].split(":")[2])
            msx_med = float(allq[1].split(":")[2])
            msx_phi = float(allq[2].split(":")[2])

            allq = grass.read_command("r.quantile",
                                      input=tmp_msx_hpf,
                                      percentiles=percentiles,
                                      quiet=True)
            allq = allq.splitlines()
            hpf_plo = float(allq[0].split(":")[2])
            hpf_med = float(allq[1].split(":")[2])
            hpf_phi = float(allq[2].split(":")[2])

            # scale factors
            if msx_med != msx_plo and hpf_med != hpf_plo:
                sfplo = (msx_med - msx_plo) / (hpf_med - hpf_plo)
            else:
                # avoid zero and division by zero
                sfplo = 1
            if msx_phi != msx_med and hpf_phi != hpf_med:
                sfphi = (msx_phi - msx_med) / (hpf_phi - hpf_med)
            else:
                # avoid zero and division by zero
                sfphi = 1
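            # Numeric illustration (values made up): with MSx quantiles
            # (plo, med, phi) = (20, 50, 90) and HPFi quantiles
            # (15, 48, 95): sfplo = (50 - 20) / (48 - 15) ~= 0.91 and
            # sfphi = (90 - 50) / (95 - 48) ~= 0.85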

            scale = ("{out} = {outfn}(double({hpf} - {hpf_med}) * "
                     "if({hpf} < {hpf_med}, {sfplo}, "
                     "{sfphi}) + {msx_med})")
            scale = scale.format(
                out=tmp_msx_mapcalc,
                outfn=outfn,
                hpf=tmp_msx_hpf,
                hpf_med=hpf_med,
                sfplo=sfplo,
                sfphi=sfphi,
                msx_med=msx_med,
            )
            grass.mapcalc(scale, quiet=True)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # snap outliers to input range
            snapout = ("{out} = {outfn}(if({hpf} < {oldmin}, {oldmin}, "
                       "if({hpf} > {oldmax}, {oldmax}, {hpf})))")
            snapout = snapout.format(
                out=tmp_msx_mapcalc,
                outfn=outfn,
                hpf=tmp_msx_hpf,
                oldmin=msx_info["min"],
                oldmax=msx_info["max"],
            )

            grass.mapcalc(snapout, quiet=True, overwrite=True)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

            # update history string
            cmd_history.append("Linear Scaling: %s" % scale)

        if color_match:
            grass.message("\n|* Matching output to input color table")
            run("r.colors", map=tmp_msx_hpf, raster=msx)

        #
        # Optional. Trim to remove black border effect (rectangular only)
        #

        if trimming_factor:

            tf = trimming_factor

            # communicate
            msg = "\n|* Trimming output image border pixels by "
            msg += "{factor} times the low resolution\n".format(factor=tf)
            nsew = "   > Input extent: n: {n}, s: {s}, e: {e}, w: {w}"
            nsew = nsew.format(n=region["n"],
                               s=region["s"],
                               e=region["e"],
                               w=region["w"])
            msg += nsew

            grass.message(msg)

            # re-set borders
            region.n -= tf * images[msx]["nsres"]
            region.s += tf * images[msx]["nsres"]
            region.e -= tf * images[msx]["ewres"]
            region.w += tf * images[msx]["ewres"]

            # communicate and act
            msg = "   > Output extent: n: {n}, s: {s}, e: {e}, w: {w}"
            msg = msg.format(n=region["n"],
                             s=region["s"],
                             e=region["e"],
                             w=region["w"])
            grass.message(msg)

            # modify only the extent
            run("g.region",
                n=region["n"],
                s=region["s"],
                e=region["e"],
                w=region["w"])
            # r.mapcalc: do not use input as output
            trim = "{out} = {input}".format(out=tmp_msx_mapcalc,
                                            input=tmp_msx_hpf)
            grass.mapcalc(trim)
            run("g.remove", flags="f", type="raster", name=tmp_msx_hpf)
            run("g.rename", raster=(tmp_msx_mapcalc, tmp_msx_hpf))

        #
        # End of Algorithm

        # history entry
        run("r.support", map=tmp_msx_hpf, history="\n".join(cmd_history))

        # add suffix to basename & rename end product
        msx_name = "{base}{suffix}"
        msx_name = msx_name.format(base=msx.split("@")[0], suffix=outputsuffix)
        run("g.rename", raster=(tmp_msx_hpf, msx_name))

        # remove temporary files
        cleanup()

    # restore region settings and hint at visualisation
    grass.del_temp_region()  # restoring previous region settings
    grass.message("\n|! Original Region restored")
    grass.message(
        "\n>>> Hint, rebalancing colors (via i.colors.enhance) "
        "may improve appearance of RGB composites!",
        flag="i",
    )
Example #53
def main():
    global TMPLOC, GISDBASE
    global orgenv, switchloc

    partner_regions = options['partner_regions']
    partner_regions_layer = options['partner_regions_layer']
    partner_id_column = options['partner_id']
    grid_points = options['grid_points']
    grid_points_layer = options['grid_points_layer']
    all_partner_id_column = options['all_partner_id']

    basins = options['basins']
    basins_layer = options['basins_layer']

    output = options['output']
    output_layer = options['output_layer']
    output_format = options['format']

    orgenv = gscript.gisenv()
    GISDBASE = orgenv['GISDBASE']
    TMPLOC = 'ECMWF_temp_location_' + str(os.getpid())

    # import grid points with v.in.ogr into new location
    kwargs = dict()
    if grid_points_layer:
        kwargs['layer'] = grid_points_layer
    gscript.run_command('v.in.ogr',
                        input=grid_points,
                        output="grid_points",
                        location=TMPLOC,
                        **kwargs)
    del kwargs

    # switch to new location
    gscript.run_command('g.mapset', location=TMPLOC, mapset="PERMANENT")
    switchloc = True

    # check if we have an attribute table
    dbinfo = gscript.vector_db("grid_points")
    if 1 not in dbinfo.keys():
        # add new table
        gscript.run_command('v.db.addtable', map="grid_points")
        dbinfo = gscript.vector_db("grid_points")

    # check if the column all_partner_id_column exists
    columns = gscript.read_command('v.info', map="grid_points", flags="c")

    found = False
    for line in columns.splitlines():
        colname = line.split("|", 1)[1]
        if colname == all_partner_id_column:
            found = True

    if not found:
        # add column
        gscript.run_command('v.db.addcolumn',
                            map="grid_points",
                            column="%s varchar(255)" % (all_partner_id_column))
    else:
        # clear column entries
        table = dbinfo[1]['table']
        database = dbinfo[1]['database']
        driver = dbinfo[1]['driver']
        sqlcmd = "UPDATE %s SET %s=NULL" % (table, all_partner_id_column)
        gscript.write_command('db.execute',
                              input='-',
                              database=database,
                              driver=driver,
                              stdin=sqlcmd)

    # import all partner polygons with v.import
    # need to snap, assume units are meters !!!

    kwargs = dict()
    if partner_regions_layer:
        kwargs['layer'] = partner_regions_layer
    gscript.run_command('v.import',
                        input=partner_regions,
                        output="partner_regions_1",
                        snap="0.01",
                        **kwargs)
    del kwargs

    # the column partner_id_column must exist
    columns = gscript.read_command('v.info',
                                   map="partner_regions_1",
                                   flags="c")

    found = False
    for line in columns.splitlines():
        colname = line.split("|", 1)[1]
        if colname == partner_id_column:
            found = True

    if not found:
        gscript.fatal("Column <%s> not found in input <%s>" %
                      (partner_id_column, partner_regions))

    # clean partner regions:
    # clean up overlapping parts and gaps smaller than mingapsize
    mingapsize = 10000000
    gscript.run_command('v.clean',
                        input="partner_regions_1",
                        output="partner_regions_2",
                        tool="rmarea",
                        thresh=mingapsize,
                        flags="c")

    # import river basins with v.import
    # need to snap, assume units are meters !!!

    kwargs = dict()
    if basins_layer:
        kwargs['layer'] = basins_layer
    gscript.run_command('v.import',
                        input=basins,
                        output="basins",
                        snap="10",
                        **kwargs)
    del kwargs

    # add new column basin_cat to grid_points
    gscript.run_command('v.db.addcolumn',
                        map="grid_points",
                        column="basin_cat integer")

    # upload basin cat to grid points
    gscript.run_command('v.what.vect',
                        map="grid_points",
                        column="basin_cat",
                        query_map="basins",
                        query_column="cat")

    # combine basins and partner regions with v.overlay with snap=0.01
    gscript.run_command('v.overlay',
                        ainput="basins",
                        atype="area",
                        binput="partner_regions_2",
                        btype="area",
                        operator="and",
                        output="basins_partners",
                        olayer="1,0,0",
                        snap="0.01")

    # select all basin cats from grid points
    basincats = gscript.read_command('v.db.select',
                                     map="grid_points",
                                     column="basin_cat",
                                     where="basin_cat is not null",
                                     flags="c")

    basincatsint = [int(c) for c in basincats.splitlines()]
    basincatsint = sorted(set(basincatsint))
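    # For orientation: basincats is the newline-separated output of
    # v.db.select, e.g. "12\n12\n37\n" (values made up), which the two
    # lines above reduce to the sorted unique list [12, 37]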

    # loop over basin cats
    gscript.message(
        _("Updating grid points with partner region IDs for %d basins, "
          "this can take some time...") % (len(basincatsint)))
    for bcat in basincatsint:

        # for each basin cat, select all partner ids from the overlay
        pcats = gscript.read_command('v.db.select',
                                     map="basins_partners",
                                     column="b_%s" % (partner_id_column),
                                     where="a_cat = %d" % (bcat),
                                     flags="c")

        # create comma-separated list and upload to grid points,
        # column all_partner_id_column
        if len(pcats) > 0:
            pcatlist = []
            for c in pcats.splitlines():
                # the MOU_IDS column can already contain a comma-separated list of IDs
                for cc in c.split(','):
                    pcatlist.append(int(cc))

            pcatlist = sorted(set(pcatlist))
            pcatstring = ','.join(str(c) for c in pcatlist)
            gscript.run_command('v.db.update',
                                map="grid_points",
                                column=all_partner_id_column,
                                value=pcatstring,
                                where="basin_cat = %d" % (bcat),
                                quiet=True)

    # export updated grid points
    kwargs = dict()
    if output_layer:
        kwargs['output_layer'] = output_layer
    gscript.run_command('v.out.ogr',
                        input="grid_points",
                        output=output,
                        type="point",
                        format=output_format,
                        flags="s",
                        **kwargs)
    del kwargs

    return 0
Example #54
def main():
    global tmp_rmaps

    # user keys
    in_raster = options['input']  # in_raster = 'srtm_1sec_amazonia'
    out_raster = options['output']  # out_raster = 'teste_dnoise'
    iterations = options['iterations']
    threshold = options['threshold']
    epsg = options['epsg']

    # check if input file exists
    if not grass.find_file(in_raster)['file']:
        grass.fatal(_("Raster map <%s> not found") % in_raster)

    # name the files
    tmp_xyz = "{}.xyz".format(grass.tempfile())
    tmp_xyz_proj = "{}.xyz".format(grass.tempfile())
    tmp_out_dnoise = "{}.xyz".format(grass.tempfile())
    tmp_xyz_merge = "{}.xyz".format(grass.tempfile())
    # list for cleanup
    tmp_rmaps = [tmp_xyz, tmp_xyz_proj, tmp_out_dnoise, tmp_xyz_merge]

    # Export the map to xyz points.
    grass.message(_("Exporting points..."))
    grass.run_command('r.out.xyz',
                      input=in_raster,
                      output=tmp_xyz,
                      separator='space',
                      overwrite=True)

    # check if current location is in a projected coordinate system
    reproject = check_proj(epsg)

    # Reproject if necessary
    if reproject:
        # define projections
        loc_proj = grass.read_command('g.proj', flags='jf')
        loc_proj = pyproj.Proj(loc_proj.strip())
        epsg_proj = pyproj.Proj(init='epsg:' + str(epsg))
        do_proj(xyz_in=tmp_xyz,
                xyz_out=tmp_xyz_proj,
                in_proj=loc_proj,
                out_proj=epsg_proj)
        tmp_xyz = tmp_xyz_proj

    # Denoise.  The -z flag preserves the xy positions of the points.
    grass.message(_("Denoising..."))
    cmd = ['mdenoise', '-i', tmp_xyz, '-t', str(threshold),
           '-n', str(iterations), '-z', '-o', tmp_out_dnoise]
    grass.call(cmd)
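    # For orientation, the call above expands to something like
    # (parameter values illustrative):
    #   mdenoise -i in.xyz -t 0.99 -n 4 -z -o out.xyz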

    # As only the z coordinates have changed in denoising,
    # the new z coordinates are combined with the original xy coordinates.
    # merge: read the denoised and original coordinate files in parallel
    # and write the merged output file
    f_merged = open(tmp_xyz_merge, 'w')
    with open(tmp_out_dnoise) as f_dnoise, open(tmp_xyz) as f_orig:
        for line_dnoise, line_orig in zip(f_dnoise, f_orig):
            xyz_dnoise = line_dnoise.split()  # denoised
            xyz_orig = line_orig.split()  # original
            f_merged.write('%s %s %s\n' %
                           (xyz_orig[0], xyz_orig[1], xyz_dnoise[2]))

    # close files
    f_merged.close()
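    # For orientation (coordinates made up): an original line
    # "635000 7800000 123.4" and its denoised counterpart
    # "635000 7800000 122.9" merge to "635000 7800000 122.9"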

    # Reload data
    grass.message(_("Reloading data..."))
    grass.run_command('r.in.xyz',
                      flags='i',
                      input=tmp_xyz_merge,
                      output=out_raster,
                      method='min',
                      x=1,
                      y=2,
                      z=3,
                      separator='space',
                      overwrite=True)

    # Edit metadata to record denoising parameters
    grass.run_command('r.support',
                      map=out_raster,
                      title="A denoised version of <%s>" % in_raster)
    grass.run_command(
        'r.support',
        map=out_raster,
        history="Generated by: r.denoise %s iterations=%s threshold=%s" %
        (in_raster, str(threshold), str(iterations)))
Example #55
def main():

    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    global tile, tmpdir, in_temp, currdir, tmpregionname

    in_temp = False

    url = options['url']
    username = options['username']
    password = options['password']
    local = options['local']
    output = options['output']
    memory = options['memory']
    fillnulls = flags['n']
    srtmv3 = not flags['2']
    one = flags['1']
    dozerotile = flags['z']
    reproj_res = options['resolution']

    overwrite = grass.overwrite()

    res = '00:00:03'
    if srtmv3:
        fillnulls = 0
        if one:
            res = '00:00:01'
    else:
        one = None

    if len(local) == 0:
        if len(url) == 0:
            if srtmv3:
                if one:
                    url = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11/'
                else:
                    url = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11/'
            else:
                url = 'http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/'

    if len(local) == 0:
        local = None

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)

    if fillnulls == 1 and int(memory) <= 0:
        grass.warning(_("Amount of memory to use for interpolation must be positive, setting to 300 MB"))
        memory = '300'

    # make a temporary directory
    tmpdir = grass.tempfile()
    grass.try_remove(tmpdir)
    os.mkdir(tmpdir)
    currdir = os.getcwd()
    pid = os.getpid()

    # change to temporary directory
    os.chdir(tmpdir)
    in_temp = True
    if local is None:
        local = tmpdir

    # save region
    tmpregionname = 'r_in_srtm_tmp_region'
    grass.run_command('g.region', save=tmpregionname, overwrite=overwrite)

    # get extents
    if kv['+proj'] == 'longlat':
        reg = grass.region()
    else:
        if not options['resolution']:
            grass.fatal(_("The <resolution> must be set if the projection is not 'longlat'."))
        reg2 = grass.parse_command('g.region', flags='uplg')
        north = [float(reg2['ne_lat']), float(reg2['nw_lat'])]
        south = [float(reg2['se_lat']), float(reg2['sw_lat'])]
        east = [float(reg2['ne_long']), float(reg2['se_long'])]
        west = [float(reg2['nw_long']), float(reg2['sw_long'])]
        reg = {}
        if np.mean(north) > np.mean(south):
            reg['n'] = max(north)
            reg['s'] = min(south)
        else:
            reg['n'] = min(north)
            reg['s'] = max(south)
        if np.mean(west) > np.mean(east):
            reg['w'] = max(west)
            reg['e'] = min(east)
        else:
            reg['w'] = min(west)
            reg['e'] = max(east)
        # get actual location, mapset, ...
        grassenv = grass.gisenv()
        tgtloc = grassenv['LOCATION_NAME']
        tgtmapset = grassenv['MAPSET']
        GISDBASE = grassenv['GISDBASE']
        TGTGISRC = os.environ['GISRC']

    if kv['+proj'] != 'longlat':
        SRCGISRC, TMPLOC = createTMPlocation()
    if options['region'] is None or options['region'] == '':
        north = reg['n']
        south = reg['s']
        east = reg['e']
        west = reg['w']
    else:
        west, south, east, north = options['region'].split(',')
        west = float(west)
        south = float(south)
        east = float(east)
        north = float(north)

    # adjust extents to cover SRTM tiles: 1 degree bounds
    tmpint = int(north)
    if tmpint < north:
        north = tmpint + 1
    else:
        north = tmpint

    tmpint = int(south)
    if tmpint > south:
        south = tmpint - 1
    else:
        south = tmpint

    tmpint = int(east)
    if tmpint < east:
        east = tmpint + 1
    else:
        east = tmpint

    tmpint = int(west)
    if tmpint > west:
        west = tmpint - 1
    else:
        west = tmpint

    if north == south:
        north += 1
    if east == west:
        east += 1
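    # Worked example (region values made up): n=43.7, s=41.2, e=12.9,
    # w=10.3 snaps to north=44, south=41, east=13, west=10,
    # i.e. 3 rows x 3 cols = 9 one-degree tiles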

    rows = abs(north - south)
    cols = abs(east - west)
    ntiles = rows * cols
    grass.message(_("Importing %d SRTM tiles...") % ntiles, flag = 'i')
    counter = 1

    srtmtiles = ''
    valid_tiles = 0
    for ndeg in range(south, north):
        for edeg in range(west, east):
            grass.percent(counter, ntiles, 1)
            counter += 1
            if ndeg < 0:
                tile = 'S'
            else:
                tile = 'N'
            tile = tile + '%02d' % abs(ndeg)
            if edeg < 0:
                tile = tile + 'W'
            else:
                tile = tile + 'E'
            tile = tile + '%03d' % abs(edeg)
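            # Tile naming examples: ndeg=-3, edeg=-61 -> 'S03W061';
            # ndeg=40, edeg=7 -> 'N40E007'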
            grass.debug("Tile: %s" % tile, debug = 1)

            if local != tmpdir:
                gotit = import_local_tile(tile, local, pid, srtmv3, one)
            else:
                gotit = download_tile(tile, url, pid, srtmv3, one, username, password)
                if gotit == 1:
                    gotit = import_local_tile(tile, tmpdir, pid, srtmv3, one)
            if gotit == 1:
                grass.verbose(_("Tile %s successfully imported") % tile)
                valid_tiles += 1
            elif dozerotile:
                # create tile with zeros
                if one:
                    # north
                    if ndeg < -1:
                        tmpn = '%02d:59:59.5S' % (abs(ndeg) - 2)
                    else:
                        tmpn = '%02d:00:00.5N' % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = '%02d:00:00.5S' % abs(ndeg)
                    else:
                        tmps = '%02d:59:59.5N' % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = '%03d:59:59.5W' % (abs(edeg) - 2)
                    else:
                        tmpe = '%03d:00:00.5E' % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = '%03d:00:00.5W' % abs(edeg)
                    else:
                        tmpw = '%03d:59:59.5E' % (edeg - 1)
                else:
                    # north
                    if ndeg < -1:
                        tmpn = '%02d:59:58.5S' % (abs(ndeg) - 2)
                    else:
                        tmpn = '%02d:00:01.5N' % (ndeg + 1)
                    # south
                    if ndeg < 1:
                        tmps = '%02d:00:01.5S' % abs(ndeg)
                    else:
                        tmps = '%02d:59:58.5N' % (ndeg - 1)
                    # east
                    if edeg < -1:
                        tmpe = '%03d:59:58.5W' % (abs(edeg) - 2)
                    else:
                        tmpe = '%03d:00:01.5E' % (edeg + 1)
                    # west
                    if edeg < 1:
                        tmpw = '%03d:00:01.5W' % abs(edeg)
                    else:
                        tmpw = '%03d:59:58.5E' % (edeg - 1)

                grass.run_command('g.region', n = tmpn, s = tmps, e = tmpe, w = tmpw, res = res)
                grass.run_command('r.mapcalc', expression = "%s = 0" % (tile + '.r.in.srtm.tmp.' + str(pid)), quiet = True)
                grass.run_command('g.region', region = tmpregionname)


    # g.list with sep = comma does not work ???
    pattern = '*.r.in.srtm.tmp.%d' % pid
    srtmtiles = grass.read_command('g.list', type = 'raster',
                                   pattern = pattern,
                                   sep = 'newline',
                                   quiet = True)

    srtmtiles = srtmtiles.splitlines()
    srtmtiles = ','.join(srtmtiles)
    grass.debug("'List of Tiles: %s" % srtmtiles, debug = 1)

    if valid_tiles == 0:
        grass.run_command('g.remove', type = 'raster', name = str(srtmtiles), flags = 'f', quiet = True)
        grass.warning(_("No tiles imported"))
        if local != tmpdir:
            grass.fatal(_("Please check if local folder <%s> is correct.") % local)
        else:
            grass.fatal(_("Please check internet connection, credentials, and if url <%s> is correct.") % url)

    grass.run_command('g.region', raster = str(srtmtiles))

    grass.message(_("Patching tiles..."))
    if fillnulls == 0:
        if valid_tiles > 1:
            if kv['+proj'] != 'longlat':
                grass.run_command('r.buildvrt', input = srtmtiles, output = output)
            else:
                grass.run_command('r.patch', input = srtmtiles, output = output)
        else:
            grass.run_command('g.rename', raster = '%s,%s' % (srtmtiles, output ), quiet = True)
    else:
        ncells = grass.region()['cells']
        if long(ncells) > 1000000000:
            grass.message(_("%s cells to interpolate, this will take some time") % str(ncells), flag = 'i')
        if kv['+proj'] != 'longlat':
            grass.run_command('r.buildvrt', input = srtmtiles, output = output + '.holes')
        else:
            grass.run_command('r.patch', input = srtmtiles, output = output + '.holes')
        mapstats = grass.parse_command('r.univar', map = output + '.holes', flags = 'g', quiet = True)
        if mapstats['null_cells'] == '0':
            grass.run_command('g.rename', raster = '%s,%s' % (output + '.holes', output), quiet = True)
        else:
            grass.run_command('r.resamp.bspline',
                              input = output + '.holes',
                              output = output + '.interp',
                              se = '0.0025', sn = '0.0025',
                              method = 'linear',
                              memory = memory,
                              flags = 'n')
            grass.run_command('r.patch',
                              input = '%s,%s' % (output + '.holes',
                              output + '.interp'),
                              output = output + '.float',
                              flags = 'z')
            grass.run_command('r.mapcalc', expression = '%s = round(%s)' % (output, output + '.float'))
            grass.run_command('g.remove', type = 'raster',
                              name = '%s,%s,%s' % (output + '.holes', output + '.interp', output + '.float'),
                              flags = 'f',
                              quiet = True)


    # switch to target location
    if kv['+proj'] != 'longlat':
        os.environ['GISRC'] = str(TGTGISRC)
        # r.proj
        grass.message(_("Reprojecting <%s>...") % output)
        kwargs = {
            'location': TMPLOC,
            'mapset': 'PERMANENT',
            'input': output,
            'memory': memory,
            'resolution': reproj_res
        }
        if options['method']:
            kwargs['method'] = options['method']
        try:
            grass.run_command('r.proj', **kwargs)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % output)
    else:
        if fillnulls != 0:
            grass.run_command('g.remove', type = 'raster', pattern = pattern, flags = 'f', quiet = True)

    # nice color table
    grass.run_command('r.colors', map = output, color = 'srtm', quiet = True)

    # write metadata:
    tmphist = grass.tempfile()
    f = open(tmphist, 'w+')
    f.write(os.environ['CMDLINE'])
    f.close()
    if srtmv3:
        source1 = 'SRTM V3'
    else:
        source1 = 'SRTM V2.1'
    grass.run_command('r.support', map = output,
                      loadhistory = tmphist,
                      description = 'generated by r.in.srtm.region',
                      source1 = source1,
                      source2 = (local if local != tmpdir else url))
    grass.try_remove(tmphist)

    grass.message(_("Done: generated map <%s>") % output)
Example #56
v.buffer(input='comm_data_neotro_checked_2020_d11_06',
         output='buffers_5km_comm_data_neotro_checked_2020_d11_06',
         type='point', distance=0.045, flags='t')

#---------------------------------------
# cutting variables using buffers
#
# Here we cut the tree cover data from GFW to each buffer.
# Then we set the tree cover to 0 in areas deforested up to the year of
# sampling at each point, and create binary forest/non-forest maps using
# tree cover thresholds of > 70, 80, and 90.

# years for forest prop

# read all lines, column sampling_y
years = grass.read_command('v.db.select', map = 'buffers_5km_comm_data_neotro_checked_2020_d11_06',
 columns = 'sampling_y')
# transform into a list; drop the first element (the column header) and
# the trailing empty string
years = years.replace('\r', '').split('\n')[1:-1]
# keep only the last two digits of each year, as integers
years = [int(i[2:]) for i in years]
# treat all sampling years before 2000 (two-digit year > 20) as 0
for i in range(len(years)):
  if years[i] > 20:
    years[i] = 0

# community codes
comm_code = grass.read_command('v.db.select', map = 'buffers_5km_comm_data_neotro_checked_2020_d11_06',
 columns = 'comm_code')
comm_code = comm_code.replace('\r', '').split('\n')[1:-1]

# list of buffers
Example #57
def main():
    # gscript.run_command('g.region', flags='p')
    input = options['input']
    # input = '{}_net'.format(oinput)
    layer = options['layer']
    output = options['output']
    node_layer = options['node_layer']

    table = '{}_{}'.format(output, 1)

    # # a tempfile would be needed if graph could be read from an edgelist
    # tmpFile = grass.tempfile()

    gscript.verbose(_("Reading network data..."))

    ## Read Network data from vector map
    gscript.run_command('v.net',
                        flags='c',
                        input=input,
                        output=output,
                        operation='nodes',
                        node_layer=node_layer,
                        quiet=True)

    #gscript.run_command('v.db.addtable', map=output, layer=2, key='cat')
    #gscript.run_command('v.to.db', map=output, layer=node_layer, option='cat', columns='cat', quiet=True)

    # Data has to be parsed or written to file as StringIO objects are not supported by igraph
    # https://github.com/igraph/python-igraph/issues/8
    net = gscript.read_command('v.net',
                               input=output,
                               points=output,
                               node_layer=node_layer,
                               operation='report',
                               quiet=True).split('\n')
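    # v.net operation=report prints one line per edge, e.g. "1 3 4"
    # (values made up): edge cat 1 runs from node cat 3 to node cat 4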

    # Parse network data and extract vertices, edges and edge names
    edges = []
    vertices = []
    edge_cat = []
    for l in net:
        if l != '':
            # Names for edges and vertices have to be of type string
            # Names (cat) for edges
            edge_cat.append(l.split(' ')[0])

            # From- and to-vertices for edges
            edges.append((l.split(' ')[1], l.split(' ')[2]))

            # Names (cat) for from-vertices
            vertices.append(l.split(' ')[1])

            # Names (cat) for to-vertices
            vertices.append(l.split(' ')[2])

    # Create Graph object
    g = Graph().as_directed()

    # Add vertices with names (deduplicated; sorted for determinism)
    vertices = sorted(set(vertices))
    g.add_vertices(vertices)

    # Add edges, referencing vertices by name
    g.add_edges(edges)

    gscript.verbose(_("Computing neighborhood..."))

    # Compute number of vertices that can be reached from each vertex
    # Indicates upstream or downstream position of a node
    g.vs['nbh'] = g.neighborhood_size(mode='out', order=g.diameter())
    g.vs['cl'] = g.as_undirected().clusters().membership

    # Compute incoming degree centrality
    # sources have incoming degree centrality of 0
    g.vs['indegree'] = g.degree(type="in")

    # Compute outgoing degree centrality
    # outlets have outgoing degree centrality of 0
    g.vs['outdegree'] = g.degree(type="out")

    gscript.verbose(_("Writing result to table..."))

    # Get Attributes
    attrs = []
    for n in g.vs:
        attrs.append((int(n['name']), int(n['nbh']), int(n['cl']),
                      int(n['indegree']), int(n['outdegree'])))

    # Write results back to attribute table
    # Note: backend dependent! For a more general solution this has to be handled
    path = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'
    conn = sqlite3.connect(get_path(path))
    c = conn.cursor()
    c.execute('DROP TABLE IF EXISTS {}'.format(table))

    # Create temporary table
    c.execute('''CREATE TABLE {}
                 (cat integer, neighborhood integer,
                  cluster integer, indegree integer,
                  outdegree integer)'''.format(table))
    conn.commit()

    # Insert data into temporary table
    c.executemany('INSERT INTO {} VALUES (?,?,?,?,?)'.format(table), attrs)

    # Save (commit) the changes
    conn.commit()

    # Connect table to output node layer
    gscript.run_command('v.db.connect',
                        map=output,
                        table=table,
                        layer=node_layer,
                        flags='o')
    # Join temporary table to output
    #gscript.run_command('v.db.join', map=output, layer=node_layer,
    #                    column='cat', other_table=tmpTable,
    #                    other_column='cat', quiet=True)

    # Remove temporary table
    #c = conn.cursor()
    #c.execute('DROP TABLE IF EXISTS {}'.format(tmpTable))
    #conn.commit()

    # We can also close the connection if we are done with it.
    # Just be sure any changes have been committed or they will be lost.
    conn.close()
Example #58
def main():
    dsn = options['dsn']
    db_table = options['db_table']
    output = options['output']
    key = options['key']

    mapset = grass.gisenv()['MAPSET']

    if db_table:
        input = db_table
    else:
        input = dsn

    if not output:
        tmpname = input.replace('.', '_')
        output = grass.basename(tmpname)

    if not grass.overwrite():
        s = grass.read_command('db.tables', flags='p')
        for l in s.splitlines():
            if l == output:
                grass.fatal(_("Table <%s> already exists") % output)
    else:
        grass.write_command('db.execute',
                            input='-',
                            stdin="DROP TABLE %s" % output)

    # treat DB as real vector map...
    if db_table:
        layer = db_table
    else:
        layer = None

    if grass.run_command(
            'v.in.ogr', flags='o', dsn=dsn, output=output, layer=layer,
            quiet=True) != 0:
        if db_table:
            grass.fatal(
                _("Input table <%s> not found or not readable") % input)
        else:
            grass.fatal(_("Input DSN <%s> not found or not readable") % input)

    # if requested, copy the cat ID column into a new column named <key>
    if key:
        grass.write_command('db.execute',
                            quiet=True,
                            input='-',
                            stdin="ALTER TABLE %s ADD COLUMN %s integer" %
                            (output, key))
        grass.write_command('db.execute',
                            quiet=True,
                            input='-',
                            stdin="UPDATE %s SET %s=cat" % (output, key))

    # ... and immediately drop the empty geometry
    vectfile = grass.find_file(output, element='vector', mapset=mapset)['file']
    if not vectfile:
        grass.fatal(_("Something went wrong. Should not happen"))
    else:
        # remove the vector part
        grass.run_command('v.db.connect',
                          quiet=True,
                          map=output,
                          layer='1',
                          flags='d')
        grass.run_command('g.remove',
                          flags='f',
                          quiet=True,
                          type='vect',
                          pattern=output)

    # get rid of superfluous auto-added cat column (and cat_ if present)
    nuldev = open(os.devnull, 'w+')
    grass.run_command('db.dropcolumn',
                      quiet=True,
                      flags='f',
                      table=output,
                      column='cat',
                      stdout=nuldev,
                      stderr=nuldev)
    nuldev.close()

    records = grass.db_describe(output)['nrows']
    grass.message(_("Imported table <%s> with %d rows") % (output, records))
Example #59
def main():
    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    overwrite = grass.overwrite()

    # list formats and exit
    if flags['f']:
        grass.run_command('v.in.ogr', flags='f')
        return 0

    # list layers and exit
    if flags['l']:
        try:
            grass.run_command('v.in.ogr', flags='l', input=options['input'])
        except CalledModuleError:
            return 1
        return 0

    OGRdatasource = options['input']
    output = options['output']
    layers = options['layer']

    vflags = ''
    if options['extent'] == 'region':
        vflags += 'r'
    if flags['o']:
        vflags += 'o'

    vopts = {}
    if options['encoding']:
        vopts['encoding'] = options['encoding']

    if options['datum_trans'] and options['datum_trans'] == '-1':
        # list datum transform parameters
        if not options['epsg']:
            grass.fatal(_("Missing value for parameter <%s>") % 'epsg')

        return grass.run_command('g.proj',
                                 epsg=options['epsg'],
                                 datum_trans=options['datum_trans'])

    if layers:
        vopts['layer'] = layers
    if output:
        vopts['output'] = output
    vopts['snap'] = options['snap']

    # try v.in.ogr directly
    if flags['o'] or grass.run_command('v.in.ogr',
                                       input=OGRdatasource,
                                       flags='j',
                                       errors='status',
                                       quiet=True,
                                       overwrite=overwrite,
                                       **vopts) == 0:
        try:
            grass.run_command('v.in.ogr',
                              input=OGRdatasource,
                              flags=vflags,
                              overwrite=overwrite,
                              **vopts)
            grass.message(
                _("Input <%s> successfully imported without reprojection") %
                OGRdatasource)
            return 0
        except CalledModuleError:
            grass.fatal(_("Unable to import <%s>") % OGRdatasource)

    grassenv = grass.gisenv()
    tgtloc = grassenv['LOCATION_NAME']

    # make sure target is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for current location <%s>"
              ) % tgtloc)

    tgtmapset = grassenv['MAPSET']
    GISDBASE = grassenv['GISDBASE']
    TGTGISRC = os.environ['GISRC']
    SRCGISRC = grass.tempfile()

    TMPLOC = 'temp_import_location_' + str(os.getpid())

    f = open(SRCGISRC, 'w')
    f.write('MAPSET: PERMANENT\n')
    f.write('GISDBASE: %s\n' % GISDBASE)
    f.write('LOCATION_NAME: %s\n' % TMPLOC)
    f.write('GUI: text\n')
    f.close()

    tgtsrs = grass.read_command('g.proj', flags='j', quiet=True)

    # create temp location from input without import
    grass.verbose(_("Creating temporary location for <%s>...") % OGRdatasource)
    try:
        if OGRdatasource.lower().endswith("gml"):
            try:
                from osgeo import gdal
            except ImportError:
                grass.fatal(
                    _("Unable to load GDAL Python bindings (requires package 'python-gdal' being installed)"
                      ))
            if int(gdal.VersionInfo('VERSION_NUM')) < GDAL_COMPUTE_VERSION(
                    2, 4, 1):
                fix_gfsfile(OGRdatasource)
        grass.run_command('v.in.ogr',
                          input=OGRdatasource,
                          location=TMPLOC,
                          flags='i',
                          quiet=True,
                          overwrite=overwrite,
                          **vopts)
    except CalledModuleError:
        grass.fatal(
            _("Unable to create location from OGR datasource <%s>") %
            OGRdatasource)

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    if options['epsg']:  # force given EPSG
        kwargs = {}
        if options['datum_trans']:
            kwargs['datum_trans'] = options['datum_trans']
        grass.run_command('g.proj', flags='c', epsg=options['epsg'], **kwargs)

    # print projection at verbose level
    grass.verbose(grass.read_command('g.proj', flags='p').rstrip(os.linesep))

    # make sure input is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for input <%s>") %
            OGRdatasource)

    if options['extent'] == 'region':
        # switch to target location
        os.environ['GISRC'] = str(TGTGISRC)

        # v.in.region in tgt
        vreg = 'vreg_' + str(os.getpid())
        grass.run_command('v.in.region', output=vreg, quiet=True)

        # reproject to src
        # switch to temp location
        os.environ['GISRC'] = str(SRCGISRC)
        try:
            grass.run_command('v.proj',
                              input=vreg,
                              output=vreg,
                              location=tgtloc,
                              mapset=tgtmapset,
                              quiet=True,
                              overwrite=overwrite)
        except CalledModuleError:
            grass.fatal(_("Unable to reproject to source location"))

        # set region from region vector
        grass.run_command('g.region', res='1')
        grass.run_command('g.region', vector=vreg)

    # import into temp location
    grass.message(_("Importing <%s> ...") % OGRdatasource)
    try:
        if OGRdatasource.lower().endswith("gml"):
            try:
                from osgeo import gdal
            except ImportError:
                grass.fatal(
                    _("Unable to load GDAL Python bindings (requires package 'python-gdal' being installed)"
                      ))
            if int(gdal.VersionInfo('VERSION_NUM')) < GDAL_COMPUTE_VERSION(
                    2, 4, 1):
                fix_gfsfile(OGRdatasource)
        grass.run_command('v.in.ogr',
                          input=OGRdatasource,
                          flags=vflags,
                          overwrite=overwrite,
                          **vopts)
    except CalledModuleError:
        grass.fatal(_("Unable to import OGR datasource <%s>") % OGRdatasource)

    # if output is not defined, use the first vector map found in the
    # source PERMANENT mapset
    if not output:
        output = grass.list_grouped('vector')['PERMANENT'][0]

    # switch to target location
    os.environ['GISRC'] = str(TGTGISRC)

    # check if map exists
    if not grass.overwrite() and \
       grass.find_file(output, element='vector', mapset='.')['mapset']:
        grass.fatal(_("option <%s>: <%s> exists.") % ('output', output))

    if options['extent'] == 'region':
        grass.run_command('g.remove',
                          type='vector',
                          name=vreg,
                          flags='f',
                          quiet=True)

    # v.proj
    grass.message(_("Reprojecting <%s>...") % output)
    try:
        grass.run_command('v.proj',
                          location=TMPLOC,
                          mapset='PERMANENT',
                          input=output,
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to to reproject vector <%s>") % output)

    return 0
Example #60
def import_tif(basedir, rem, write, pm, prod, target=None, listfile=None):
    """Import TIF files"""
    # list of tif files
    pref = modis_prefix(pm.hdfname)
    tifiles = glob.glob1(basedir, "{pr}*.tif".format(pr=pref))
    if not tifiles:
        tifiles = glob.glob1(os.getcwd(), "{pr}*.tif".format(pr=pref))
    if not tifiles:
        grass.fatal(_("Error during the conversion"))
    outfile = []
    # for each file import it
    for t in tifiles:
        basename = os.path.splitext(t)[0]
        basename = basename.replace(" ", "_")
        name = os.path.join(basedir, t)
        if not os.path.exists(name):
            name = os.path.join(os.getcwd(), t)
        if not os.path.exists(name):
            grass.warning(_("File %s doesn't find" % name))
            continue
        filesize = int(os.path.getsize(name))
        if filesize < 1000:
            grass.warning(
                _("An error probably occurred during the conversion "
                  "of file <%s>. Skipping import") % name)
            continue
        try:
            basename = basename.replace('"', "").replace(" ", "_")
            grass.run_command("r.in.gdal",
                              input=name,
                              output=basename,
                              overwrite=write,
                              quiet=True)
            outfile.append(basename)

            # check number of bands
            nbands = int(grass.read_command("r.in.gdal", input=name,
                                            flags="p"))
        except CalledModuleError:
            grass.warning(_("Error during import of {}".format(basename)))
            continue

        # process bands
        for b in range(nbands):
            if nbands > 1:
                mapname = "{}.{}".format(basename, b + 1)
            else:
                mapname = basename
            data = metadata(pm, mapname)

            if listfile:
                days = prod["days"]
                fdata = data + timedelta(days)
                if days == 31:
                    fdata = datetime(fdata.year, fdata.month, 1)
                if days != 1 and data.year != fdata.year:
                    fdata = datetime(fdata.year, fdata.month, 1)
                listfile.write("{name}|{sd}|{fd}\n".format(
                    name=mapname,
                    sd=data.strftime("%Y-%m-%d"),
                    fd=fdata.strftime("%Y-%m-%d"),
                ))
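                # Illustrative listfile line (map name made up) for an
                # 8-day product starting 2001-01-01:
                #   mod_lst_b1|2001-01-01|2001-01-09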

        # handle the source file: remove it, or move it to the target
        # directory (elif, since a removed file cannot be moved)
        if rem:
            os.remove(name)
        elif target and target != basedir:
            shutil.move(name, target)

    return outfile