Example #1
    def _getRegionParams(self,opt_region):
        """!Get region parameters from region specified or active default region

        @return region_params as a dictionary
        """
        self._debug("_getRegionParameters", "started")

        if opt_region:
            reg_spl = opt_region.strip().split('@', 1)
            reg_mapset = '.'
            if len(reg_spl) > 1:
                reg_mapset = reg_spl[1]

            if not gscript.find_file(name=reg_spl[0], element='windows',
                                     mapset=reg_mapset)['name']:
                gscript.fatal(_("Region <%s> not found") % opt_region)

        if opt_region:
            s = gscript.read_command('g.region',
                                    quiet = True,
                                    flags = 'ug',
                                    region = opt_region)
            region_params = gscript.parse_key_val(s, val_type = float)
            gscript.verbose("Using region parameters for region %s" %opt_region)
        else:
            region_params = gscript.region()
            gscript.verbose("Using current grass region")

        self._debug("_getRegionParameters", "finished")
        return region_params
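
The helper above leans on 'g.region -ug' printing shell-style key=value pairs. A minimal sketch of just that parsing step, using a literal string instead of a live g.region call (the region values are invented):

import grass.script as gscript

# Invented sample of 'g.region -ug' output; real output has more keys.
s = "n=228500\ns=215000\nw=630000\ne=645000\nnsres=10\newres=10"

# parse_key_val() turns key=value lines into a dict, casting each value.
region_params = gscript.parse_key_val(s, val_type=float)
print(region_params['n'] - region_params['s'])  # region height: 13500.0
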
Example #2
def process_loop(nprocs, maps, file_name, count, maps_per_process, 
                 remaining_maps_per_loop, output_files, 
                 output_time_list, r_what, process_queue):
    """Call r.what in parallel subprocesses"""
    first = True
    for process in range(nprocs):
        num = maps_per_process
        # Add the remaining maps to the first process
        if first is True:
            num += remaining_maps_per_loop
        first = False

        # Temporary output file
        final_file_name = file_name + "_%i"%(process)
        output_files.append(final_file_name)
        
        map_names = []
        map_list = []
        for i in range(num):
            map = maps[count]
            map_names.append(map.get_id())
            map_list.append(map)
            count += 1

        output_time_list.append(map_list)

        gscript.verbose(_("Process maps %(samp_start)i to %(samp_end)i (of %(total)i)"\
                                  %({"samp_start":count-len(map_names)+1, 
                                  "samp_end":count, "total":len(maps)})))
        mod = copy.deepcopy(r_what)
        mod(map=map_names, output=final_file_name)
        #print(mod.get_bash())
        process_queue.put(mod)

    return count
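
The loop above hands each process maps_per_process maps and lets the first process absorb the per-loop remainder. The same distribution in isolation (pure Python, names are illustrative):

def distribute(n_maps, nprocs):
    """Split n_maps across nprocs chunks; the first chunk takes the remainder."""
    per_process = n_maps // nprocs
    remainder = n_maps % nprocs
    return [per_process + (remainder if p == 0 else 0) for p in range(nprocs)]

print(distribute(10, 3))  # -> [4, 3, 3]
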
Example #3
def list_layers():
    """Get list of available layers from WMS server"""
    qstring = "service=WMS&request=GetCapabilities&" + options['wmsquery']
    grass.debug("POST-data: %s" % qstring)
    
    # download capabilities file
    grass.verbose("List of layers for server <%s>:" % options['mapserver'])
    url = options['mapserver'] + '?' + qstring
    try:
        if options['cap_file']:
            cap_file, headers = urllib.urlretrieve(url, options['cap_file'])
        else:
            cap_file = urllib.urlopen(url, options['mapserver'] + '?' + qstring)
    except IOError:
        grass.fatal(_("Unable to get capabilities of '%s'") % options['mapserver'])
    
    # check DOCTYPE first
    if options['cap_file']:
        if headers['content-type'] != 'application/vnd.ogc.wms_xml':
            grass.fatal(_("Unable to get capabilities: %s") % url)
    else:
        if cap_file.info()['content-type'] != 'application/vnd.ogc.wms_xml':
            grass.fatal(_("Unable to get capabilities: %s") % url)

    # parse file with sax
    cap_xml = wms_parse.ProcessCapFile()
    try:
        xml.sax.parse(cap_file, cap_xml)
    except xml.sax.SAXParseException as err:
        grass.fatal(_("Reading capabilities failed. "
                      "Unable to parse XML document: %s") % err)
Example #4
def main():
    map = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['otable']
    ocolumn = options['ocolumn']

    f = grass.vector_layer_db(map, layer)

    maptable = f['table']
    database = f['database']
    driver = f['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(_("There is no table connected to this map. Unable to join any column."))

    if column not in grass.vector_columns(map, layer):
        grass.fatal(_("Column <%s> not found in table <%s> at layer <%s>") % (column, map, layer))

    all_cols_ot = grass.db_describe(otable, driver = driver, database = database)['cols']
    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in all_cols_ot:
        # Skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue
        # Sqlite 3 does not support the precision number any more
        if len(col) > 2 and driver != "sqlite":
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # Add only the new column to the table
        if colname not in all_cols_tt:
            if grass.run_command('v.db.addcolumn', map=map, columns=colspec, layer=layer) != 0:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table=maptable, column=column,
                                   otable=otable, ocolumn=ocolumn,
                                   colname=colname)

        grass.verbose(_("Updating column <%s> of vector map <%s>...") % (colname, map))
        if grass.write_command('db.execute', stdin=stmt, input='-', database=database, driver=driver) != 0:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history:
    grass.vector_history(map)
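
The join itself is plain SQL assembled with string.Template; substituting sample names shows the statement that db.execute receives (table and column names below are made up):

import string

select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
template = string.Template("UPDATE $table SET $colname=(%s);" % select)

stmt = template.substitute(table='roads', column='cat',
                           otable='stats', ocolumn='cat',
                           colname='length_m')
print(stmt)
# UPDATE roads SET length_m=(SELECT length_m FROM stats WHERE stats.cat=roads.cat);
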
Example #5
def cleanup():
    
    # see end of main()
    grass.verbose(_("Module cleanup in: "+TMPDIR))
    os.system('rm '+ os.path.join( TMPDIR, '*'))
    if REMOVETMPDIR:
        try_rmdir(TMPDIR)
    else:
        grass.message("\n%s\n" % _("printws: Temp dir remove failed. Do it yourself, please:"))
        sys.stderr.write('%s <---- this\n' % TMPDIR)
Example #6
def render(astring,adic):
    grass.verbose(_("printws: Rendering into - BASE: " + LASTFILE))
    grass.verbose(_("printws: Rendering command: " + astring))
    
    dic = copy.deepcopy(adic)
    
    task = dic['task']
    del dic['task']
    # it should be replaced by grass.* API calls
    # os.system(astring)
    grass.run_command(task, "", **dic)  # migration to grass.* API calls is in progress
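
render() forwards a dictionary of module options as keyword arguments after removing the module name. The same pattern in isolation (the d.rast call and option dict are just an example, assuming a running GRASS session):

import copy
import grass.script as grass

adic = {'task': 'd.rast', 'map': 'elevation'}

dic = copy.deepcopy(adic)       # keep the caller's dict intact
task = dic.pop('task')          # module name, e.g. 'd.rast'
grass.run_command(task, **dic)  # remaining keys become module options
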
Example #7
def main():
    vector = options['map']
    layer = options['layer']
    column = options['column']
    value = options['value']
    qcolumn = options['qcolumn']
    where = options['where']

    mapset = grass.gisenv()['MAPSET']

    # does map exist in CURRENT mapset?
    if not grass.find_file(vector, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))

    table = f['table']
    database = f['database']
    driver = f['driver']

    # checking column types
    try:
        coltype = grass.vector_columns(vector, layer)[column]['type']
    except KeyError:
        grass.fatal(_('Column <%s> not found') % column)

    if qcolumn:
        if value:
            grass.fatal(_('<value> and <qcolumn> are mutually exclusive'))
        # special case: we copy from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_('Either <value> or <qcolumn> must be given'))
        # we insert a value
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    grass.verbose("SQL: \"%s\"" % cmd)

    grass.write_command('db.execute', input = '-', database = database, driver = driver, stdin = cmd)

    # write cmd history:
    grass.vector_history(vector)

    return 0
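
The quoting branch above matters: only non-numeric columns need the value wrapped in single quotes before it goes into the UPDATE statement. A sketch of how the final SQL comes together (names and values are illustrative):

table, column, where = 'roads', 'label', "cat = 5"
coltype = 'CHARACTER'
value = 'highway'

# character columns get their value quoted for SQL
if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
    value = "'%s'" % value

cmd = "UPDATE %s SET %s=%s" % (table, column, value)
if where:
    cmd += " WHERE " + where
print(cmd)  # UPDATE roads SET label='highway' WHERE cat = 5
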
Example #8
    def _computeBbox(self,region_params):
        """!Get extent for WCS query (bbox) from region parameters

        @return bounding box defined by list [minx,miny,maxx,maxy]
        """
        self._debug("_computeBbox", "started")
        boundingboxvars = ("w","s","e","n")
        boundingbox = list()
        for f in boundingboxvars:
            boundingbox.append(int(self.params['region'][f]))
        gscript.verbose("Boundingbox coordinates:\n %s  \n [West, South, Eest, North]" %boundingbox)
        self._debug("_computeBbox", "finished")
        return boundingbox
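
Note the ordering trick: reading the region keys as (w, s, e, n) already yields the [minx, miny, maxx, maxy] layout that WCS bounding boxes expect. In isolation (region values are invented):

region = {'w': 630000, 's': 215000, 'e': 645000, 'n': 228500}

# (w, s, e, n) maps directly onto [minx, miny, maxx, maxy]
boundingbox = [int(region[f]) for f in ("w", "s", "e", "n")]
print(boundingbox)  # -> [630000, 215000, 645000, 228500]
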
Example #9
    def _download(self):
        """!Downloads data from WCS server using GDAL WCS driver

        @return ret (exit code of r.in.gdal module)
        """
        self._debug("_download", "started")
        self.xml_file = self._createXML()
        self.vrt_file = self._createVRT()

        gscript.message('Starting module r.in.gdal ...')
        
        if self.params['location'] == "":
            p = gscript.start_command('r.in.gdal',
                     input=self.vrt_file,
                     output=self.params['output'],
                     stderr = gscript.PIPE,
                     env = self._env
            )
        
        else:
            p = gscript.start_command('r.in.gdal',
                     input=self.vrt_file,
                     output=self.params['output'],
                     location = self.params['location'],
                     stderr = gscript.PIPE,
                     env = self._env
            )
        
        # percent status message
        while p.poll() is None:
            line = p.stderr.readline()
            linepercent = line.replace('GRASS_INFO_PERCENT:','').strip()
            if linepercent.isdigit():
                #print linepercent
                gscript.percent(int(linepercent),100,1)
            else:
                gscript.verbose(line)
        
        gscript.percent(100,100,5)

        ret = p.wait()
        if ret != 0:
            gscript.fatal('r.in.gdal for %s failed.' % self.vrt_file )
        else:
            gscript.message('r.in.gdal was successful for new raster map %s ' % self.params['output'] )

        gscript.try_remove(self.vrt_file)
        gscript.try_remove(self.xml_file)
        self._debug("_download", "finished")

        return ret
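
The progress loop above relies on GRASS modules writing lines such as "GRASS_INFO_PERCENT: 42" to stderr. A sketch of just the line handling, fed from a list instead of a live pipe (the sample lines are made up):

lines = ["GRASS_INFO_PERCENT: 10", "some verbose text", "GRASS_INFO_PERCENT: 100"]

for line in lines:
    linepercent = line.replace('GRASS_INFO_PERCENT:', '').strip()
    if linepercent.isdigit():
        print("progress: %s%%" % linepercent)  # the module calls gscript.percent()
    else:
        print("verbose: %s" % line)            # the module calls gscript.verbose()
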
Example #10
    def rusle(self):
        """!main method in rusle_base
        controlling the whole process, once called by main()

        @return soillossbare name of output raster map
        """

        flowacc = outprefix + 'flowacc'
        slope = outprefix + 'slope'
        lsfactor = outprefix + 'lsfactor'
        #self.tmp_rast.append(flowacc)
        #self.tmp_rast.append(slope)
        #self.tmp_rast.append(lsfactor)

        global fieldblock
        if not fieldblock:
            if fieldblockvect:
                fieldblock = outprefix + "fieldblock"
                gscript.run_command("v.to.rast",
                        input=fieldblockvect,
                        output= fieldblock,
                        use="val",
                        value="1",
                        quiet=quiet
                        )
        if fieldblock:
            gscript.verbose('Raster map fieldblock is in "%s"' % fieldblock)
        else:
            fieldblock = ""

        if not options['flowacc']:
            self._getFlowacc(elevation, flowacc, fieldblock)
            gscript.verbose('Raster map flowacc is in "%s".' % flowacc)
        else:
            flowacc = options['flowacc']
            gscript.verbose('Raster map flowacc taken from "%s".' % flowacc)


        self._getSlope(elevation, slope)
        gscript.verbose('Raster map slope is in "%s"' % slope)

        self._getLsfac(flowacc, slope, lsfactor)
        gscript.verbose('Raster map lsfactor is in "%s"' % lsfactor)

        self._getSoillossbare(lsfactor,kfactor,rfactor,soillossbare)
        gscript.message('Soilloss for bare soil in map "%s".' %soillossbare)

        stats = gscript.parse_command('r.univar', flags="g", map=soillossbare, delimiter = '=')
        gscript.message('mean = %s \n stddev = %s \n min = %s \n max = %s' % (stats['mean'],stats['stddev'], stats['min'], stats['max']))

        return soillossbare
Example #11
def one_point_per_row_output(separator, output_files, output_time_list,
                             output, write_header, site_input):
    """Write one point per row
       output is of type: x,y,start,end,value
    """
    # open the output file for writing
    out_file = open(output, 'w') if output != "-" else sys.stdout
    
    if write_header is True:
        if site_input:
            out_file.write("x%(sep)sy%(sep)ssite%(sep)sstart%(sep)send%(sep)svalue\n"\
                       %({"sep":separator}))
        else:
            out_file.write("x%(sep)sy%(sep)sstart%(sep)send%(sep)svalue\n"\
                       %({"sep":separator}))

    for count in range(len(output_files)):
        file_name = output_files[count]
        gscript.verbose(_("Transforming r.what output file %s"%(file_name)))
        map_list = output_time_list[count]
        in_file = open(file_name, "r")
        for line in in_file:
            line = line.split(separator)
            x = line[0]
            y = line[1]
            if site_input:
                site = line[2]

            # We ignore the site name
            values = line[3:]
            for i in range(len(values)):
                start, end = map_list[i].get_temporal_extent_as_tuple()
                if site_input:
                    coor_string = "%(x)10.10f%(sep)s%(y)10.10f%(sep)s%(site_name)s%(sep)s"\
                               %({"x":float(x),"y":float(y),"site_name":str(site),"sep":separator})
                else:
                    coor_string = "%(x)10.10f%(sep)s%(y)10.10f%(sep)s"\
                               %({"x":float(x),"y":float(y),"sep":separator})
                time_string = "%(start)s%(sep)s%(end)s%(sep)s%(val)s\n"\
                               %({"start":str(start), "end":str(end),
                                  "val":(values[i].strip()),"sep":separator})

                out_file.write(coor_string + time_string)
        
        in_file.close()
    
    if out_file is not sys.stdout:
        out_file.close()
Example #12
def cleanup():
    grass.verbose("Cleaning up ...")
    if tmp:
        grass.try_remove(tmp)
    if tmp_proj:
        grass.try_remove(tmp_proj)
    if tmp_gpx:
        grass.try_remove(tmp_gpx)

    # only try to remove map if it exists to avoid ugly warnings
    if tmp_vogb:
        if grass.find_file(tmp_vogb, element='vector')['name']:
            grass.run_command('g.remove', vect=tmp_vogb, quiet=True)
    if tmp_extr:
        if grass.find_file(tmp_extr, element='vector')['name']:
            grass.run_command('g.remove', vect=tmp_extr, quiet=True)
Example #13
    def compute(self):
        # computing
        grass.message('"nnbathy" is performing the interpolation now. \
                      This may take some time...')
        grass.verbose("Once it completes an 'All done.' \
                      message will be printed.")

        # nnbathy calling
        fsock = open(self._xyzout, 'w')
        grass.call(['nnbathy',
                    '-W', '%d' % 0,
                    '-i', '%s' % self._tmpxyz,
                    '-x', '%d' % self.nn_w, '%d' % self.nn_e,
                    '-y', '%d' % self.nn_n, '%d' % self.nn_s,
                    '-P', '%s' % self.ALG,
                    '-n', '%dx%d' % (self.cols, self.rows)],
                   stdout=fsock)
        fsock.close()
Example #14
def main():
    map = options['map']
    layer = options['layer']
    columns = options['columns']
    columns = [col.strip() for col in columns.split(',')]

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    exists = bool(grass.find_file(map, element='vector', mapset=mapset)['file'])

    if not exists:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    try:
        f = grass.vector_db(map)[int(layer)]
    except KeyError:
        grass.fatal(
            _("There is no table connected to this map. Run v.db.connect or v.db.addtable first."))

    table = f['table']
    database = f['database']
    driver = f['driver']
    column_existing = grass.vector_columns(map, int(layer)).keys()

    for col in columns:
        if not col:
            grass.fatal(_("There is an empty column. Did you leave a trailing comma?"))
        col_name = col.split(' ')[0].strip()
        if col_name in column_existing:
            grass.error(_("Column <%s> is already in the table. Skipping.") % col_name)
            continue
        grass.verbose(_("Adding column <%s> to the table") % col_name)
        p = grass.feed_command('db.execute', input='-', database=database, driver=driver)
        p.stdin.write("ALTER TABLE %s ADD COLUMN %s" % (table, col))
        grass.debug("ALTER TABLE %s ADD COLUMN %s" % (table, col))
        p.stdin.close()
        if p.wait() != 0:
            grass.fatal(_("Unable to add column <%s>.") % col)

    # write cmd history:
    grass.vector_history(map)
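
grass.feed_command() returns a Popen object whose stdin is a pipe; that is how the ALTER TABLE statement reaches db.execute above. A minimal sketch of the same pattern, assuming a running GRASS session (table and column are made up; under Python 3 the pipe expects bytes, hence the encode):

import grass.script as grass

stmt = "ALTER TABLE roads ADD COLUMN length_m double precision"

p = grass.feed_command('db.execute', input='-')
p.stdin.write(stmt.encode())  # pipe the SQL to db.execute
p.stdin.close()
if p.wait() != 0:
    grass.fatal("Unable to run: %s" % stmt)
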
Example #15
    def _createVRT(self):
        '''!Create a VRT with the help of the gdalbuildvrt program;
        VRT is a virtual GDAL dataset format

        @return path to VRT file
        '''
        self._debug("_createVRT", "started")
        vrt_file = self._tempfile()
        command = ["gdalbuildvrt", '-te']
        command += self.params['boundingbox']
        command += [vrt_file, self.xml_file]
        command = [str(i) for i in command]

        gscript.verbose(' '.join(command))

        self.process = subprocess.Popen(command,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
        self.out, self.err = self.process.communicate()
        gscript.verbose(self.out)

        if self.err:
            gscript.verbose(self.err+"\n")
            if "does not exist" in self.err:
                gscript.warning('Coverage "%s" cannot be opened / does not exist.' % self.params['coverage'])
            gscript.fatal("Generation of VRT-File failed (gdalbuildvrt ERROR). Set verbose-flag for details.")

        self._debug("_createVRT", "finished")
        return vrt_file
Example #16
def main():
    global TMPLOC, SRCGISRC, GISDBASE, TMP_REG_NAME

    GDALdatasource = options['input']
    output = options['output']
    method = options['resample']
    memory = options['memory']
    bands = options['band']
    tgtres = options['resolution']
    title = options["title"]
    if flags['e'] and not output:
        output = 'rimport_tmp'  # will be removed with the entire tmp location
    if options['resolution_value']:
        if tgtres != 'value':
            grass.fatal(
                _("To set custom resolution value, select 'value' in resolution option"
                  ))
        tgtres_value = float(options['resolution_value'])
        if tgtres_value <= 0:
            grass.fatal(_("Resolution value can't be smaller than 0"))
    elif tgtres == 'value':
        grass.fatal(
            _("Please provide the resolution for the imported dataset or change to 'estimated' resolution"
              ))

    # try r.in.gdal directly first
    additional_flags = 'l' if flags['l'] else ''
    if flags['o']:
        additional_flags += 'o'
    region_flag = ''
    if options['extent'] == 'region':
        region_flag += 'r'
    if flags['o'] or is_projection_matching(GDALdatasource):
        parameters = dict(input=GDALdatasource,
                          output=output,
                          memory=memory,
                          flags='ak' + additional_flags + region_flag)
        if bands:
            parameters['band'] = bands
        try:
            grass.run_command('r.in.gdal', **parameters)
            grass.verbose(
                _("Input <%s> successfully imported without reprojection") %
                GDALdatasource)
            return 0
        except CalledModuleError as e:
            grass.fatal(
                _("Unable to import GDAL dataset <%s>") % GDALdatasource)

    grassenv = grass.gisenv()
    tgtloc = grassenv['LOCATION_NAME']

    # make sure target is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for current location <%s>"
              ) % tgtloc)

    tgtmapset = grassenv['MAPSET']
    GISDBASE = grassenv['GISDBASE']

    TMPLOC = grass.append_node_pid("tmp_r_import_location")
    TMP_REG_NAME = grass.append_node_pid("tmp_r_import_region")

    SRCGISRC, src_env = grass.create_environment(GISDBASE, TMPLOC, 'PERMANENT')

    # create temp location from input without import
    grass.verbose(
        _("Creating temporary location for <%s>...") % GDALdatasource)
    # creating a new location with r.in.gdal requires a sanitized env
    env = os.environ.copy()
    env = grass.sanitize_mapset_environment(env)
    parameters = dict(input=GDALdatasource,
                      output=output,
                      memory=memory,
                      flags='c',
                      title=title,
                      location=TMPLOC,
                      quiet=True)
    if bands:
        parameters['band'] = bands
    try:
        grass.run_command('r.in.gdal', env=env, **parameters)
    except CalledModuleError:
        grass.fatal(_("Unable to read GDAL dataset <%s>") % GDALdatasource)

    # prepare to set region in temp location
    if 'r' in region_flag:
        tgtregion = TMP_REG_NAME
        grass.run_command('v.in.region', output=tgtregion, flags='d')

    # switch to temp location

    # print projection at verbose level
    grass.verbose(
        grass.read_command('g.proj', flags='p',
                           env=src_env).rstrip(os.linesep))

    # make sure input is not xy
    if grass.parse_command('g.proj', flags='g',
                           env=src_env)['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for input <%s>") %
            GDALdatasource)

    # import into temp location
    grass.verbose(
        _("Importing <%s> to temporary location...") % GDALdatasource)
    parameters = dict(input=GDALdatasource,
                      output=output,
                      memory=memory,
                      flags='ak' + additional_flags)
    if bands:
        parameters['band'] = bands
    if 'r' in region_flag:
        grass.run_command('v.proj',
                          location=tgtloc,
                          mapset=tgtmapset,
                          input=tgtregion,
                          output=tgtregion,
                          env=src_env)
        grass.run_command('g.region', vector=tgtregion, env=src_env)
        parameters['flags'] = parameters['flags'] + region_flag
    try:
        grass.run_command('r.in.gdal', env=src_env, **parameters)
    except CalledModuleError:
        grass.fatal(_("Unable to import GDAL dataset <%s>") % GDALdatasource)

    outfiles = grass.list_grouped('raster', env=src_env)['PERMANENT']

    # is output a group?
    group = False
    path = os.path.join(GISDBASE, TMPLOC, 'group', output)
    if os.path.exists(path):
        group = True
        path = os.path.join(GISDBASE, TMPLOC, 'group', output, 'POINTS')
        if os.path.exists(path):
            grass.fatal(_("Input contains GCPs, rectification is required"))

    if 'r' in region_flag:
        grass.run_command('g.remove',
                          type="vector",
                          flags="f",
                          name=tgtregion,
                          env=src_env)

    # switch to target location
    if 'r' in region_flag:
        grass.run_command('g.remove', type="vector", flags="f", name=tgtregion)

    region = grass.region()

    rflags = None
    if flags['n']:
        rflags = 'n'

    vreg = TMP_REG_NAME

    for outfile in outfiles:

        n = region['n']
        s = region['s']
        e = region['e']
        w = region['w']

        env = os.environ.copy()
        if options['extent'] == 'input':
            # r.proj -g
            try:
                tgtextents = grass.read_command('r.proj',
                                                location=TMPLOC,
                                                mapset='PERMANENT',
                                                input=outfile,
                                                flags='g',
                                                memory=memory,
                                                quiet=True)
            except CalledModuleError:
                grass.fatal(_("Unable to get reprojected map extent"))
            try:
                srcregion = grass.parse_key_val(tgtextents,
                                                val_type=float,
                                                vsep=' ')
                n = srcregion['n']
                s = srcregion['s']
                e = srcregion['e']
                w = srcregion['w']
            except ValueError:  # import into latlong, expect 53:39:06.894826N
                srcregion = grass.parse_key_val(tgtextents, vsep=' ')
                n = grass.float_or_dms(srcregion['n'][:-1]) * \
                    (-1 if srcregion['n'][-1] == 'S' else 1)
                s = grass.float_or_dms(srcregion['s'][:-1]) * \
                    (-1 if srcregion['s'][-1] == 'S' else 1)
                e = grass.float_or_dms(srcregion['e'][:-1]) * \
                    (-1 if srcregion['e'][-1] == 'W' else 1)
                w = grass.float_or_dms(srcregion['w'][:-1]) * \
                    (-1 if srcregion['w'][-1] == 'W' else 1)

            env['GRASS_REGION'] = grass.region_env(n=n, s=s, e=e, w=w)

        # v.in.region in tgt
        grass.run_command('v.in.region', output=vreg, quiet=True, env=env)

        # reproject to src
        # switch to temp location
        try:
            grass.run_command('v.proj',
                              input=vreg,
                              output=vreg,
                              location=tgtloc,
                              mapset=tgtmapset,
                              quiet=True,
                              env=src_env)
            # test if v.proj created a valid area
            if grass.vector_info_topo(vreg, env=src_env)['areas'] != 1:
                grass.fatal(_("Please check the 'extent' parameter"))
        except CalledModuleError:
            grass.fatal(_("Unable to reproject to source location"))

        # set region from region vector
        grass.run_command('g.region', raster=outfile, env=src_env)
        grass.run_command('g.region', vector=vreg, env=src_env)
        # align to first band
        grass.run_command('g.region', align=outfile, env=src_env)
        # get number of cells
        cells = grass.region(env=src_env)['cells']

        estres = math.sqrt((n - s) * (e - w) / cells)
        # remove from source location for multi bands import
        grass.run_command('g.remove',
                          type='vector',
                          name=vreg,
                          flags='f',
                          quiet=True,
                          env=src_env)

        # switch to target location
        grass.run_command('g.remove',
                          type='vector',
                          name=vreg,
                          flags='f',
                          quiet=True)

        grass.message(
            _("Estimated target resolution for input band <{out}>: {res}").
            format(out=outfile, res=estres))
        if flags['e']:
            continue

        env = os.environ.copy()

        if options['extent'] == 'input':
            env['GRASS_REGION'] = grass.region_env(n=n, s=s, e=e, w=w)

        res = None
        if tgtres == 'estimated':
            res = estres
        elif tgtres == 'value':
            res = tgtres_value
            grass.message(
                _("Using given resolution for input band <{out}>: {res}").
                format(out=outfile, res=res))
            # align to requested resolution
            env['GRASS_REGION'] = grass.region_env(res=res, flags='a', env=env)
        else:
            curr_reg = grass.region()
            grass.message(
                _("Using current region resolution for input band "
                  "<{out}>: nsres={ns}, ewres={ew}").format(
                      out=outfile, ns=curr_reg['nsres'], ew=curr_reg['ewres']))

        # r.proj
        grass.message(_("Reprojecting <%s>...") % outfile)
        try:
            grass.run_command('r.proj',
                              location=TMPLOC,
                              mapset='PERMANENT',
                              input=outfile,
                              method=method,
                              resolution=res,
                              memory=memory,
                              flags=rflags,
                              quiet=True,
                              env=env)
        except CalledModuleError:
            grass.fatal(_("Unable to to reproject raster <%s>") % outfile)

        if grass.raster_info(outfile)['min'] is None:
            grass.fatal(_("The reprojected raster <%s> is empty") % outfile)

    if flags['e']:
        return 0

    if group:
        grass.run_command('i.group', group=output, input=','.join(outfiles))

    # TODO: write metadata with r.support

    return 0
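
The estimated resolution above is the side length of an average square cell: total reprojected extent area divided by the source cell count, then the square root. The arithmetic on its own (the numbers are invented):

import math

n, s, e, w = 228500.0, 215000.0, 645000.0, 630000.0
cells = 2025000  # cell count reported by g.region in the temp location

# area per cell -> side length of a square cell
estres = math.sqrt((n - s) * (e - w) / cells)
print(estres)  # -> 10.0
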
Example #17
def main():
    # old connection
    old_database = options["old_database"]
    old_schema = options["old_schema"]
    # new connection
    default_connection = gscript.db_connection()
    if options["new_driver"]:
        new_driver = options["new_driver"]
    else:
        new_driver = default_connection["driver"]
    if options["new_database"]:
        new_database = options["new_database"]
    else:
        new_database = default_connection["database"]
    if options["new_schema"]:
        new_schema = options["new_schema"]
    else:
        new_schema = default_connection["schema"]

    if old_database == "":
        old_database = None
    old_database_subst = None
    if old_database is not None:
        old_database_subst = substitute_db(old_database)

    new_database_subst = substitute_db(new_database)

    if old_database_subst == new_database_subst and old_schema == new_schema:
        gscript.fatal(
            _("Old and new database connection is identical. "
              "Nothing to do."))

    mapset = gscript.gisenv()["MAPSET"]

    vectors = gscript.list_grouped("vect")[mapset]
    num_vectors = len(vectors)

    if flags["c"]:
        # create new database if not existing
        create_db(new_driver, new_database)

    i = 0
    for vect in vectors:
        vect = "%s@%s" % (vect, mapset)
        i += 1
        gscript.message(
            _("%s\nReconnecting vector map <%s> "
              "(%d of %d)...\n%s") %
            ("-" * 80, vect, i, num_vectors, "-" * 80))
        for f in gscript.vector_db(vect, stderr=nuldev).values():
            layer = f["layer"]
            schema_table = f["table"]
            key = f["key"]
            database = f["database"]
            driver = f["driver"]

            # split schema.table
            if "." in schema_table:
                schema, table = schema_table.split(".", 1)
            else:
                schema = ""
                table = schema_table

            if new_schema:
                new_schema_table = "%s.%s" % (new_schema, table)
            else:
                new_schema_table = table

            gscript.debug(
                "DATABASE = '%s' SCHEMA = '%s' TABLE = '%s' ->\n"
                "      NEW_DATABASE = '%s' NEW_SCHEMA_TABLE = '%s'" %
                (old_database, schema, table, new_database, new_schema_table))

            do_reconnect = True
            if old_database_subst is not None:
                if database != old_database_subst:
                    do_reconnect = False
            if database == new_database_subst:
                do_reconnect = False
            if schema != old_schema:
                do_reconnect = False

            if do_reconnect:
                gscript.verbose(_("Reconnecting layer %d...") % layer)

                if flags["c"]:
                    # check if table exists in new database
                    copy_tab(
                        driver,
                        database,
                        schema_table,
                        new_driver,
                        new_database,
                        new_schema_table,
                    )

                # drop original table if required
                if flags["d"]:
                    drop_tab(vect, layer, schema_table, driver,
                             substitute_db(database))

                # reconnect tables (don't use substituted new_database)
                # NOTE: v.db.connect creates an index on the key column
                try:
                    gscript.run_command(
                        "v.db.connect",
                        flags="o",
                        quiet=True,
                        map=vect,
                        layer=layer,
                        driver=new_driver,
                        database=new_database,
                        table=new_schema_table,
                        key=key,
                    )
                except CalledModuleError:
                    gscript.warning(
                        _("Unable to connect table <%s> to vector "
                          "<%s> on layer <%s>") % (table, vect, str(layer)))

            else:
                if database != new_database_subst:
                    gscript.warning(
                        _("Layer <%d> will not be reconnected "
                          "because database or schema do not "
                          "match.") % layer)
    return 0
Example #18
def main():
    vector = options['map']
    table = options['table']
    layer = options['layer']
    columns = options['columns']
    key = options['key']

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    if not grass.find_file(vector, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    map_name = vector.split('@')[0]

    if not table:
        if layer == '1':
            grass.verbose(
                _("Using vector map name as table name: <%s>") % map_name)
            table = map_name
        else:
            # to avoid tables with identical names on higher layers
            table = "%s_%s" % (map_name, layer)
            grass.verbose(
                _("Using vector map name extended by layer number as table name: <%s>"
                  ) % table)
    else:
        grass.verbose(_("Using user specified table name: %s") % table)

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags='c', quiet=True)
    grass.verbose(
        _("Creating new DB connection based on default mapset settings..."))
    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    schema = kv['schema']

    # maybe there is already a table linked to the selected layer?
    nuldev = open(os.devnull, 'w')
    try:
        grass.vector_db(map_name, stderr=nuldev)[int(layer)]
        grass.fatal(_("There is already a table linked to layer <%s>") % layer)
    except KeyError:
        pass

    # maybe there is already a table with that name?
    tables = grass.read_command('db.tables',
                                flags='p',
                                database=database,
                                driver=driver,
                                stderr=nuldev)
    tables = decode(tables)

    if table not in tables.splitlines():
        colnames = []
        if columns:
            column_def = []
            for x in ' '.join(columns.lower().split()).split(','):
                colname = x.split()[0]
                if colname in colnames:
                    grass.fatal(
                        _("Duplicate column name '%s' not allowed") % colname)
                colnames.append(colname)
                column_def.append(x)
        else:
            column_def = []

        # if not existing, create it:
        if key not in colnames:
            column_def.insert(0, "%s integer" % key)
        column_def = ','.join(column_def)

        grass.verbose(_("Creating table with columns (%s)...") % column_def)

        sql = "CREATE TABLE %s (%s)" % (table, column_def)
        try:
            grass.run_command('db.execute',
                              database=database,
                              driver=driver,
                              sql=sql)
        except CalledModuleError:
            grass.fatal(_("Unable to create table <%s>") % table)

    # connect the map to the DB:
    if schema:
        table = '{schema}.{table}'.format(schema=schema, table=table)
    grass.run_command('v.db.connect',
                      quiet=True,
                      map=map_name,
                      database=database,
                      driver=driver,
                      layer=layer,
                      table=table,
                      key=key)

    # finally we have to add cats into the attribute DB to make
    # modules such as v.what.rast happy: (creates new row for each
    # vector line):
    try:
        grass.run_command('v.to.db',
                          map=map_name,
                          layer=layer,
                          option='cat',
                          column=key,
                          qlayer=layer)
    except CalledModuleError:
        # remove link
        grass.run_command('v.db.connect',
                          quiet=True,
                          flags='d',
                          map=map_name,
                          layer=layer)
        return 1

    grass.verbose(_("Current attribute table links:"))
    if grass.verbosity() > 2:
        grass.run_command('v.db.connect', flags='p', map=map_name)

    # write cmd history:
    grass.vector_history(map_name)

    return 0
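
Column definitions are folded into a single CREATE TABLE statement, with the key column prepended when the user did not define it. The string handling in isolation (table name and columns are sample input):

key = 'cat'
columns = 'name varchar(99), value double precision'

colnames = []
column_def = []
for x in ' '.join(columns.lower().split()).split(','):
    colnames.append(x.split()[0])
    column_def.append(x)

# prepend the key column if the user did not define it
if key not in colnames:
    column_def.insert(0, "%s integer" % key)

print("CREATE TABLE %s (%s)" % ('mytable', ','.join(column_def)))
# CREATE TABLE mytable (cat integer,name varchar(99), value double precision)
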
Example #19
def download_gcs(scene, output):
    """Downloads a single S2 scene from Google Cloud Storage."""
    # Lazy import tqdm
    try:
        from tqdm import tqdm
    except ImportError as e:
        gs.fatal(_("Module requires tqdm library: {}").format(e))

    final_scene_dir = os.path.join(output, "{}.SAFE".format(scene))
    create_dir(final_scene_dir)
    level = scene.split("_")[1]
    if level == "MSIL1C":
        baseurl = "https://storage.googleapis.com/" "gcp-public-data-sentinel-2/tiles"
    elif level == "MSIL2A":
        baseurl = ("https://storage.googleapis.com/"
                   "gcp-public-data-sentinel-2/L2/tiles")
    tile_block = scene.split("_")[-2]
    tile_no = tile_block[1:3]
    tile_first_letter = tile_block[3]
    tile_last_letters = tile_block[4:]

    url_scene = os.path.join(baseurl, tile_no, tile_first_letter,
                             tile_last_letters, "{}.SAFE".format(scene))

    # download the manifest.safe file
    safe_file = "manifest.safe"
    safe_url = os.path.join(url_scene, safe_file)
    output_path_safe = os.path.join(final_scene_dir, safe_file)
    r_safe = requests.get(safe_url, allow_redirects=True)
    if r_safe.status_code != 200:
        gs.warning(_("Scene <{}> was not found on Google Cloud").format(scene))
        return 1
    root_manifest = ET.fromstring(r_safe.content)
    with open(output_path_safe, "wb") as safe_fh:
        safe_fh.write(r_safe.content)
    # parse manifest.safe for the rest of the data
    files_list = parse_manifest_gcs(root_manifest)

    # get all required folders
    hrefs = [file["href"] for file in files_list]
    hrefs_heads = [os.path.split(path)[0] for path in hrefs]
    required_rel_folders = list(set(hrefs_heads))
    # some paths inconsistently start with "." and some don't
    if any([not folder.startswith(".") for folder in required_rel_folders]):
        required_abs_folders = [
            os.path.join(final_scene_dir, item)
            for item in required_rel_folders if item != "."
        ]

    else:
        required_abs_folders = [
            item.replace(".", final_scene_dir) for item in required_rel_folders
            if item != "."
        ]

    # some scenes don't have additional metadata (GRANULE/.../AUX_DATA or
    # DATASTRIP/.../QI_DATA) but sen2cor seems to require at least the empty folder
    rest_folders = []
    check_folders = [("GRANULE", "AUX_DATA"), ("DATASTRIP", "QI_DATA")]
    for check_folder in check_folders:
        if (len(
                fnmatch.filter(
                    required_abs_folders,
                    "*{}*/{}*".format(check_folder[0], check_folder[1]),
                )) == 0):
            # get required path
            basepath = min([
                fol for fol in required_abs_folders if check_folder[0] in fol
            ],
                           key=len)
            rest_folders.append(os.path.join(basepath, check_folder[1]))

    # two folders are not in the manifest.safe, but the empty folders may
    # be required for other software (e.g. sen2cor)
    rest_folders.extend([
        os.path.join(final_scene_dir, "rep_info"),
        os.path.join(final_scene_dir, "AUX_DATA"),
    ])
    required_abs_folders.extend(rest_folders)

    # create folders
    for folder in required_abs_folders:
        req_folder_code = create_dir(folder)
        if req_folder_code != 0:
            return 1
    failed_downloads = []
    # no .html files are available on GCS but the folder might be required
    files_list_dl = [file for file in files_list if "HTML" not in file["href"]]
    for dl_file in tqdm(files_list_dl):
        # remove the '.' for relative path in the URLS
        if dl_file["href"].startswith("."):
            href_url = dl_file["href"][1:]
        else:
            href_url = "/{}".format(dl_file["href"])
        # neither os.path.join nor urljoin join these properly...
        dl_url = "{}{}".format(url_scene, href_url)
        output_path_file = "{}{}".format(final_scene_dir, href_url)
        checksum_function = dl_file["checksumName"].lower()
        dl_code = download_gcs_file(
            url=dl_url,
            destination=output_path_file,
            checksum_function=checksum_function,
            checksum=dl_file["checksum"],
        )
        if dl_code != 0:
            failed_downloads.append(dl_url)

    if len(failed_downloads) > 0:
        gs.verbose(
            _("Downloading was not successful for urls \n{}").format(
                "\n".join(failed_downloads)))
        gs.warning(
            _("Downloading was not successful for scene <{}>").format(scene))
        return 1
    else:
        return 0
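
The GCS path is derived purely from the scene name: the tile block (e.g. T32ULA) splits into the tile number, its first letter, and the remaining letters. The string slicing on its own, with a made-up scene ID:

scene = "S2A_MSIL1C_20200601T103031_N0209_R108_T32ULA_20200601T123709"

tile_block = scene.split("_")[-2]   # 'T32ULA'
tile_no = tile_block[1:3]           # '32'
tile_first_letter = tile_block[3]   # 'U'
tile_last_letters = tile_block[4:]  # 'LA'
level = scene.split("_")[1]         # 'MSIL1C' -> L1C base URL
print(tile_no, tile_first_letter, tile_last_letters, level)
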
Example #20
def main():
    global TMPLOC, SRCGISRC, GISDBASE
    overwrite = grass.overwrite()

    # list formats and exit
    if flags['f']:
        grass.run_command('v.in.ogr', flags='f')
        return 0

    # list layers and exit
    if flags['l']:
        try:
            grass.run_command('v.in.ogr', flags='l', input=options['input'])
        except CalledModuleError:
            return 1
        return 0

    OGRdatasource = options['input']
    output = options['output']
    layers = options['layer']

    vflags = ''
    if options['extent'] == 'region':
        vflags += 'r'
    if flags['o']:
        vflags += 'o'

    vopts = {}
    if options['encoding']:
        vopts['encoding'] = options['encoding']

    if options['datum_trans'] and options['datum_trans'] == '-1':
        # list datum transform parameters
        if not options['epsg']:
            grass.fatal(_("Missing value for parameter <%s>") % 'epsg')

        return grass.run_command('g.proj', epsg=options['epsg'],
                                 datum_trans=options['datum_trans'])

    grassenv = grass.gisenv()
    tgtloc = grassenv['LOCATION_NAME']
    tgtmapset = grassenv['MAPSET']
    GISDBASE = grassenv['GISDBASE']
    tgtgisrc = os.environ['GISRC']
    SRCGISRC = grass.tempfile()

    TMPLOC = 'temp_import_location_' + str(os.getpid())

    f = open(SRCGISRC, 'w')
    f.write('MAPSET: PERMANENT\n')
    f.write('GISDBASE: %s\n' % GISDBASE)
    f.write('LOCATION_NAME: %s\n' % TMPLOC)
    f.write('GUI: text\n')
    f.close()

    tgtsrs = grass.read_command('g.proj', flags='j', quiet=True)

    # create temp location from input without import
    grass.verbose(_("Creating temporary location for <%s>...") % OGRdatasource)
    if layers:
        vopts['layer'] = layers
    if output:
        vopts['output'] = output
    vopts['snap'] = options['snap']
    try:
        grass.run_command('v.in.ogr', input=OGRdatasource,
                          location=TMPLOC, flags='i', quiet=True, overwrite=overwrite, **vopts)
    except CalledModuleError:
        grass.fatal(_("Unable to create location from OGR datasource <%s>") % OGRdatasource)

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    if options['epsg']:  # force given EPSG
        kwargs = {}
        if options['datum_trans']:
            kwargs['datum_trans'] = options['datum_trans']
        grass.run_command('g.proj', flags='c', epsg=options['epsg'], **kwargs)

    # switch to target location
    os.environ['GISRC'] = str(tgtgisrc)

    # try v.in.ogr directly
    if flags['o'] or grass.run_command('v.in.ogr', input=OGRdatasource, flags='j',
                                       errors='status', quiet=True, overwrite=overwrite) == 0:
        try:
            grass.run_command('v.in.ogr', input=OGRdatasource,
                              flags=vflags, overwrite=overwrite, **vopts)
            grass.message(
                _("Input <%s> successfully imported without reprojection") %
                OGRdatasource)
            return 0
        except CalledModuleError:
            grass.fatal(_("Unable to import <%s>") % OGRdatasource)

    # make sure target is not xy
    if grass.parse_command('g.proj', flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for current location <%s>") %
            tgtloc)

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    # print projection at verbose level
    grass.verbose(grass.read_command('g.proj', flags='p').rstrip(os.linesep))

    # make sure input is not xy
    if grass.parse_command('g.proj', flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(_("Coordinate reference system not available for input <%s>") % OGRdatasource)

    if options['extent'] == 'region':
        # switch to target location
        os.environ['GISRC'] = str(tgtgisrc)

        # v.in.region in tgt
        vreg = 'vreg_' + str(os.getpid())
        grass.run_command('v.in.region', output=vreg, quiet=True)

        # reproject to src
        # switch to temp location
        os.environ['GISRC'] = str(SRCGISRC)
        try:
            grass.run_command('v.proj', input=vreg, output=vreg,
                              location=tgtloc, mapset=tgtmapset, quiet=True, overwrite=overwrite)
        except CalledModuleError:
            grass.fatal(_("Unable to reproject to source location"))

        # set region from region vector
        grass.run_command('g.region', res='1')
        grass.run_command('g.region', vector=vreg)

    # import into temp location
    grass.message(_("Importing <%s> ...") % OGRdatasource)
    try:
        grass.run_command('v.in.ogr', input=OGRdatasource,
                          flags=vflags, overwrite=overwrite, **vopts)
    except CalledModuleError:
        grass.fatal(_("Unable to import OGR datasource <%s>") % OGRdatasource)

    # if output is not defined, check the source mapset
    if not output:
        output = grass.list_grouped('vector')['PERMANENT'][0]

    # switch to target location
    os.environ['GISRC'] = str(tgtgisrc)

    # check if map exists
    if not grass.overwrite() and \
       grass.find_file(output, element='vector', mapset='.')['mapset']:
        grass.fatal(_("option <%s>: <%s> exists.") % ('output', output))

    if options['extent'] == 'region':
        grass.run_command('g.remove', type='vector', name=vreg,
                          flags='f', quiet=True)

    # v.proj
    grass.message(_("Reprojecting <%s>...") % output)
    try:
        grass.run_command('v.proj', location=TMPLOC,
                          mapset='PERMANENT', input=output, overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to to reproject vector <%s>") % output)

    return 0
Example #21
def main():
    map = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['otable']
    ocolumn = options['ocolumn']
    if options['scolumns']:
        scolumns = options['scolumns'].split(',')
    else:
        scolumns = None
    
    f = grass.vector_layer_db(map, layer)

    maptable = f['table']
    database = f['database']
    driver   = f['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(_("There is no table connected to this map. Unable to join any column."))

    # check if column is in map table
    if column not in grass.vector_columns(map, layer):
        grass.fatal(_("Column <%s> not found in table <%s>") % (column, maptable))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver = driver, database = database)['cols']

    # check if ocolumn is on other table
    if ocolumn not in [ocol[0] for ocol in all_cols_ot]:
        grass.fatal(_("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine columns subset from other table
    if not scolumns:
        # select all columns from other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        # check if scolumns exist in the other table
        for scol in scolumns:
            found = False
            for col_ot in all_cols_ot:
                if scol == col_ot[0]:
                    found = True
                    cols_to_add.append(col_ot)
                    break
            if not found:
                grass.warning(_("Column <%s> not found in table <%s>.") % (scol, otable))

    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in cols_to_add:
        # skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue
        # Sqlite 3 does not support the precision number any more
        if len(col) > 2 and driver != "sqlite":
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table
        if colname not in all_cols_tt:
            if grass.run_command('v.db.addcolumn', map=map, columns=colspec, layer=layer) != 0:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table=maptable, column=column,
                                   otable=otable, ocolumn=ocolumn,
                                   colname=colname)
        grass.debug(stmt, 1)
        grass.verbose(_("Updating column <%s> of vector map <%s>...") % (colname, map))
        if grass.write_command('db.execute', stdin=stmt, input='-', database=database, driver=driver) != 0:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(map)

    return 0
Example #22
def main(options, flags):
    # lazy imports
    import grass.temporal as tgis
    import grass.pygrass.modules as pymod

    # Get the options
    points = options["points"]
    coordinates = options["coordinates"]
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    layout = options["layout"]
    null_value = options["null_value"]
    separator = gscript.separator(options["separator"])

    nprocs = int(options["nprocs"])
    write_header = flags["n"]
    use_stdin = flags["i"]
    vcat = flags["v"]

    #output_cat_label = flags["f"]
    #output_color = flags["r"]
    #output_cat = flags["i"]

    overwrite = gscript.overwrite()

    if coordinates and points:
        gscript.fatal(
            _("Options coordinates and points are mutually exclusive"))

    if not coordinates and not points and not use_stdin:
        gscript.fatal(
            _("Please provide the sampling coordinates via the 'coordinates' "
              "option, the 'points' option, or the 'i' flag to pipe them to "
              "t.rast.what from stdin"))

    if vcat and not points:
        gscript.fatal(_("Flag 'v' required option 'points'"))

    if use_stdin:
        coordinates_stdin = str(sys.__stdin__.read())
        # Check if coordinates are given with site names or IDs
        stdin_length = len(coordinates_stdin.split('\n')[0].split())
        if stdin_length <= 2:
            site_input = False
        elif stdin_length >= 3:
            site_input = True
    else:
        site_input = False

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where,
                                             order=order,
                                             dbif=dbif)
    dbif.close()
    if not maps:
        gscript.fatal(
            _("Space time raster dataset <%s> is empty") % sp.get_id())

    # Setup flags are disabled due to test issues
    flags = ""
    #if output_cat_label is True:
    #    flags += "f"
    #if output_color is True:
    #    flags += "r"
    #if output_cat is True:
    #    flags += "i"
    if vcat is True:
        flags += "v"

    # Configure the r.what module
    if points:
        r_what = pymod.Module("r.what",
                              map="dummy",
                              output="dummy",
                              run_=False,
                              separator=separator,
                              points=points,
                              overwrite=overwrite,
                              flags=flags,
                              null_value=null_value,
                              quiet=True)
    elif coordinates:
        # Create a list of values
        coord_list = coordinates.split(",")
        r_what = pymod.Module("r.what",
                              map="dummy",
                              output="dummy",
                              run_=False,
                              separator=separator,
                              coordinates=coord_list,
                              overwrite=overwrite,
                              flags=flags,
                              null_value=null_value,
                              quiet=True)
    elif use_stdin:
        r_what = pymod.Module("r.what",
                              map="dummy",
                              output="dummy",
                              run_=False,
                              separator=separator,
                              stdin_=coordinates_stdin,
                              overwrite=overwrite,
                              flags=flags,
                              null_value=null_value,
                              quiet=True)
    else:
        gscript.error(_("Please specify points or coordinates"))

    if len(maps) < nprocs:
        nprocs = len(maps)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)

    # 400 maps is the absolute maximum in r.what
    # We need to determine the number of maps that can be processed
    # in parallel

    # First estimate the number of maps per process. We use 400 maps
    # simultaneously as the maximum for a single process

    num_loops = int(num_maps / (400 * nprocs))
    remaining_maps = num_maps % (400 * nprocs)

    if num_loops == 0:
        num_loops = 1
        remaining_maps = 0

    # Compute the number of maps for each process
    maps_per_loop = int((num_maps - remaining_maps) / num_loops)
    maps_per_process = int(maps_per_loop / nprocs)
    remaining_maps_per_loop = maps_per_loop % nprocs

    # We put the output files in an ordered list
    output_files = []
    output_time_list = []

    count = 0
    for loop in range(num_loops):
        file_name = gscript.tempfile() + "_%i" % (loop)
        count = process_loop(nprocs, maps, file_name, count, maps_per_process,
                             remaining_maps_per_loop, output_files,
                             output_time_list, r_what, process_queue)

    process_queue.wait()

    gscript.verbose("Number of raster map layers remaining for sampling %i" %
                    (remaining_maps))
    if remaining_maps > 0:
        # Use a single process if 100 maps or fewer remain
        if remaining_maps <= 100:
            map_names = []
            map_list = []
            for i in range(remaining_maps):
                map = maps[count]
                map_names.append(map.get_id())
                map_list.append(map)
                count += 1
            mod = copy.deepcopy(r_what)
            mod(map=map_names, output=file_name)
            process_queue.put(mod)
            # record this batch so it is merged like the others below
            output_files.append(file_name)
            output_time_list.append(map_list)
        else:
            maps_per_process = int(remaining_maps / nprocs)
            remaining_maps_per_loop = remaining_maps % nprocs

            file_name = "out_remain"
            process_loop(nprocs, maps, file_name, count, maps_per_process,
                         remaining_maps_per_loop, output_files,
                         output_time_list, r_what, process_queue)

    # Wait for unfinished processes
    process_queue.wait()

    # Put the output files together in the correct order
    if layout == "row":
        one_point_per_row_output(separator, output_files, output_time_list,
                                 output, write_header, site_input, vcat)
    elif layout == "col":
        one_point_per_col_output(separator, output_files, output_time_list,
                                 output, write_header, site_input, vcat)
    else:
        one_point_per_timerow_output(separator, output_files, output_time_list,
                                     output, write_header, site_input, vcat)
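The pattern above (preconfigure a Module with run_=False, deepcopy it per batch, then push the copies into a ParallelModuleQueue) can be shown in isolation. A minimal sketch, assuming an active GRASS session; the map names map1..map3, the points vector sites and the batch sizes are hypothetical:

import copy

from grass.pygrass.modules import Module, ParallelModuleQueue

# Preconfigured but not executed yet (run_=False)
r_what = Module("r.what", map="dummy", output="dummy", run_=False,
                separator="|", points="sites", quiet=True)

queue = ParallelModuleQueue(nprocs=2)
for i, batch in enumerate([["map1", "map2"], ["map3"]]):
    mod = copy.deepcopy(r_what)           # independent copy per batch
    mod(map=batch, output="out_%i" % i)   # only updates the parameters
    queue.put(mod)                        # starts running asynchronously
queue.wait()                              # block until all copies finish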
Example #23
0
def main():
    global tmp, tmp_proj, tmp_gpx, tmp_extr, tmp_vogb

    format = options['format']
    input = options['input']
    layer = options['layer']
    output = options['output']
    type = options['type']
    where = options['where']
    wpt = flags['w']
    rte = flags['r']
    trk = flags['t']

    nflags = len(filter(None, [wpt, rte, trk]))
    if nflags > 1:
	grass.fatal(_("One feature at a time please."))
    if nflags < 1:
	grass.fatal(_("No features requested for export."))

    # set some reasonable defaults
    if not type:
	if wpt:
	    type = 'point'
	else:
	    type = 'line'

    #### check for gpsbabel
    ### FIXME: may need --help or similar?
    if not grass.find_program("gpsbabel"):
	grass.fatal(_("The gpsbabel program was not found, please install it first.\n") +
		    "http://gpsbabel.sourceforge.net")

    #### check for cs2cs
    if not grass.find_program("cs2cs"):
	grass.fatal(_("The cs2cs program was not found, please install it first.\n") +
		    "http://proj.osgeo.org")

    # check if we will overwrite data
    if os.path.exists(output) and not grass.overwrite():
	grass.fatal(_("Output file already exists."))

    #### set temporary files
    tmp = grass.tempfile()

    # SQL extract if needed
    if where:
	grass.verbose("Extracting data ...")
	tmp_extr = "tmp_vogb_extr_%d" % os.getpid()
	ret = grass.run_command('v.extract', input = input,
				output = tmp_extr, type = type, layer = layer,
				where = where, quiet = True)
	if ret != 0:
	    grass.fatal(_("Error executing SQL query"))

	kv = grass.vector_info_topo(tmp_extr)
	if kv['primitives'] == 0:
	    grass.fatal(_("SQL query returned an empty map (no %s features?)") % type)

	inmap = tmp_extr
    else:
	#   g.copy "$GIS_OPT_INPUT,tmp_vogb_extr_$$"   # to get a copy of DB into local mapset
	#   INMAP="tmp_vogb_extr_$$"
	inmap = input

    #### set up projection info
    # TODO: check if we are already in ll/WGS84.  If so skip m.proj step.

    # TODO: multi layer will probably fail badly due to sed 's/^ 1   /'
    #   output as old GRASS 4 vector ascii and fight with dig_ascii/?
    #   Change to s/^ \([0-9]   .*\)    /# \1/' ??? mmph.

    # reproject to lat/lon WGS84
    grass.verbose("Reprojecting data ...")

    # sed-style \(...\) groups do not capture in Python; plain (...) is
    # needed so the \1 backreferences in the sub() calls below work
    re1 = re.compile(r'^([PLBCFKA])')
    re2 = re.compile(r'^ 1     ')

    re3 = re.compile(r'\t([-\.0-9]*) .*')
    re4 = re.compile(r'^([-\.0-9])')
    re5 = re.compile(r'^#')

    tmp_proj = tmp + ".proj"
    tf = open(tmp_proj, 'w')
    p1 = grass.pipe_command('v.out.ascii', input = inmap, format = 'standard')
    p2 = grass.feed_command('m.proj', input = '-', flags = 'od', quiet = True, stdout = tf)
    tf.close()

    lineno = 0
    for line in p1.stdout:
	lineno += 1
	if lineno < 11:
	    continue
	line = re1.sub(r'#\1', line)
	line = re2.sub(r'# 1  ', line)
	p2.stdin.write(line)

    p2.stdin.close()
    p1.wait()
    p2.wait()

    if p1.returncode != 0 or p2.returncode != 0:
	grass.fatal(_("Error reprojecting data"))

    tmp_vogb = "tmp_vogb_epsg4326_%d" % os.getpid()
    p3 = grass.feed_command('v.in.ascii', out = tmp_vogb, format = 'standard', flags = 'n', quiet = True)
    tf = open(tmp_proj, 'r')

    for line in tf:
	line = re3.sub(r' \1', line)
	line = re4.sub(r' \1', line)
	line = re5.sub('', line)
	p3.stdin.write(line)

    p3.stdin.close()
    tf.close()
    p3.wait()

    if p3.returncode != 0:
	grass.fatal(_("Error reprojecting data"))

    # don't v.db.connect directly as source table will be removed with
    # temporary map in that case. So we make a temp copy of it to work with.
    kv = vector_db(inmap)
    if layer in kv:
	db_params = kv[layer]

	db_table = db_params['table']
	db_key = db_params['key']
	db_database = db_params['database']
	db_driver = db_params['driver']

	ret = grass.run_command('db.copy',
				from_driver = db_driver,
				from_database = db_database,
				from_table = db_table,
				to_table = tmp_vogb)
	if ret != 0:
	    grass.fatal(_("Error copying temporary DB"))

	ret = grass.run_command('v.db.connect', map = tmp_vogb, table = tmp_vogb, quiet = True)
	if ret != 0:
	    grass.fatal(_("Error reconnecting temporary DB"))

    # export as GPX using v.out.ogr
    if trk:
	linetype = "FORCE_GPX_TRACK=YES"
    elif rte:
	linetype = "FORCE_GPX_TRACK=YES"
    else:
	linetype = None

    # BUG: cat is being reported as elevation and attribute output is skipped.
    #   (v.out.ogr DB reading or ->OGR GPX driver bug<-
    #     resolved: see new Create opts at http://www.gdal.org/ogr/drv_gpx.html)
    #   v.out.ogr -> shapefile -> GPX works, but we try to avoid that as it's
    #     lossy. Also that would allow ogr2ogr -a_srs $IN_PROJ -t_srs EPSG:4326
    #     so skip m.proj pains.. if that is done ogr2ogr -s_srs MUST HAVE +wktext
    #     with PROJ.4 terms or else the +nadgrids will be ignored! best to feed
    #     it  IN_PROJ="`g.proj -jf` +wktext"  in that case.

    grass.verbose("Exporting data ...")

    tmp_gpx = tmp + ".gpx"
    ret = grass.run_command('v.out.ogr', input = tmp_vogb, dsn = tmp_gpx,
			    type = type, format = 'GPX', lco = linetype,
			    dsco = "GPX_USE_EXTENSIONS=YES", quiet = True)
    if ret != 0:
	grass.fatal(_("Error exporting data"))

    if format == 'gpx':
	# short circuit, we have what we came for.
	grass.try_remove(output)
	os.rename(tmp_gpx, output)
	grass.verbose("Fast exit.")
	sys.exit()

    # run gpsbabel
    if wpt:
	gtype = '-w'
    elif trk:
	gtype = '-t'
    elif rte:
	gtype = '-r'
    else:
	gtype = ''

    grass.verbose("Running GPSBabel ...")

    ret = grass.call(['gpsbabel',
		      gtype,
		      '-i', 'gpx',
		      '-f', tmp + '.gpx',
		      '-o', format,
		      '-F', output])

    if ret != 0:
	grass.fatal(_("Error running GPSBabel"))

    grass.verbose("Done.")
Example #24
0
def export_png_in_projection(src_mapset_name, map_name, output_file,
                             epsg_code,
                             routpng_flags, compression, wgs84_file,
                             use_region=True):
    """

    :param use_region: use computation region and not map extent
    """
    if use_region:
        src_region = get_region()
        src_proj_string = get_location_proj_string()

    # TODO: change only location and not gisdbase?
    # we rely on the tmp dir having enough space for our map
    tgt_gisdbase = tempfile.mkdtemp()
    # this is not needed if we use mkdtemp but why not
    tgt_location = 'r.out.png.proj_location_%s' % epsg_code
    # because we are using PERMANENT we don't have to create mapset explicitly
    tgt_mapset_name = 'PERMANENT'

    src_mapset = Mapset(name=src_mapset_name, use_current=True)
    assert src_mapset.exists()

    # get source (old) and set target (new) GISRC environment variable
    # TODO: setting environ only for child processes could be enough and it
    # would enable (?) parallel runs
    src_gisrc = os.environ['GISRC']
    tgt_gisrc = gsetup.write_gisrc(tgt_gisdbase,
                                   tgt_location, tgt_mapset_name)
    os.environ['GISRC'] = tgt_gisrc
    # we do this only after we obtained region, so it was applied
    # and we don't need it in the temporary (tgt) mapset
    if os.environ.get('WIND_OVERRIDE'):
        old_temp_region = os.environ['WIND_OVERRIDE']
        del os.environ['WIND_OVERRIDE']
    else:
        old_temp_region = None

    tgt_mapset = Mapset(tgt_gisdbase, tgt_location, tgt_mapset_name)

    try:
        # the function itself is not safe for other (background) processes
        # (e.g. GUI), however we already switched GISRC for us
        # and child processes, so we don't influence others
        gs.create_location(dbase=tgt_gisdbase,
                           location=tgt_location,
                           epsg=epsg_code,
                           datum=None,
                           datum_trans=None)
        assert tgt_mapset.exists()

        # we need to make the mapset change in the current GISRC (tgt)
        # note that the C library for this process still holds the
        # path to the old GISRC file (src)
        tgt_mapset.set_as_current(gisrc=tgt_gisrc)

        # setting region
        if use_region:
            # respecting computation region of the src location
            # by previous use g.region in src location
            # and m.proj and g.region now
            # respecting MASK of the src location would be hard
            # null values in map are usually enough
            tgt_proj_string = get_location_proj_string()
            tgt_region = reproject_region(src_region,
                                          from_proj=src_proj_string,
                                          to_proj=tgt_proj_string)
            # uses g.region and thus sets the region only for child processes
            # which is enough now
            # TODO: unlike the other branch, this keeps the current
            # resolution which is not correct
            set_region(tgt_region)
        else:
            # find out map extent to import everything
            # using only classic API because of some problems with pygrass
            # on ms windows
            rproj_out = gs.read_command('r.proj', input=map_name,
                                        dbase=src_mapset.database,
                                        location=src_mapset.location,
                                        mapset=src_mapset.name,
                                        output=map_name, flags='g')
            a = gs.parse_key_val(rproj_out, sep='=', vsep=' ')
            gs.run_command('g.region', **a)

        # map import
        gs.message("Reprojecting...")
        gs.run_command('r.proj', input=map_name, dbase=src_mapset.database,
                       location=src_mapset.location, mapset=src_mapset.name,
                       output=map_name, quiet=True)

        # actual export
        gs.message("Rendering...")
        raster_to_png(map_name, output_file, compression=compression,
                      routpng_flags=routpng_flags)

        # outputting file with WGS84 coordinates
        if wgs84_file:
            gs.verbose("Projecting coordinates to LL WGS 84...")
            with open(wgs84_file, 'w') as data_file:
                if use_region:
                    # map which is smaller than region is imported in its own
                    # small extent, but we export image in region, so we need
                    # bounds to be for region, not map
                    # hopefully this is consistent with r.out.png behavior
                    data_file.write(
                        map_extent_to_file_content(
                            proj_to_wgs84(get_region())) + '\n')
                else:
                    # use map to get extent
                    # the result is actually the same as using map
                    # if region is the same as map (use_region == False)
                    data_file.write(
                        map_extent_to_file_content(
                            get_map_extent_for_location(map_name)) +
                        '\n')

    finally:
        # just in case we need to do something in the old location
        # our callers probably do
        os.environ['GISRC'] = src_gisrc
        if old_temp_region:
            os.environ['WIND_OVERRIDE'] = old_temp_region
        # set current in library
        src_mapset.set_as_current(gisrc=src_gisrc)

        # delete the whole gisdbase
        # delete file by file to ensure that we are deleting only our things
        # exception will be raised when removing non-empty directory
        tgt_mapset.delete()
        os.rmdir(tgt_mapset.location_path)
        # dir created by tempfile.mkdtemp() needs to be removed manually
        os.rmdir(tgt_gisdbase)
        # we have to remove file created by tempfile.mkstemp function
        # in write_gisrc function
        os.remove(tgt_gisrc)
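The core trick in this example, pointing GISRC at a scratch location and always restoring it in a finally block, reduces to a few lines. A minimal sketch; the gisdbase and location paths are hypothetical:

import os

import grass.script.setup as gsetup

src_gisrc = os.environ['GISRC']
tgt_gisrc = gsetup.write_gisrc('/tmp/scratch_gisdbase',
                               'scratch_location', 'PERMANENT')
os.environ['GISRC'] = tgt_gisrc
try:
    pass  # work inside the scratch location here
finally:
    os.environ['GISRC'] = src_gisrc  # always restore the original session
    os.remove(tgt_gisrc)             # write_gisrc creates a temporary file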
Example #25
0
def main():
    """Do the main processing"""

    # Lazy import GDAL python bindings
    try:
        from osgeo import gdal, ogr, osr
    except ImportError as e:
        grass.fatal(_("Module requires GDAL python bindings: {}").format(e))

    # Parse input options:
    patch_map = options["input"]
    patches = patch_map.split("@")[0]
    patches_mapset = patch_map.split("@")[1] if len(
        patch_map.split("@")) > 1 else None
    pop_proxy = options["pop_proxy"]
    layer = options["layer"]
    costs = options["costs"]
    cutoff = float(options["cutoff"])
    border_dist = int(options["border_dist"])
    conefor_dir = options["conefor_dir"]
    memory = int(options["memory"])

    # Parse output options:
    prefix = options["prefix"]
    edge_map = "{}_edges".format(prefix)
    vertex_map = "{}_vertices".format(prefix)
    shortest_paths = "{}_shortest_paths".format(prefix)

    # Parse flags:
    p_flag = flags["p"]
    t_flag = flags["t"]
    r_flag = flags["r"]

    dist_flags = "kn" if flags["k"] else "n"

    lin_cat = 1
    zero_dist = None

    folder = grass.tempdir()
    if not os.path.exists(folder):
        os.makedirs(folder)

    # Setup counter for progress message
    counter = 0

    # Check if location is lat/lon (only in lat/lon geodesic distance
    # measuring is supported)
    if grass.locn_is_latlong():
        grass.verbose("Location is lat/lon: Geodesic distance \
                      measure is used")

    # Check if prefix is legal GRASS name
    if not grass.legal_name(prefix):
        grass.fatal("{} is not a legal name for GRASS \
                    maps.".format(prefix))

    if prefix[0].isdigit():
        grass.fatal("Tables names starting with a digit are not SQL \
                    compliant.".format(prefix))

    # Check if output maps not already exists or could be overwritten
    for output in [edge_map, vertex_map, shortest_paths]:
        if grass.db.db_table_exist(output) and not grass.overwrite():
            grass.fatal("Vector map <{}> already exists".format(output))

    # Check if input has required attributes
    in_db_connection = grass.vector.vector_db(patch_map)
    if not int(layer) in in_db_connection.keys():
        grass.fatal("No attribute table connected vector map {} at \
                    layer {}.".format(patches, layer))

    # Read the column definitions of the patch map
    pcols = grass.vector.vector_columns(patch_map, layer=layer)

    # Check if cat column exists
    if "cat" not in pcols.keys():
        grass.fatal("Cannot find the reqired column cat in vector map \
                    {}.".format(patches))

    # Check if pop_proxy column exists
    if pop_proxy not in pcols.keys():
        grass.fatal("Cannot find column {} in vector map \
                    {}".format(pop_proxy, patches))

    # Check if pop_proxy column is numeric type
    if not pcols[pop_proxy]["type"] in ["INTEGER", "REAL", "DOUBLE PRECISION"]:
        grass.fatal("Column {} is of type {}. Only numeric types \
                    (integer or double precision) \
                    allowed!".format(pop_proxy, pcols[pop_proxy]["type"]))

    # Check if pop_proxy column does not contain values <= 0
    pop_vals = np.fromstring(
        grass.read_command("v.db.select",
                           flags="c",
                           map=patches,
                           columns=pop_proxy,
                           nv=-9999).rstrip("\n"),
        dtype=float,
        sep="\n",
    )
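    # v.db.select -c prints one pop_proxy value per line; NULL records
    # become -9999 (nv), so the check below also catches NULLs.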

    if np.min(pop_vals) <= 0:
        grass.fatal("Column {} contains values <= 0 or NULL. Neither \
                    values <= 0 nor NULL allowed!}".format(pop_proxy))

    ##############################################
    # Use pygrass region instead of grass.parse_command !?!
    start_reg = grass.parse_command("g.region", flags="ugp")

    max_n = start_reg["n"]
    min_s = start_reg["s"]
    max_e = start_reg["e"]
    min_w = start_reg["w"]
    # cost_nsres = reg['nsres']
    # cost_ewres = reg['ewres']

    # Rasterize patches
    # http://www.gdal.org/gdal_tutorial.html
    # http://geoinformaticstutorial.blogspot.no/2012/11/convert-
    # shapefile-to-raster-with-gdal.html
    if t_flag:
        # Rasterize patches with "all-touched" mode using GDAL
        # Read region settings (not needed, we can use max_n, min_s, max_e,
        # min_w, nsres, ewres ...)
        prast = os.path.join(folder, "patches_rast.tif")

        # Check if GDAL-GRASS plugin is installed
        if ogr.GetDriverByName("GRASS"):
            # With GDAL-GRASS plugin
            # Locate file for patch vector map
            pfile = grass.parse_command("g.findfile",
                                        element="vector",
                                        file=patches,
                                        mapset=patches_mapset)["file"]
            pfile = os.path.join(pfile, "head")

        else:
            # Without GDAL-GRASS-plugin
            grass.warning("Cannot find GDAL-GRASS plugin. Consider \
                          installing it in order to save time for \
                          all-touched rasterisation")
            pfile = os.path.join(folder, "patches_vect.gpkg")
            # Export patch vector map to temp-file in a GDAL-readable
            # format (GPKG)
            grass.run_command(
                "v.out.ogr",
                flags="m",
                quiet=True,
                input=patch_map,
                type="area",
                layer=layer,
                output=pfile,
                lco="GEOMETRY_NAME=geom",
            )

        # Rasterize vector map with all-touched option
        os.system("gdal_rasterize -l {} -at -tr {} {} \
                  -te {} {} {} {} -ot UInt32 -a cat \
                  {} {} -q".format(
            patches,
            start_reg["ewres"],
            start_reg["nsres"],
            start_reg["w"],
            start_reg["s"],
            start_reg["e"],
            start_reg["n"],
            pfile,
            prast,
        ))

        if not ogr.GetDriverByName("GRASS"):
            # Remove vector temp-file
            os.remove(os.path.join(folder, "patches_vect.gpkg"))

        # Import rasterized patches
        grass.run_command(
            "r.external",
            flags="o",
            quiet=True,
            input=prast,
            output="{}_patches_pol".format(TMP_PREFIX),
        )

    else:
        # Simple rasterisation (only area)
        # in G 7.6 also with support for 'centroid'
        if float(grass.version()["version"][:3]) >= 7.6:
            conv_types = ["area", "centroid"]
        else:
            conv_types = ["area"]
        grass.run_command(
            "v.to.rast",
            quiet=True,
            input=patches,
            use="cat",
            type=conv_types,
            output="{}_patches_pol".format(TMP_PREFIX),
        )

    # Extract boundaries from patch raster map
    grass.run_command(
        "r.mapcalc",
        expression="{p}_patches_boundary=if(\
    {p}_patches_pol,\
    if((\
    (isnull({p}_patches_pol[-1,0])||| \
    {p}_patches_pol[-1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,1])||| \
    {p}_patches_pol[0,1]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[1,0])||| \
    {p}_patches_pol[1,0]!={p}_patches_pol)||| \
    (isnull({p}_patches_pol[0,-1])||| \
    {p}_patches_pol[0,-1]!={p}_patches_pol)), \
    {p}_patches_pol,null()), null())".format(p=TMP_PREFIX),
        quiet=True,
    )
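    # The [row,col] offsets address the four direct neighbours; a cell
    # keeps its category only if at least one neighbour is null or
    # belongs to another patch, so only boundary cells survive.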

    rasterized_cats = (grass.read_command(
        "r.category",
        separator="newline",
        map="{p}_patches_boundary".format(p=TMP_PREFIX),
    ).replace("\t", "").strip("\n"))
    rasterized_cats = list(
        map(int, set([x for x in rasterized_cats.split("\n") if x != ""])))

    # Init output vector maps if they are requested by user
    network = VectorTopo(edge_map)
    network_columns = [
        (u"cat", "INTEGER PRIMARY KEY"),
        (u"from_p", "INTEGER"),
        (u"to_p", "INTEGER"),
        (u"min_dist", "DOUBLE PRECISION"),
        (u"dist", "DOUBLE PRECISION"),
        (u"max_dist", "DOUBLE PRECISION"),
    ]
    network.open("w", tab_name=edge_map, tab_cols=network_columns)

    vertex = VectorTopo(vertex_map)
    vertex_columns = [
        (u"cat", "INTEGER PRIMARY KEY"),
        (pop_proxy, "DOUBLE PRECISION"),
    ]
    vertex.open("w", tab_name=vertex_map, tab_cols=vertex_columns)

    if p_flag:
        # Init cost paths file for start-patch
        grass.run_command("v.edit",
                          quiet=True,
                          map=shortest_paths,
                          tool="create")
        grass.run_command(
            "v.db.addtable",
            quiet=True,
            map=shortest_paths,
            columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision",
        )

    start_region_bbox = Bbox(north=float(max_n),
                             south=float(min_s),
                             east=float(max_e),
                             west=float(min_w))
    vpatches = VectorTopo(patches, mapset=patches_mapset)
    vpatches.open("r", layer=int(layer))

    # Loop through patches
    vpatch_ids = np.array(
        vpatches.features_to_wkb_list(feature_type="centroid",
                                      bbox=start_region_bbox),
        dtype=[("vid", "uint32"), ("cat", "uint32"), ("geom", "|S10")],
    )
    cats = set(vpatch_ids["cat"])
    n_cats = len(cats)
    if n_cats < len(vpatch_ids["cat"]):
        grass.verbose("At least one MultiPolygon found in patch map.\n \
                      Using average coordinates of the centroids for \
                      visual representation of the patch.")

    for cat in cats:
        if cat not in rasterized_cats:
            grass.warning("Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.".format(cat))

            continue
        grass.verbose("Calculating connectivity-distances for patch \
                      number {}".format(cat))

        # Filter
        from_vpatch = vpatch_ids[vpatch_ids["cat"] == cat]

        # Get patch ID
        if from_vpatch["vid"].size == 1:
            from_centroid = Centroid(v_id=int(from_vpatch["vid"]),
                                     c_mapinfo=vpatches.c_mapinfo)
            from_x = from_centroid.x
            from_y = from_centroid.y

            # Get centroid
            if not from_centroid:
                continue
        else:
            xcoords = []
            ycoords = []
            for f_p in from_vpatch["vid"]:
                from_centroid = Centroid(v_id=int(f_p),
                                         c_mapinfo=vpatches.c_mapinfo)
                xcoords.append(from_centroid.x)
                ycoords.append(from_centroid.y)

                # Get centroid
                if not from_centroid:
                    continue
            from_x = np.average(xcoords)
            from_y = np.average(ycoords)

        # Get BoundingBox
        from_bbox = grass.parse_command("v.db.select",
                                        map=patch_map,
                                        flags="r",
                                        where="cat={}".format(cat))

        attr_filter = vpatches.table.filters.select(pop_proxy)
        attr_filter = attr_filter.where("cat={}".format(cat))
        proxy_val = vpatches.table.execute().fetchone()

        # Prepare start patch
        start_patch = "{}_patch_{}".format(TMP_PREFIX, cat)
        reclass_rule = grass.encode("{} = 1\n* = NULL".format(cat))
        recl = grass.feed_command(
            "r.reclass",
            quiet=True,
            input="{}_patches_boundary".format(TMP_PREFIX),
            output=start_patch,
            rules="-",
        )
        recl.stdin.write(reclass_rule)
        recl.stdin.close()
        recl.wait()

        # Check if patch was rasterised (patches smaller raster resolution and close to larger patches may not be rasterised)
        # start_check = grass.parse_command('r.info', flags='r', map=start_patch)
        # start_check = grass.parse_command('r.univar', flags='g', map=start_patch)
        # print(start_check)
        """if start_check['min'] != '1':
            grass.warning('Patch {} has not been rasterized and will \
                          therefore not be treated as part of the \
                          network. Consider using t-flag or change \
                          resolution.'.format(cat))

            grass.run_command('g.remove', flags='f', vector=start_patch,
                              raster=start_patch, quiet=True)
            grass.del_temp_region()
            continue"""

        # Prepare stop patches
        ############################################
        reg = grass.parse_command(
            "g.region",
            flags="ug",
            quiet=True,
            raster=start_patch,
            n=float(from_bbox["n"]) + float(cutoff),
            s=float(from_bbox["s"]) - float(cutoff),
            e=float(from_bbox["e"]) + float(cutoff),
            w=float(from_bbox["w"]) - float(cutoff),
            align="{}_patches_pol".format(TMP_PREFIX),
        )

        north = reg["n"] if max_n > reg["n"] else max_n
        south = reg["s"] if min_s < reg["s"] else min_s
        east = reg["e"] if max_e < reg["e"] else max_e
        west = reg["w"] if min_w > reg["w"] else min_w

        # Set region to patch search radius
        grass.use_temp_region()
        grass.run_command(
            "g.region",
            quiet=True,
            n=north,
            s=south,
            e=east,
            w=west,
            align="{}_patches_pol".format(TMP_PREFIX),
        )

        # Create buffer around start-patch as a mask
        # for cost distance analysis
        grass.run_command("r.buffer",
                          quiet=True,
                          input=start_patch,
                          output="MASK",
                          distances=cutoff)
        grass.run_command(
            "r.mapcalc",
            quiet=True,
            expression="{pf}_patch_{p}_neighbours_contur=\
                                     if({pf}_patches_boundary=={p},\
                                     null(),\
                                     {pf}_patches_boundary)".format(
                pf=TMP_PREFIX, p=cat),
        )
        grass.run_command("r.mask", flags="r", quiet=True)

        # Calculate cost distance
        cost_distance_map = "{}_patch_{}_cost_dist".format(prefix, cat)
        grass.run_command(
            "r.cost",
            flags=dist_flags,
            quiet=True,
            overwrite=True,
            input=costs,
            output=cost_distance_map,
            start_rast=start_patch,
            memory=memory,
        )

        # grass.run_command('g.region', flags='up')
        # grass.raster.raster_history(cost_distance_map)
        cdhist = History(cost_distance_map)
        cdhist.clear()
        cdhist.creator = os.environ["USER"]
        cdhist.write()
        # History object cannot modify description
        grass.run_command(
            "r.support",
            map=cost_distance_map,
            description="Generated by r.connectivity.distance",
            history=os.environ["CMDLINE"],
        )

        # Export distance at boundaries
        maps = "{0}_patch_{1}_neighbours_contur,{2}_patch_{1}_cost_dist"
        maps = (maps.format(TMP_PREFIX, cat, prefix), )

        connections = grass.encode(
            grass.read_command("r.stats",
                               flags="1ng",
                               quiet=True,
                               input=maps,
                               separator=";").rstrip("\n"))
        if connections:
            con_array = np.genfromtxt(
                BytesIO(connections),
                delimiter=";",
                dtype=None,
                names=["x", "y", "cat", "dist"],
            )
        else:
            grass.warning("No connections for patch {}".format(cat))

            # Write centroid to vertex map
            vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)
            vertex.table.conn.commit()

            # Remove temporary map data
            grass.run_command(
                "g.remove",
                quiet=True,
                flags="f",
                type=["raster", "vector"],
                pattern="{}*{}*".format(TMP_PREFIX, cat),
            )
            grass.del_temp_region()
            continue

        # Find closest points on neighbour patches
        to_cats = set(np.atleast_1d(con_array["cat"]))
        to_coords = []
        for to_cat in to_cats:
            connection = con_array[con_array["cat"] == to_cat]
            connection.sort(order=["dist"])
            pixel = (border_dist
                     if len(connection) > border_dist else len(connection) - 1)
            # closest_points_x = connection['x'][pixel]
            # closest_points_y = connection['y'][pixel]
            closest_points_to_cat = to_cat
            closest_points_min_dist = connection["dist"][0]
            closest_points_dist = connection["dist"][pixel]
            closest_points_max_dist = connection["dist"][-1]
            to_patch_ids = vpatch_ids[vpatch_ids["cat"] == int(to_cat)]["vid"]

            if len(to_patch_ids) == 1:
                to_centroid = Centroid(v_id=to_patch_ids,
                                       c_mapinfo=vpatches.c_mapinfo)
                to_x = to_centroid.x
                to_y = to_centroid.y
            elif len(to_patch_ids) > 1:
                xcoords = []
                ycoords = []
                for t_p in to_patch_ids:
                    to_centroid = Centroid(v_id=int(t_p),
                                           c_mapinfo=vpatches.c_mapinfo)
                    xcoords.append(to_centroid.x)
                    ycoords.append(to_centroid.y)

                    # Get centroid
                    if not to_centroid:
                        continue
                to_x = np.average(xcoords)
                to_y = np.average(ycoords)

            to_coords.append("{},{},{},{},{},{}".format(
                connection["x"][0],
                connection["y"][0],
                to_cat,
                closest_points_min_dist,
                closest_points_dist,
                closest_points_max_dist,
            ))

            # Save edges to network dataset
            if closest_points_dist <= 0:
                zero_dist = 1

            # Write data to network
            network.write(
                Line([(from_x, from_y), (to_x, to_y)]),
                cat=lin_cat,
                attrs=(
                    cat,
                    int(closest_points_to_cat),
                    closest_points_min_dist,
                    closest_points_dist,
                    closest_points_max_dist,
                ),
            )
            network.table.conn.commit()

            lin_cat = lin_cat + 1

        # Save closest points and shortest paths through cost raster as
        # vector map (r.drain limited to 1024 points) if requested
        if p_flag:
            grass.verbose("Extracting shortest paths for patch number \
                          {}...".format(cat))

            points_n = len(to_cats)

            tiles = int(points_n / 1024.0)
            rest = points_n % 1024
            if not rest == 0:
                tiles = tiles + 1

            tile_n = 0
            while tile_n < tiles:
                tile_n = tile_n + 1
                # Import closest points for start-patch in chunks of 1024
                sp = grass.feed_command(
                    "v.in.ascii",
                    flags="nr",
                    overwrite=True,
                    quiet=True,
                    input="-",
                    stderr=subprocess.PIPE,
                    output="{}_{}_cp".format(TMP_PREFIX, cat),
                    separator=",",
                    columns="x double precision,\
                                           y double precision,\
                                           to_p integer,\
                                           dist_min double precision,\
                                           dist double precision,\
                                           dist_max double precision",
                )
                sp.stdin.write(grass.encode("\n".join(to_coords)))
                sp.stdin.close()
                sp.wait()

                # Extract shortest paths for start-patch in chunks of
                # 1024 points
                cost_paths = "{}_{}_cost_paths".format(TMP_PREFIX, cat)
                start_points = "{}_{}_cp".format(TMP_PREFIX, cat)
                grass.run_command(
                    "r.drain",
                    overwrite=True,
                    quiet=True,
                    input=cost_distance_map,
                    output=cost_paths,
                    drain=cost_paths,
                    start_points=start_points,
                )

                grass.run_command(
                    "v.db.addtable",
                    map=cost_paths,
                    quiet=True,
                    columns="cat integer,\
                                   from_p integer,\
                                   to_p integer,\
                                   dist_min double precision,\
                                   dist double precision,\
                                   dist_max double precision",
                )
                grass.run_command(
                    "v.db.update",
                    map=cost_paths,
                    column="from_p",
                    value=cat,
                    quiet=True,
                )
                grass.run_command(
                    "v.distance",
                    quiet=True,
                    from_=cost_paths,
                    to=start_points,
                    upload="to_attr",
                    column="to_p",
                    to_column="to_p",
                )
                grass.run_command(
                    "v.db.join",
                    quiet=True,
                    map=cost_paths,
                    column="to_p",
                    other_column="to_p",
                    other_table=start_points,
                    subset_columns="dist_min,dist,dist_max",
                )

                # grass.run_command('v.info', flags='c',
                #                  map=cost_paths)
                grass.run_command(
                    "v.patch",
                    flags="ae",
                    overwrite=True,
                    quiet=True,
                    input=cost_paths,
                    output=shortest_paths,
                )

                # Remove temporary map data
                grass.run_command(
                    "g.remove",
                    quiet=True,
                    flags="f",
                    type=["raster", "vector"],
                    pattern="{}*{}*".format(TMP_PREFIX, cat),
                )

        # Remove temporary map data for patch
        if r_flag:
            grass.run_command("g.remove",
                              flags="f",
                              type="raster",
                              name=cost_distance_map,
                              quiet=True)

        vertex.write(Point(from_x, from_y), cat=int(cat), attrs=proxy_val)

        vertex.table.conn.commit()

        # Print progress message
        grass.percent(i=int((float(counter) / n_cats) * 100), n=100, s=3)

        # Update counter for progress message
        counter = counter + 1

    if zero_dist:
        grass.warning("Some patches are directly adjacent to others. \
                       Minimum distance set to 0.0000000001")

    # Close vector maps and build topology
    network.close()
    vertex.close()

    # Add vertex attributes
    # grass.run_command('v.db.addtable', map=vertex_map)
    # grass.run_command('v.db.join', map=vertex_map, column='cat',
    #                   other_table=in_db_connection[int(layer)]['table'],
    #                   other_column='cat', subset_columns=pop_proxy,
    #                   quiet=True)

    # Add history and meta data to produced maps
    grass.run_command(
        "v.support",
        flags="h",
        map=edge_map,
        person=os.environ["USER"],
        cmdhist=os.environ["CMDLINE"],
    )

    grass.run_command(
        "v.support",
        flags="h",
        map=vertex_map,
        person=os.environ["USER"],
        cmdhist=os.environ["CMDLINE"],
    )

    if p_flag:
        grass.run_command(
            "v.support",
            flags="h",
            map=shortest_paths,
            person=os.environ["USER"],
            cmdhist=os.environ["CMDLINE"],
        )

    # Output also Conefor files if requested
    if conefor_dir:
        query = """SELECT p_from, p_to, avg(dist) FROM
                 (SELECT
                 CASE
                 WHEN from_p > to_p THEN to_p
                 ELSE from_p END AS p_from,
                    CASE
                 WHEN from_p > to_p THEN from_p
                 ELSE to_p END AS p_to,
                 dist
                 FROM {}) AS x
                 GROUP BY p_from, p_to""".format(edge_map)
        with open(os.path.join(conefor_dir, "undirected_connection_file"),
                  "w") as edges:
            edges.write(
                grass.read_command("db.select", sql=query, separator=" "))
        with open(os.path.join(conefor_dir, "directed_connection_file"),
                  "w") as edges:
            edges.write(
                grass.read_command("v.db.select",
                                   map=edge_map,
                                   separator=" ",
                                   flags="c"))
        with open(os.path.join(conefor_dir, "node_file"), "w") as nodes:
            nodes.write(
                grass.read_command("v.db.select",
                                   map=vertex_map,
                                   separator=" ",
                                   flags="c"))
Example #26
0
def main():
    global TMPLOC, SRCGISRC, TGTGISRC, GISDBASE
    overwrite = grass.overwrite()

    # list formats and exit
    if flags['f']:
        grass.run_command('v.in.ogr', flags='f')
        return 0

    # list layers and exit
    if flags['l']:
        try:
            grass.run_command('v.in.ogr', flags='l', input=options['input'])
        except CalledModuleError:
            return 1
        return 0

    OGRdatasource = options['input']
    output = options['output']
    layers = options['layer']

    vflags = ''
    if options['extent'] == 'region':
        vflags += 'r'
    if flags['o']:
        vflags += 'o'

    vopts = {}
    if options['encoding']:
        vopts['encoding'] = options['encoding']

    if options['datum_trans'] and options['datum_trans'] == '-1':
        # list datum transform parameters
        if not options['epsg']:
            grass.fatal(_("Missing value for parameter <%s>") % 'epsg')

        return grass.run_command('g.proj',
                                 epsg=options['epsg'],
                                 datum_trans=options['datum_trans'])

    if layers:
        vopts['layer'] = layers
    if output:
        vopts['output'] = output
    vopts['snap'] = options['snap']

    # try v.in.ogr directly
    if flags['o'] or is_projection_matching(OGRdatasource):
        try:
            grass.run_command('v.in.ogr',
                              input=OGRdatasource,
                              flags=vflags,
                              overwrite=overwrite,
                              **vopts)
            grass.message(
                _("Input <%s> successfully imported without reprojection") %
                OGRdatasource)
            return 0
        except CalledModuleError:
            grass.fatal(_("Unable to import <%s>") % OGRdatasource)

    grassenv = grass.gisenv()
    tgtloc = grassenv['LOCATION_NAME']

    # make sure target is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for current location <%s>"
              ) % tgtloc)

    tgtmapset = grassenv['MAPSET']
    GISDBASE = grassenv['GISDBASE']
    TGTGISRC = os.environ['GISRC']
    SRCGISRC = grass.tempfile()

    TMPLOC = grass.append_node_pid("tmp_v_import_location")

    f = open(SRCGISRC, 'w')
    f.write('MAPSET: PERMANENT\n')
    f.write('GISDBASE: %s\n' % GISDBASE)
    f.write('LOCATION_NAME: %s\n' % TMPLOC)
    f.write('GUI: text\n')
    f.close()
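    # a GISRC file needs only these four keys; pointing the GISRC
    # environment variable at it switches this process (and its
    # children) to the temporary location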

    tgtsrs = grass.read_command('g.proj', flags='j', quiet=True)

    # create temp location from input without import
    grass.verbose(_("Creating temporary location for <%s>...") % OGRdatasource)
    try:
        if OGRdatasource.lower().endswith("gml"):
            try:
                from osgeo import gdal
            except ImportError:
                grass.fatal(
                    _("Unable to load GDAL Python bindings (requires package 'python-gdal' being installed)"
                      ))
            if int(gdal.VersionInfo('VERSION_NUM')) < GDAL_COMPUTE_VERSION(
                    2, 4, 1):
                fix_gfsfile(OGRdatasource)
        grass.run_command('v.in.ogr',
                          input=OGRdatasource,
                          location=TMPLOC,
                          flags='i',
                          quiet=True,
                          overwrite=overwrite,
                          **vopts)
    except CalledModuleError:
        grass.fatal(
            _("Unable to create location from OGR datasource <%s>") %
            OGRdatasource)

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    if options['epsg']:  # force given EPSG
        kwargs = {}
        if options['datum_trans']:
            kwargs['datum_trans'] = options['datum_trans']
        grass.run_command('g.proj', flags='c', epsg=options['epsg'], **kwargs)

    # print projection at verbose level
    grass.verbose(grass.read_command('g.proj', flags='p').rstrip(os.linesep))

    # make sure input is not xy
    if grass.parse_command('g.proj',
                           flags='g')['name'] == 'xy_location_unprojected':
        grass.fatal(
            _("Coordinate reference system not available for input <%s>") %
            OGRdatasource)

    if options['extent'] == 'region':
        # switch to target location
        os.environ['GISRC'] = str(TGTGISRC)

        # v.in.region in tgt
        vreg = grass.append_node_pid("tmp_v_import_region")

        grass.run_command('v.in.region', output=vreg, quiet=True)

        # reproject to src
        # switch to temp location
        os.environ['GISRC'] = str(SRCGISRC)
        try:
            grass.run_command('v.proj',
                              input=vreg,
                              output=vreg,
                              location=tgtloc,
                              mapset=tgtmapset,
                              quiet=True,
                              overwrite=overwrite)
        except CalledModuleError:
            grass.fatal(_("Unable to reproject to source location"))

        # set region from region vector
        grass.run_command('g.region', res='1')
        grass.run_command('g.region', vector=vreg)

    # import into temp location
    grass.message(_("Importing <%s> ...") % OGRdatasource)
    try:
        if OGRdatasource.lower().endswith("gml"):
            try:
                from osgeo import gdal
            except ImportError:
                grass.fatal(
                    _("Unable to load GDAL Python bindings (requires package 'python-gdal' being installed)"
                      ))
            if int(gdal.VersionInfo('VERSION_NUM')) < GDAL_COMPUTE_VERSION(
                    2, 4, 1):
                fix_gfsfile(OGRdatasource)
        grass.run_command('v.in.ogr',
                          input=OGRdatasource,
                          flags=vflags,
                          overwrite=overwrite,
                          **vopts)
    except CalledModuleError:
        grass.fatal(_("Unable to import OGR datasource <%s>") % OGRdatasource)

    # if output is not defined, check the source mapset
    if not output:
        output = grass.list_grouped('vector')['PERMANENT'][0]

    # switch to target location
    os.environ['GISRC'] = str(TGTGISRC)

    # check if map exists
    if not grass.overwrite() and \
       grass.find_file(output, element='vector', mapset='.')['mapset']:
        grass.fatal(_("option <%s>: <%s> exists.") % ('output', output))

    if options['extent'] == 'region':
        grass.run_command('g.remove',
                          type='vector',
                          name=vreg,
                          flags='f',
                          quiet=True)

    # v.proj
    grass.message(_("Reprojecting <%s>...") % output)
    try:
        grass.run_command('v.proj',
                          location=TMPLOC,
                          mapset='PERMANENT',
                          input=output,
                          overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to to reproject vector <%s>") % output)

    return 0
Example #27
0
def main():
    grass.set_raise_on_error(False)

    options, flags = grass.parser()

    # import wx only after running parser
    # to avoid issues with complex imports when only interface is needed
    import wx

    from grass.script.setup import set_gui_path

    set_gui_path()

    from core.render import Map
    from core.globalvar import ICONDIR
    from mapdisp.frame import MapPanel
    from gui_core.mapdisp import FrameMixin
    from mapdisp.main import DMonGrassInterface
    from core.settings import UserSettings
    from vdigit.main import haveVDigit, errorMsg
    from grass.exceptions import CalledModuleError

    # define classes which need imports as local
    # for longer definitions, a separate file would be a better option
    class VDigitMapDisplay(FrameMixin, MapPanel):
        """Map display for wrapping map panel with v.digit mathods and frame methods"""
        def __init__(self, parent, vectorMap):
            MapPanel.__init__(self,
                              parent=parent,
                              Map=Map(),
                              giface=DMonGrassInterface(None))

            # set system icon
            parent.SetIcon(
                wx.Icon(os.path.join(ICONDIR, "grass_map.ico"),
                        wx.BITMAP_TYPE_ICO))

            # bindings
            parent.Bind(wx.EVT_CLOSE, self.OnCloseWindow)

            # extend shortcuts and create frame accelerator table
            self.shortcuts_table.append(
                (self.OnFullScreen, wx.ACCEL_NORMAL, wx.WXK_F11))
            self._initShortcuts()

            # this giface issue is not solved yet, we must set mapframe afterwards
            self._giface._mapframe = self
            # load vector map
            mapLayer = self.GetMap().AddLayer(
                ltype="vector",
                name=vectorMap,
                command=["d.vect", "map=%s" % vectorMap],
                active=True,
                hidden=False,
                opacity=1.0,
                render=True,
            )

            # switch toolbar
            self.AddToolbar("vdigit", fixed=True)

            # start editing
            self.toolbars["vdigit"].StartEditing(mapLayer)
            # use Close instead of QuitVDigit for standalone tool
            self.toolbars["vdigit"].quitDigitizer.disconnect(self.QuitVDigit)
            self.toolbars["vdigit"].quitDigitizer.connect(lambda: self.Close())

            # add Map Display panel to Map Display frame
            sizer = wx.BoxSizer(wx.VERTICAL)
            sizer.Add(self, proportion=1, flag=wx.EXPAND)
            parent.SetSizer(sizer)
            parent.Layout()

    if not haveVDigit:
        grass.fatal(_("Vector digitizer not available. %s") % errorMsg)

    if not grass.find_file(name=options["map"],
                           element="vector",
                           mapset=grass.gisenv()["MAPSET"])["fullname"]:
        if not flags["c"]:
            grass.fatal(
                _("Vector map <%s> not found in current mapset. "
                  "New vector map can be created by providing '-c' flag.") %
                options["map"])
        else:
            grass.verbose(_("New vector map <%s> created") % options["map"])
            try:
                grass.run_command("v.edit",
                                  map=options["map"],
                                  tool="create",
                                  quiet=True)
            except CalledModuleError:
                grass.fatal(
                    _("Unable to create new vector map <%s>") % options["map"])

    # allow immediate rendering
    driver = UserSettings.Get(group="display", key="driver", subkey="type")
    if driver == "png":
        os.environ["GRASS_RENDER_IMMEDIATE"] = "png"
    else:
        os.environ["GRASS_RENDER_IMMEDIATE"] = "cairo"

    app = wx.App()
    frame = wx.Frame(
        None,
        id=wx.ID_ANY,
        size=(850, 600),
        style=wx.DEFAULT_FRAME_STYLE,
        title=_("Vector Digitizer - GRASS GIS"),
    )
    frame = VDigitMapDisplay(parent=frame, vectorMap=options["map"])
    frame.Show()

    app.MainLoop()
Example #28
0
def main():
    map = options["map"]
    layer = options["layer"]
    column = options["column"]
    otable = options["other_table"]
    ocolumn = options["other_column"]
    if options["subset_columns"]:
        scolumns = options["subset_columns"].split(",")
    else:
        scolumns = None

    try:
        f = grass.vector_layer_db(map, layer)
    except CalledModuleError:
        sys.exit(1)

    maptable = f["table"]
    database = f["database"]
    driver = f["driver"]

    if driver == "dbf":
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(
            _("There is no table connected to this map. Unable to join any column."
              ))

    # check if column is in map table
    if column not in grass.vector_columns(map, layer):
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (column, maptable))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver=driver,
                                    database=database)["cols"]

    # check if ocolumn is on other table
    if ocolumn not in [ocol[0] for ocol in all_cols_ot]:
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine columns subset from other table
    if not scolumns:
        # select all columns from other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        # check if scolumns exists in the other table
        for scol in scolumns:
            found = False
            for col_ot in all_cols_ot:
                if scol == col_ot[0]:
                    found = True
                    cols_to_add.append(col_ot)
                    break
            if not found:
                grass.warning(
                    _("Column <%s> not found in table <%s>") % (scol, otable))

    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)
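    # With hypothetical names (table=roads, otable=stats,
    # column/ocolumn=cat, colname=length) the substituted statement is:
    #   UPDATE roads SET length=(SELECT length FROM stats
    #                            WHERE stats.cat=roads.cat);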

    for col in cols_to_add:
        # skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue

        use_len = False
        if len(col) > 2:
            use_len = True
            # Sqlite 3 does not support the precision number any more
            if driver == "sqlite":
                use_len = False
            # MySQL - expect format DOUBLE PRECISION(M,D), see #2792
            elif driver == "mysql" and col[1] == "DOUBLE PRECISION":
                use_len = False

        if use_len:
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table
        if colname not in all_cols_tt:
            try:
                grass.run_command("v.db.addcolumn",
                                  map=map,
                                  columns=colspec,
                                  layer=layer)
            except CalledModuleError:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(
            table=maptable,
            column=column,
            otable=otable,
            ocolumn=ocolumn,
            colname=colname,
        )
        grass.debug(stmt, 1)
        grass.verbose(
            _("Updating column <%s> of vector map <%s>...") % (colname, map))
        try:
            grass.write_command("db.execute",
                                stdin=stmt,
                                input="-",
                                database=database,
                                driver=driver)
        except CalledModuleError:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(map)

    return 0
Example #29
0
    def _getElevationFieldblock(self, elevation, fieldblock, elevationfieldblock):
        formula = "$elevationfieldblock = if(isnull($fieldblock),null(),$elevation)"
        # quiet is assumed to be a module-level setting of the parent script
        g.mapcalc(formula, elevationfieldblock=elevationfieldblock,
                  elevation=elevation, fieldblock=fieldblock, quiet=quiet)
        g.verbose('Raster map elevationfieldblock is in "%s"' % elevationfieldblock)
        return elevationfieldblock
Example #30
0
def one_point_per_row_output(separator, output_files, output_time_list, output,
                             write_header, site_input, vcat):
    """Write one point per row
       output is of type: x,y,start,end,value
    """
    # open the output file for writing
    out_file = open(output, 'w') if output != "-" else sys.stdout

    if write_header is True:
        out_str = ""
        if vcat:
            out_str += "cat{sep}"
        if site_input:
            out_str += "x{sep}y{sep}site{sep}start{sep}end{sep}value\n"
        else:
            out_str += "x{sep}y{sep}start{sep}end{sep}value\n"
        out_file.write(out_str.format(sep=separator))

    for count in range(len(output_files)):
        file_name = output_files[count]
        gscript.verbose(_("Transforming r.what output file %s" % (file_name)))
        map_list = output_time_list[count]
        in_file = open(file_name, "r")
        for line in in_file:
            line = line.split(separator)
            if vcat:
                cat = line[0]
                x = line[1]
                y = line[2]
                values = line[4:]
                if site_input:
                    site = line[3]
                    values = line[5:]

            else:
                x = line[0]
                y = line[1]
                if site_input:
                    site = line[2]
                values = line[3:]

            for i in range(len(values)):
                start, end = map_list[i].get_temporal_extent_as_tuple()
                if vcat:
                    cat_str = "{ca}{sep}".format(ca=cat, sep=separator)
                else:
                    cat_str = ""
                if site_input:
                    coor_string = "%(x)10.10f%(sep)s%(y)10.10f%(sep)s%(site_name)s%(sep)s"\
                               %({"x":float(x),"y":float(y),"site_name":str(site),"sep":separator})
                else:
                    coor_string = "%(x)10.10f%(sep)s%(y)10.10f%(sep)s"\
                               %({"x":float(x),"y":float(y),"sep":separator})
                time_string = "%(start)s%(sep)s%(end)s%(sep)s%(val)s\n"\
                               %({"start":str(start), "end":str(end),
                                  "val":(values[i].strip()),"sep":separator})

                out_file.write(cat_str + coor_string + time_string)

        in_file.close()

    if out_file is not sys.stdout:
        out_file.close()
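For orientation, a sketch of the row layout this function writes with separator='|' and both vcat and site_input disabled (coordinates and values are invented):

    x|y|start|end|value
    3730731.4959037100|5642483.5123652099|2001-01-01 00:00:00|2001-02-01 00:00:00|42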
Example #31
def main():

    # Variable assigned from USGS product dictionary
    planet_api_key = options['api_key']
    item_type = options['item_type']
    input_name = options['input_name']
    output_name = options['output_name']
    filter_start_date = options['start_date_filter']
    filter_end_date = options['end_date_filter']
    cloud_cover = options['cloud_cover']
    gsd = options['gsd']
    sun_azimuth = options['sun_azimuth']
    sun_elevation = options['sun_elevation']
    view_angle = options['view_angle']

    # Set date range filters
    start_date_range_filter = api.filters.date_range('acquired',
                                                     gte=filter_start_date)
    end_date_range_filter = api.filters.date_range('acquired',
                                                   lte=filter_end_date)

    # Set cloud filter (Optional); option value is expected as "low,high"
    cloud_cover_low, cloud_cover_high = [float(v) for v in cloud_cover.split(',')]
    cloud_cover_low_filter = api.filters.range_filter('cloud_cover',
                                                      gt=cloud_cover_low)
    cloud_cover_high_filter = api.filters.range_filter('cloud_cover',
                                                       lt=cloud_cover_high)

    # Set gsd filter NumberInFilter (Optional)

    # Set sun azimuth filter (Optional)

    # Set sun elevation filter (Optional)

    # Set view angle filter (Optional)

    # Set ground_control filter StringInFilter (String 'true', 'false')(Optional)

    # visible_percent RangeFilter (Int 0-100)

    # usable data RangeFilter (Double 0.0 - 1.0)
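    # Illustrative sketch only (not part of the original module): the optional
    # filters above could be composed with the same api.filters helpers, e.g.
    # gsd_filter = api.filters.range_filter('gsd', gte=float(gsd))
    # view_angle_filter = api.filters.range_filter('view_angle',
    #                                              lte=float(view_angle))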

    # Set permissions filter to only return downloadable data
    permission_filter = api.filters.permission_filter('assets:download')

    request_filter = api.filters.and_filter(start_date_range_filter,
                                            end_date_range_filter,
                                            cloud_cover_low_filter,
                                            cloud_cover_high_filter,
                                            permission_filter)

    planet_query_filter = api.filters.build_search_request([item_type],
                                                           request_filter)

    nav_string = usgs_product_dict[gui_product]
    product = nav_string['product']
    product_format = nav_string['format']
    product_extensions = tuple(nav_string['extension'].split(','))
    product_is_zip = nav_string['zip']
    product_srs = nav_string['srs']
    product_proj4 = nav_string['srs_proj4']
    product_interpolation = nav_string['interpolation']
    product_url_split = nav_string['url_split']
    product_extent = nav_string['extent']
    gui_subset = None

    # Set Planet API key and client
    os.environ['PL_API_KEY'] = planet_api_key
    client = api.ClientV1()

    # Parameter assignments for each dataset
    if gui_product == 'ned':
        gui_dataset = options['ned_dataset']
        ned_api_name = ''
        if options['ned_dataset'] == 'ned1sec':
            ned_data_abbrv = 'ned_1arc_'
            ned_api_name = '1 arc-second'
        if options['ned_dataset'] == 'ned13sec':
            ned_data_abbrv = 'ned_13arc_'
            ned_api_name = '1/3 arc-second'
        if options['ned_dataset'] == 'ned19sec':
            ned_data_abbrv = 'ned_19arc_'
            ned_api_name = '1/9 arc-second'
        product_tag = product + " " + ned_api_name

    if gui_product == 'nlcd':
        gui_dataset = options['nlcd_dataset']
        if options['nlcd_dataset'] == 'nlcd2001':
            gui_dataset = 'National Land Cover Database (NLCD) - 2001'
        if options['nlcd_dataset'] == 'nlcd2006':
            gui_dataset = 'National Land Cover Database (NLCD) - 2006'
        if options['nlcd_dataset'] == 'nlcd2011':
            gui_dataset = 'National Land Cover Database (NLCD) - 2011'

        if options['nlcd_subset'] == 'landcover':
            gui_subset = 'Land Cover'
        if options['nlcd_subset'] == 'impervious':
            gui_subset = 'Percent Developed Imperviousness'
        if options['nlcd_subset'] == 'canopy':
            gui_subset = 'Percent Tree Canopy'
        product_tag = gui_dataset

    if gui_product == 'naip':
        gui_dataset = 'Imagery - 1 meter (NAIP)'
        product_tag = nav_string['product']

    has_pdal = gscript.find_program(pgm='v.in.pdal')
    if gui_product == 'lidar':
        gui_dataset = 'Lidar Point Cloud (LPC)'
        product_tag = nav_string['product']
        if not has_pdal:
            gscript.warning(
                _("Module v.in.pdal is missing,"
                  " any downloaded data will not be processed."))
    # Assigning further parameters from GUI
    gui_output_layer = options['output_name']
    gui_resampling_method = options['resampling_method']
    gui_i_flag = flags['i']
    gui_k_flag = flags['k']
    work_dir = options['output_directory']
    memory = options['memory']
    nprocs = options['nprocs']

    preserve_extracted_files = gui_k_flag
    use_existing_extracted_files = True
    preserve_imported_tiles = gui_k_flag
    use_existing_imported_tiles = True

    if not os.path.isdir(work_dir):
        gscript.fatal(
            _("Directory <{}> does not exist."
              " Please create it.").format(work_dir))

    # Returns current units
    try:
        proj = gscript.parse_command('g.proj', flags='g')
        if gscript.locn_is_latlong():
            product_resolution = nav_string['dataset'][gui_dataset][0]
        elif float(proj['meters']) == 1:
            product_resolution = nav_string['dataset'][gui_dataset][1]
        else:
            # we assume feet
            product_resolution = nav_string['dataset'][gui_dataset][2]
    except TypeError:
        product_resolution = False
    if gui_product == 'lidar' and options['resolution']:
        product_resolution = float(options['resolution'])

    if gui_resampling_method == 'default':
        gui_resampling_method = nav_string['interpolation']
        gscript.verbose(
            _("The default resampling method for product {product} is {res}").
            format(product=gui_product, res=product_interpolation))

    # Get coordinates for current GRASS computational region and convert to USGS SRS
    gregion = gscript.region()
    wgs84 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    min_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['w'], gregion['s']),
                                      proj_out=wgs84,
                                      separator='comma',
                                      flags='d')
    max_coords = gscript.read_command('m.proj',
                                      coordinates=(gregion['e'], gregion['n']),
                                      proj_out=wgs84,
                                      separator='comma',
                                      flags='d')
    min_list = min_coords.split(',')[:2]
    max_list = max_coords.split(',')[:2]
    list_bbox = min_list + max_list
    str_bbox = ",".join((str(coord) for coord in list_bbox))
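    # str_bbox is "min_lon,min_lat,max_lon,max_lat" in WGS84,
    # e.g. "-78.77,35.68,-78.60,35.82" (illustrative values)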

    # Format variables for TNM API call
    gui_prod_str = str(product_tag)
    datasets = quote_plus(gui_prod_str)
    prod_format = quote_plus(product_format)
    prod_extent = quote_plus(product_extent[0])

    # Create TNM API URL
    base_TNM = "https://viewer.nationalmap.gov/tnmaccess/api/products?"
    datasets_TNM = "datasets={0}".format(datasets)
    bbox_TNM = "&bbox={0}".format(str_bbox)
    prod_format_TNM = "&prodFormats={0}".format(prod_format)
    TNM_API_URL = base_TNM + datasets_TNM + bbox_TNM + prod_format_TNM
    if gui_product == 'nlcd':
        TNM_API_URL += "&prodExtents={0}".format(prod_extent)
    gscript.verbose("TNM API Query URL:\t{0}".format(TNM_API_URL))

    # Query TNM API
    try_again_message = _(
        "Possibly, the query has timed out. Check network configuration and try again."
    )
    try:
        TNM_API_GET = urlopen(TNM_API_URL, timeout=12)
    except HTTPError as error:
        gscript.fatal(
            _("HTTP(S) error from USGS TNM API:"
              " {code}: {reason} ({instructions})").format(
                  reason=error.reason,
                  code=error.code,
                  instructions=try_again_message))
    except (URLError, OSError, IOError) as error:
        # Catching also SSLError and potentially others which are
        # subclasses of IOError in Python 2 and of OSError in Python 3.
        gscript.fatal(
            _("Error accessing USGS TNM API: {error} ({instructions})").format(
                error=error, instructions=try_again_message))

    # Parse return JSON object from API query
    try:
        return_JSON = json.load(TNM_API_GET)
        if return_JSON['errors']:
            TNM_API_error = return_JSON['errors']
            api_error_msg = "TNM API Error - {0}".format(str(TNM_API_error))
            gscript.fatal(api_error_msg)
        if gui_product == 'lidar' and options['title_filter']:
            return_JSON['items'] = [
                item for item in return_JSON['items']
                if options['title_filter'] in item['title']
            ]
            return_JSON['total'] = len(return_JSON['items'])

    except Exception:
        gscript.fatal(_("Unable to load USGS JSON object."))

    # Functions down_list() and exist_list() used to determine
    # existing files and those that need to be downloaded.
    def down_list():
        dwnld_url.append(TNM_file_URL)
        dwnld_size.append(TNM_file_size)
        TNM_file_titles.append(TNM_file_title)
        if product_is_zip:
            extract_zip_list.append(local_zip_path)
        if f['datasets'][0] not in dataset_name:
            if len(dataset_name) <= 1:
                dataset_name.append(str(f['datasets'][0]))

    def exist_list():
        exist_TNM_titles.append(TNM_file_title)
        exist_dwnld_url.append(TNM_file_URL)
        if product_is_zip:
            exist_zip_list.append(local_zip_path)
            extract_zip_list.append(local_zip_path)
        else:
            exist_tile_list.append(local_tile_path)

    # Assign needed parameters from returned JSON
    tile_API_count = int(return_JSON['total'])
    tiles_needed_count = 0
    size_diff_tolerance = 5
    exist_dwnld_size = 0
    if tile_API_count > 0:
        dwnld_size = []
        dwnld_url = []
        dataset_name = []
        TNM_file_titles = []
        exist_dwnld_url = []
        exist_TNM_titles = []
        exist_zip_list = []
        exist_tile_list = []
        extract_zip_list = []
        # for each file returned, assign variables to needed parameters
        for f in return_JSON['items']:
            TNM_file_title = f['title']
            TNM_file_URL = str(f['downloadURL'])
            TNM_file_size = int(f['sizeInBytes'])
            TNM_file_name = TNM_file_URL.split(product_url_split)[-1]
            if gui_product == 'ned':
                local_file_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
                local_zip_path = os.path.join(work_dir,
                                              ned_data_abbrv + TNM_file_name)
                local_tile_path = os.path.join(work_dir,
                                               ned_data_abbrv + TNM_file_name)
            else:
                local_file_path = os.path.join(work_dir, TNM_file_name)
                local_zip_path = os.path.join(work_dir, TNM_file_name)
                local_tile_path = os.path.join(work_dir, TNM_file_name)
            file_exists = os.path.exists(local_file_path)
            file_complete = None
            # if file exists, but is incomplete, remove file and redownload
            if file_exists:
                existing_local_file_size = os.path.getsize(local_file_path)
                # if local file is incomplete
                if abs(existing_local_file_size -
                       TNM_file_size) > size_diff_tolerance:
                    # add file to cleanup list
                    cleanup_list.append(local_file_path)
                    # NLCD API query returns subsets that cannot be filtered before
                    # results are returned. gui_subset is used to filter results.
                    if not gui_subset:
                        tiles_needed_count += 1
                        down_list()
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            down_list()
                        else:
                            continue
                else:
                    if not gui_subset:
                        tiles_needed_count += 1
                        exist_list()
                        exist_dwnld_size += TNM_file_size
                    else:
                        if gui_subset in TNM_file_title:
                            tiles_needed_count += 1
                            exist_list()
                            exist_dwnld_size += TNM_file_size
                        else:
                            continue
            else:
                if not gui_subset:
                    tiles_needed_count += 1
                    down_list()
                else:
                    if gui_subset in TNM_file_title:
                        tiles_needed_count += 1
                        down_list()
                        continue

    # return fatal error if API query returns no results for GUI input
    elif tile_API_count == 0:
        gscript.fatal(
            _("TNM API ERROR or Zero tiles available for given input parameters."
              ))

    # number of files to be downloaded
    file_download_count = len(dwnld_url)

    # remove existing files from download lists
    for t in exist_TNM_titles:
        if t in TNM_file_titles:
            TNM_file_titles.remove(t)
    for url in exist_dwnld_url:
        if url in dwnld_url:
            dwnld_url.remove(url)

    # messages to user about status of files to be kept, removed, or downloaded
    if exist_zip_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_zip_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: fix this way of reporting and merge it with the one in use
    if exist_tile_list:
        exist_msg = _(
            "\n{0} of {1} files/archive(s) exist locally and will be used by module."
        ).format(len(exist_tile_list), tiles_needed_count)
        gscript.message(exist_msg)
    # TODO: simply continue with whatever is needed to be done in this case
    if cleanup_list:
        cleanup_msg = _(
            "\n{0} existing incomplete file(s) detected and removed. Run module again."
        ).format(len(cleanup_list))
        gscript.fatal(cleanup_msg)

    # formats JSON size from bytes into needed units for combined file size
    if dwnld_size:
        total_size = sum(dwnld_size)
        len_total_size = len(str(total_size))
        if 6 < len_total_size < 10:
            total_size_str = "{0:.2f} MB".format(total_size * 1e-6)
        elif len_total_size >= 10:
            total_size_str = "{0:.2f} GB".format(total_size * 1e-9)
        else:
            # downloads under ~1 MB
            total_size_str = "{0} bytes".format(total_size)
    else:
        total_size_str = '0'

    # Prints 'none' if all tiles available locally
    if TNM_file_titles:
        TNM_file_titles_info = "\n".join(TNM_file_titles)
    else:
        TNM_file_titles_info = 'none'

    # Formatted return for 'i' flag
    if file_download_count <= 0:
        data_info = "USGS file(s) to download: NONE"
        if gui_product == 'nlcd':
            if tile_API_count != file_download_count:
                if tiles_needed_count == 0:
                    nlcd_unavailable = "NLCD {0} data unavailable for input parameters".format(
                        gui_subset)
                    gscript.fatal(nlcd_unavailable)
    else:
        data_info = (
            "USGS file(s) to download:",
            "-------------------------",
            "Total download size:\t{size}",
            "Tile count:\t{count}",
            "USGS SRS:\t{srs}",
            "USGS tile titles:\n{tile}",
            "-------------------------",
        )
        data_info = '\n'.join(data_info).format(size=total_size_str,
                                                count=file_download_count,
                                                srs=product_srs,
                                                tile=TNM_file_titles_info)
    print(data_info)

    if gui_i_flag:
        gscript.info(
            _("To download USGS data, remove <i> flag, and rerun r.in.usgs."))
        sys.exit()

    # USGS data download process
    if file_download_count <= 0:
        gscript.message(_("Extracting existing USGS Data..."))
    else:
        gscript.message(_("Downloading USGS Data..."))

    TNM_count = len(dwnld_url)
    download_count = 0
    local_tile_path_list = []
    local_zip_path_list = []
    patch_names = []

    # Download files
    for url in dwnld_url:
        # create file name by splitting name from returned url
        # add file name to local download directory
        if gui_product == 'ned':
            file_name = ned_data_abbrv + url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        else:
            file_name = url.split(product_url_split)[-1]
            local_file_path = os.path.join(work_dir, file_name)
        try:
            # download files in chunks rather than write complete files to memory
            dwnld_req = urlopen(url, timeout=12)
            download_bytes = int(dwnld_req.info()['Content-Length'])
            CHUNK = 16 * 1024
            with open(local_file_path, "wb+") as local_file:
                count = 0
                steps = int(download_bytes / CHUNK) + 1
                while True:
                    chunk = dwnld_req.read(CHUNK)
                    gscript.percent(count, steps, 10)
                    count += 1
                    if not chunk:
                        break
                    local_file.write(chunk)
                gscript.percent(1, 1, 1)
            download_count += 1
            # determine if file is a zip archive or another format
            if product_is_zip:
                local_zip_path_list.append(local_file_path)
            else:
                local_tile_path_list.append(local_file_path)
            file_complete = "Download {0} of {1}: COMPLETE".format(
                download_count, TNM_count)
            gscript.info(file_complete)
        except URLError:
            gscript.fatal(
                _("USGS download request has timed out. Network or formatting error."
                  ))
        except Exception:
            cleanup_list.append(local_file_path)
            if download_count:
                file_failed = "Download {0} of {1}: FAILED".format(
                    download_count, TNM_count)
                gscript.fatal(file_failed)

    # sets already downloaded zip files or tiles to be extracted or imported
    # our pre-extraction stats are broken, so stats are collected during extraction
    used_existing_extracted_tiles_num = 0
    removed_extracted_tiles_num = 0
    old_extracted_tiles_num = 0
    extracted_tiles_num = 0
    if exist_zip_list:
        for z in exist_zip_list:
            local_zip_path_list.append(z)
    if exist_tile_list:
        for t in exist_tile_list:
            local_tile_path_list.append(t)
    if product_is_zip:
        if file_download_count > 0:
            gscript.message(_("Extracting data..."))
        # for each zip archive, extract needed file
        files_to_process = len(local_zip_path_list)
        for i, z in enumerate(local_zip_path_list):
            # TODO: measure only for the files being unzipped
            gscript.percent(i, files_to_process, 10)
            # Extract tiles from ZIP archives
            try:
                with zipfile.ZipFile(z, "r") as read_zip:
                    for f in read_zip.namelist():
                        if f.lower().endswith(product_extensions):
                            extracted_tile = os.path.join(work_dir, str(f))
                            remove_and_extract = True
                            if os.path.exists(extracted_tile):
                                if use_existing_extracted_files:
                                    # if the downloaded file is newer
                                    # than the extracted one, extract it again
                                    if os.path.getmtime(
                                            extracted_tile) < os.path.getmtime(
                                                z):
                                        remove_and_extract = True
                                        old_extracted_tiles_num += 1
                                    else:
                                        remove_and_extract = False
                                        used_existing_extracted_tiles_num += 1
                                else:
                                    remove_and_extract = True
                                if remove_and_extract:
                                    removed_extracted_tiles_num += 1
                                    os.remove(extracted_tile)
                            if remove_and_extract:
                                extracted_tiles_num += 1
                                read_zip.extract(f, work_dir)
                if os.path.exists(extracted_tile):
                    local_tile_path_list.append(extracted_tile)
                    if not preserve_extracted_files:
                        cleanup_list.append(extracted_tile)
            except IOError as error:
                cleanup_list.append(extracted_tile)
                gscript.fatal(
                    _("Unable to locate or extract IMG file '{filename}'"
                      " from ZIP archive '{zipname}': {error}").format(
                          filename=extracted_tile, zipname=z, error=error))
        gscript.percent(1, 1, 1)
        # TODO: do this before the extraction begins
        gscript.verbose(
            _("Extracted {extracted} new tiles and"
              " used {used} existing tiles").format(
                  used=used_existing_extracted_tiles_num,
                  extracted=extracted_tiles_num))
        if old_extracted_tiles_num:
            gscript.verbose(
                _("Found {removed} existing tiles older"
                  " than the corresponding downloaded archive").format(
                      removed=old_extracted_tiles_num))
        if removed_extracted_tiles_num:
            gscript.verbose(
                _("Removed {removed} existing tiles").format(
                    removed=removed_extracted_tiles_num))

    if gui_product == 'lidar' and not has_pdal:
        gscript.fatal(
            _("Module v.in.pdal is missing,"
              " cannot process downloaded data."))

    # operations for extracted or complete files available locally
    # We are looking only for the existing maps in the current mapset,
    # but theoretically we could be getting them from other mapsets
    # on search path or from the whole location. User may also want to
    # store the individual tiles in a separate mapset.
    # The big assumption here is naming of the maps (it is a smaller concern
    # for the files in a dedicated download directory).
    used_existing_imported_tiles_num = 0
    imported_tiles_num = 0
    mapset = get_current_mapset()
    files_to_import = len(local_tile_path_list)

    def run_file_import(identifier, results, input, output, resolution,
                        resolution_value, extent, resample, memory):
        result = {}
        try:
            gscript.run_command('r.import',
                                input=input,
                                output=output,
                                resolution=resolution,
                                resolution_value=resolution_value,
                                extent=extent,
                                resample=resample,
                                memory=memory)
        except CalledModuleError:
            error = ("Unable to import <{0}>").format(output)
            result["errors"] = error
        else:
            result["output"] = output
        results[identifier] = result

    def run_lidar_import(identifier, results, input, output, input_srs=None):
        result = {}
        params = {}
        if input_srs:
            params['input_srs'] = input_srs
        try:
            gscript.run_command('v.in.pdal',
                                input=input,
                                output=output,
                                flags='wr',
                                **params)
        except CalledModuleError:
            error = ("Unable to import <{0}>").format(output)
            result["errors"] = error
        else:
            result["output"] = output
        results[identifier] = result

    process_list = []
    process_id_list = []
    process_count = 0
    num_tiles = len(local_tile_path_list)

    with Manager() as manager:
        results = manager.dict()
        for i, t in enumerate(local_tile_path_list):
            # create variables for use in GRASS GIS import process
            LT_file_name = os.path.basename(t)
            LT_layer_name = os.path.splitext(LT_file_name)[0]
            # we are removing the files if requested even if we don't use them
            # do not remove by default with NAIP, there are no zip files
            if gui_product != 'naip' and not preserve_extracted_files:
                cleanup_list.append(t)
            # TODO: unlike the files, we don't compare date with input
            if use_existing_imported_tiles and map_exists(
                    "raster", LT_layer_name, mapset):
                patch_names.append(LT_layer_name)
                used_existing_imported_tiles_num += 1
            else:
                in_info = _("Importing and reprojecting {name}"
                            " ({count} out of {total})...").format(
                                name=LT_file_name,
                                count=i + 1,
                                total=files_to_import)
                gscript.info(in_info)

                process_count += 1
                if gui_product != 'lidar':
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i,
                                                      LT_layer_name),
                        target=run_file_import,
                        kwargs=dict(identifier=i,
                                    results=results,
                                    input=t,
                                    output=LT_layer_name,
                                    resolution='value',
                                    resolution_value=product_resolution,
                                    extent="region",
                                    resample=product_interpolation,
                                    memory=memory))
                else:
                    srs = options['input_srs']
                    process = Process(
                        name="Import-{}-{}-{}".format(process_count, i,
                                                      LT_layer_name),
                        target=run_lidar_import,
                        kwargs=dict(identifier=i,
                                    results=results,
                                    input=t,
                                    output=LT_layer_name,
                                    input_srs=srs if srs else None))
                process.start()
                process_list.append(process)
                process_id_list.append(i)

            # Wait for processes to finish when we reached the max number
            # of processes.
            if process_count == nprocs or i == num_tiles - 1:
                exitcodes = 0
                for process in process_list:
                    process.join()
                    exitcodes += process.exitcode
                if exitcodes != 0:
                    if nprocs > 1:
                        gscript.fatal(
                            _("Parallel import and reprojection failed."
                              " Try running with nprocs=1."))
                    else:
                        gscript.fatal(
                            _("Import and reprojection step failed."))
                for identifier in process_id_list:
                    if "errors" in results[identifier]:
                        gscript.warning(results[identifier]["errors"])
                    else:
                        patch_names.append(results[identifier]["output"])
                        imported_tiles_num += 1
                # Empty the process list
                process_list = []
                process_id_list = []
                process_count = 0
        # no process should be left now
        assert not process_list
        assert not process_id_list
        assert not process_count

    gscript.verbose(
        _("Imported {imported} new tiles and"
          " used {used} existing tiles").format(
              used=used_existing_imported_tiles_num,
              imported=imported_tiles_num))

    # if control variables match and multiple files need to be patched,
    # check product resolution, run r.patch

    # v.surf.rst lidar params
    rst_params = dict(tension=25, smooth=0.1, npmin=100)

    # Check that downloaded files match expected count
    completed_tiles_count = len(local_tile_path_list)
    if completed_tiles_count == tiles_needed_count:
        if len(patch_names) > 1:
            try:
                gscript.use_temp_region()
                # set the resolution
                if product_resolution:
                    gscript.run_command('g.region',
                                        res=product_resolution,
                                        flags='a')
                if gui_product == 'naip':
                    for i in ('1', '2', '3', '4'):
                        patch_names_i = [
                            name + '.' + i for name in patch_names
                        ]
                        output = gui_output_layer + '.' + i
                        gscript.run_command('r.patch',
                                            input=patch_names_i,
                                            output=output)
                        gscript.raster_history(output)
                elif gui_product == 'lidar':
                    gscript.run_command('v.patch',
                                        flags='nzb',
                                        input=patch_names,
                                        output=gui_output_layer)
                    gscript.run_command('v.surf.rst',
                                        input=gui_output_layer,
                                        elevation=gui_output_layer,
                                        nprocs=nprocs,
                                        **rst_params)
                else:
                    gscript.run_command('r.patch',
                                        input=patch_names,
                                        output=gui_output_layer)
                    gscript.raster_history(gui_output_layer)
                gscript.del_temp_region()
                out_info = ("Patched composite layer '{0}' added"
                            ).format(gui_output_layer)
                gscript.verbose(out_info)
                # Remove files if not -k flag
                if not preserve_imported_tiles:
                    if gui_product == 'naip':
                        for i in ('1', '2', '3', '4'):
                            patch_names_i = [
                                name + '.' + i for name in patch_names
                            ]
                            gscript.run_command('g.remove',
                                                type='raster',
                                                name=patch_names_i,
                                                flags='f')
                    elif gui_product == 'lidar':
                        gscript.run_command('g.remove',
                                            type='vector',
                                            name=patch_names +
                                            [gui_output_layer],
                                            flags='f')
                    else:
                        gscript.run_command('g.remove',
                                            type='raster',
                                            name=patch_names,
                                            flags='f')
            except CalledModuleError:
                gscript.fatal("Unable to patch tiles.")
            temp_down_count = _(
                "{0} of {1} tiles successfully imported and patched").format(
                    completed_tiles_count, tiles_needed_count)
            gscript.info(temp_down_count)
        elif len(patch_names) == 1:
            if gui_product == 'naip':
                for i in ('1', '2', '3', '4'):
                    gscript.run_command('g.rename',
                                        raster=(patch_names[0] + '.' + i,
                                                gui_output_layer + '.' + i))
            elif gui_product == 'lidar':
                gscript.run_command('v.surf.rst',
                                    input=patch_names[0],
                                    elevation=gui_output_layer,
                                    nprocs=nprocs,
                                    **rst_params)
                if not preserve_imported_tiles:
                    gscript.run_command('g.remove',
                                        type='vector',
                                        name=patch_names[0],
                                        flags='f')
            else:
                gscript.run_command('g.rename',
                                    raster=(patch_names[0], gui_output_layer))
            temp_down_count = _("Tile successfully imported")
            gscript.info(temp_down_count)
        else:
            gscript.fatal(
                _("No tiles imported successfully. Nothing to patch."))
    else:
        gscript.fatal(
            _("Error in getting or importing the data (see above). Please retry."
              ))

    # Keep source files if 'k' flag active
    if gui_k_flag:
        src_msg = (
            "<k> flag selected: Source tiles remain in '{0}'").format(work_dir)
        gscript.info(src_msg)

    # set appropriate color table
    if gui_product == 'ned':
        gscript.run_command('r.colors',
                            map=gui_output_layer,
                            color='elevation')

    # composite NAIP
    if gui_product == 'naip':
        gscript.use_temp_region()
        gscript.run_command('g.region', raster=gui_output_layer + '.1')
        gscript.run_command('r.composite',
                            red=gui_output_layer + '.1',
                            green=gui_output_layer + '.2',
                            blue=gui_output_layer + '.3',
                            output=gui_output_layer)
        gscript.raster_history(gui_output_layer)
        gscript.del_temp_region()
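For context, a minimal standalone sketch of the Planet filter composition used at the top of this example; the import path, item type, and literal values are assumptions for illustration:

    from planet import api  # assumed import path for the ClientV1-era client

    start_f = api.filters.date_range('acquired', gte='2019-01-01')
    end_f = api.filters.date_range('acquired', lte='2019-12-31')
    cloud_f = api.filters.range_filter('cloud_cover', lt=0.1)
    perm_f = api.filters.permission_filter('assets:download')
    combined = api.filters.and_filter(start_f, end_f, cloud_f, perm_f)
    request = api.filters.build_search_request(['PSScene4Band'], combined)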
Example #32
def one_point_per_col_output(separator, output_files, output_time_list, output,
                             write_header, site_input, vcat):
    """Write one point per col
       output is of type:
       start,end,point_1 value,point_2 value,...,point_n value

       Each row represents a single raster map, hence a single time stamp
    """
    # open the output file for writing
    out_file = open(output, 'w') if output != "-" else sys.stdout

    first = True
    for count in range(len(output_files)):
        file_name = output_files[count]
        gscript.verbose(_("Transforming r.what output file %s") % file_name)
        map_list = output_time_list[count]
        in_file = open(file_name, "r")
        lines = in_file.readlines()

        matrix = []
        for line in lines:
            matrix.append(line.split(separator))

        num_cols = len(matrix[0])

        if first is True:
            if write_header is True:
                out_str = "start%(sep)send" % ({"sep": separator})

                # Define different separator for coordinates and sites
                if separator == ',':
                    coor_sep = ';'
                else:
                    coor_sep = ','

                for row in matrix:
                    if vcat:
                        cat = row[0]
                        x = row[1]
                        y = row[2]
                        out_str += "{sep}{cat}{csep}{x:10.10f}{csep}" \
                                  "{y:10.10f}".format(cat=cat, x=float(x),
                                                           y=float(y),
                                                           sep=separator,
                                                           csep=coor_sep)
                        if site_input:
                            site = row[3]
                            out_str += "{sep}{site}".format(sep=coor_sep,
                                                            site=site)
                    else:
                        x = row[0]
                        y = row[1]
                        out_str += "{sep}{x:10.10f}{csep}" \
                                   "{y:10.10f}".format(x=float(x), y=float(y),
                                                       sep=separator,
                                                       csep=coor_sep)
                        if site_input:
                            site = row[2]
                            out_str += "{sep}{site}".format(sep=coor_sep,
                                                            site=site)

                out_file.write(out_str + "\n")

        first = False

        if vcat:
            ncol = 4
        else:
            ncol = 3
        for col in range(num_cols - ncol):
            start, end = output_time_list[count][
                col].get_temporal_extent_as_tuple()
            time_string = "%(start)s%(sep)s%(end)s"\
                               %({"start":str(start), "end":str(end),
                                  "sep":separator})
            out_file.write(time_string)
            for row in range(len(matrix)):
                value = matrix[row][col + ncol]
                out_file.write("%(sep)s%(value)s"\
                                   %({"sep":separator,
                                      "value":value.strip()}))
            out_file.write("\n")

        in_file.close()
    if out_file is not sys.stdout:
        out_file.close()
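A sketch of the col layout with separator='|' (so the secondary separator becomes ','), two sampling points, and no vcat or site input; all coordinates and values are invented:

    start|end|3730731.4959037100,5642483.5123652099|3581249.0463810400,5634411.9752628198
    2001-01-01 00:00:00|2001-02-01 00:00:00|6|5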
Example #33
def main(options, flags):

    # Get the options
    points = options["points"]
    coordinates = options["coordinates"] 
    strds = options["strds"]
    output = options["output"]
    where = options["where"]
    order = options["order"]
    layout = options["layout"]
    null_value = options["null_value"]
    separator = options["separator"]
    
    nprocs = int(options["nprocs"])
    write_header = flags["n"]
    use_stdin = flags["i"]
    # the output writers below expect a vector-category flag; assumed here
    # to be the module's 'v' flag
    vcat = flags["v"]

    #output_cat_label = flags["f"]
    #output_color = flags["r"]
    #output_cat = flags["i"]
    
    overwrite = gscript.overwrite()
    
    if coordinates and points: 
        gscript.fatal(_("Options coordinates and points are mutually exclusive"))

    if not coordinates and not points and not use_stdin:
        gscript.fatal(_("Please specify the coordinates or points option, "
                        "or use the 'i' flag to pipe coordinate positions "
                        "to t.rast.what from stdin"))

    if use_stdin:
        coordinates_stdin = str(sys.__stdin__.read())
        # Check if coordinates are given with site names or IDs
        stdin_length = len(coordinates_stdin.split('\n')[0].split())
        if stdin_length <= 2:
            site_input = False
        elif stdin_length >= 3:
            site_input = True
    else:
        site_input = False

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    sp = tgis.open_old_stds(strds, "strds", dbif)
    maps = sp.get_registered_maps_as_objects(where=where, order=order, 
                                             dbif=dbif)
    dbif.close()

    if not maps:
        gscript.fatal(_("Space time raster dataset <%s> is empty") % sp.get_id())

    # Setup separator
    if separator == "pipe":
        separator = "|"
    if separator == "comma":
        separator = ","
    if separator == "space":
        separator = " "
    if separator == "tab":
        separator = "\t"
    if separator == "newline":
        separator = "\n"

    # Setup flags are disabled due to test issues
    flags = ""
    #if output_cat_label is True:
    #    flags += "f"
    #if output_color is True:
    #    flags += "r"
    #if output_cat is True:
    #    flags += "i"

    # Configure the r.what module
    if points: 
        r_what = pymod.Module("r.what", map="dummy", 
                                        output="dummy", run_=False, 
                                        separator=separator, points=points, 
                                        overwrite=overwrite, flags=flags, 
                                        quiet=True) 
    elif coordinates: 
        # Create a list of values
        coord_list = coordinates.split(",")
        r_what = pymod.Module("r.what", map="dummy", 
                                        output="dummy", run_=False, 
                                        separator=separator,  
                                        coordinates=coord_list, 
                                        overwrite=overwrite, flags=flags, 
                                        quiet=True)
    elif use_stdin:
        r_what = pymod.Module("r.what", map="dummy", 
                                        output="dummy", run_=False, 
                                        separator=separator,  
                                        stdin_=coordinates_stdin, 
                                        overwrite=overwrite, flags=flags, 
                                        quiet=True)
    else:
        gscript.fatal(_("Please specify points or coordinates"))

    if len(maps) < nprocs:
        nprocs = len(maps)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))
    num_maps = len(maps)
    
    # 400 maps is the absolute maximum in r.what
    # We need to determine the number of maps that can be processed
    # in parallel

    # First estimate the number of maps per process. We use 400 maps
    # simultaneously as the maximum for a single process

    num_loops = int(num_maps / (400 * nprocs))
    remaining_maps = num_maps % (400 * nprocs)

    if num_loops == 0:
        num_loops = 1
        remaining_maps = 0

    # Compute the number of maps for each process
    maps_per_loop = int((num_maps - remaining_maps) / num_loops)
    maps_per_process = int(maps_per_loop / nprocs)
    remaining_maps_per_loop = maps_per_loop % nprocs

    # We put the output files in an ordered list
    output_files = []
    output_time_list = []

    count = 0
    for loop in range(num_loops):
        file_name = gscript.tempfile() + "_%i"%(loop)
        count = process_loop(nprocs, maps, file_name, count, maps_per_process, 
                             remaining_maps_per_loop, output_files, 
                             output_time_list, r_what, process_queue)
    
    process_queue.wait()
    
    gscript.verbose("Number of raster map layers remaining for sampling %i"%(remaining_maps))
    if remaining_maps > 0:
        # Use a single process if less then 100 maps
        if remaining_maps <= 100:
            mod = copy.deepcopy(r_what)
            mod(map=map_names, output=file_name)
            process_queue.put(mod)
        else:
            maps_per_process = int(remaining_maps / nprocs)
            remaining_maps_per_loop = remaining_maps % nprocs
            
            file_name = "out_remain"
            process_loop(nprocs, maps, file_name, count, maps_per_process, 
                         remaining_maps_per_loop, output_files, 
                         output_time_list, r_what, process_queue)

    # Wait for unfinished processes
    process_queue.wait()
    
    # Merge the output files together in the correct order
    if layout == "row":
        one_point_per_row_output(separator, output_files, output_time_list,
                                 output, write_header, site_input, vcat)
    elif layout == "col":
        one_point_per_col_output(separator, output_files, output_time_list,
                                 output, write_header, site_input, vcat)
    else:
        one_point_per_timerow_output(separator, output_files, output_time_list,
                                     output, write_header, site_input, vcat)
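A minimal sketch of the ParallelModuleQueue pattern this main() relies on; map and output names are placeholders:

    import copy
    import grass.pygrass.modules as pymod

    queue = pymod.ParallelModuleQueue(4)         # up to 4 concurrent processes
    base = pymod.Module("r.what", map="dummy", output="dummy",
                        run_=False, quiet=True)  # template, not executed yet
    for i in range(8):
        mod = copy.deepcopy(base)                # each job gets its own copy
        mod(map="map_%i" % i, output="out_%i" % i)
        queue.put(mod)                           # starts once a slot is free
    queue.wait()                                 # block until all jobs finish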
Example #34
def one_point_per_timerow_output(separator, output_files, output_time_list,
                                 output, write_header, site_input, vcat):
    """Use the original layout of the r.what output and print instead of
       the raster names, the time stamps as header

       One point per line for all time stamps:
        x|y|1991-01-01 00:00:00;1991-01-02 00:00:00|1991-01-02 00:00:00;1991-01-03 00:00:00|1991-01-03 00:00:00;1991-01-04 00:00:00|1991-01-04 00:00:00;1991-01-05 00:00:00
        3730731.49590371|5642483.51236521|6|8|7|7
        3581249.04638104|5634411.97526282|5|8|7|7
    """
    out_file = open(output, 'w') if output != "-" else sys.stdout

    matrix = []
    header = ""

    first = True
    for count in range(len(output_files)):
        file_name = output_files[count]
        gscript.verbose("Transforming r.what output file %s" % (file_name))
        map_list = output_time_list[count]
        in_file = open(file_name, "r")

        if write_header:
            if first is True:
                if vcat:
                    header = "cat{sep}".format(sep=separator)
                else:
                    header = ""
                if site_input:
                    header += "x%(sep)sy%(sep)ssite" % ({"sep": separator})
                else:
                    header += "x%(sep)sy" % ({"sep": separator})
            for map in map_list:
                start, end = map.get_temporal_extent_as_tuple()
                time_string = "%(sep)s%(start)s;%(end)s"\
                              %({"start":str(start), "end":str(end),
                                 "sep":separator})
                header += time_string

        lines = in_file.readlines()

        for i in range(len(lines)):
            cols = lines[i].split(separator)

            if first is True:
                if vcat and site_input:
                    matrix.append(cols[:4])
                elif vcat or site_input:
                    matrix.append(cols[:3])
                else:
                    matrix.append(cols[:2])

            if vcat:
                matrix[i] = matrix[i] + cols[4:]
            else:
                matrix[i] = matrix[i] + cols[3:]

        first = False

        in_file.close()

    if write_header:
        out_file.write(header + "\n")

    gscript.verbose(_("Writing the output file <%s>" % (output)))
    for row in matrix:
        first = True
        for col in row:
            value = col.strip()

            if first is False:
                out_file.write("%s" % (separator))
            out_file.write(value)

            first = False

        out_file.write("\n")
    if out_file is not sys.stdout:
        out_file.close()
Example #35
def main():
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options['method']  # sharpening algorithm
    ms1 = options['blue']  # blue channel
    ms2 = options['green']  # green channel
    ms3 = options['red']  # red channel
    pan = options['pan']  # high res pan channel
    out = options['output']  # prefix for output RGB maps
    bladjust = flags['l']  # adjust blue channel
    sproc = flags['s']  # serial processing

    outb = grass.core.find_file('%s_blue' % out)
    outg = grass.core.find_file('%s_green' % out)
    outr = grass.core.find_file('%s_red' % out)

    if (outb['name'] != '' or outg['name'] != '' or outr['name'] != '') and not grass.overwrite():
        grass.warning(_('Maps with selected output prefix names already exist.'
                        ' Delete them or use overwrite flag'))
        return

    pid = str(os.getpid())

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv['nsres']
    ewres = kv['ewres']
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()

    grass.run_command('g.region', res=panres, align=pan)

    grass.message(_("Performing pan sharpening with hi res pan image: %f" % panres))

    if sharpen == "brovey":
        grass.verbose(_("Using Brovey algorithm"))

        # pan/intensity histogram matching using linear regression
        outname = 'tmp%s_pan1' % pid
        panmatch1 = matchhist(pan, ms1, outname)

        outname = 'tmp%s_pan2' % pid
        panmatch2 = matchhist(pan, ms2, outname)

        outname = 'tmp%s_pan3' % pid
        panmatch3 = matchhist(pan, ms3, outname)

        outr = '%s_red' % out
        outg = '%s_green' % out
        outb = '%s_blue' % out

        # calculate brovey transformation
        grass.message(_("Calculating Brovey transformation..."))

        if sproc:
            # serial processing
            e = '''eval(k = "$ms1" + "$ms2" + "$ms3")
                "$outr" = 1.0 * "$ms3" * "$panmatch3" / k
                "$outg" = 1.0 * "$ms2" * "$panmatch2" / k
                "$outb" = 1.0 * "$ms1" * "$panmatch1" / k'''
            grass.mapcalc(e, outr=outr, outg=outg, outb=outb,
                          panmatch1=panmatch1, panmatch2=panmatch2,
                          panmatch3=panmatch3, ms1=ms1, ms2=ms2, ms3=ms3,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms1, panmatch1, ms1, ms2, ms3),
                                     overwrite=True)
            pg = grass.mapcalc_start('%s_green = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms2, panmatch2, ms1, ms2, ms3),
                                     overwrite=True)
            pr = grass.mapcalc_start('%s_red = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms3, panmatch3, ms1, ms2, ms3),
                                     overwrite=True)

            pb.wait()
            pg.wait()
            pr.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name='%s,%s,%s' % (panmatch1, panmatch2, panmatch3))

    elif sharpen == "ihs":
        grass.verbose(_("Using IHS<->RGB algorithm"))
        # transform RGB channels into IHS color space
        grass.message(_("Transforming to IHS color space..."))
        grass.run_command('i.rgb.his', overwrite=True,
                          red=ms3,
                          green=ms2,
                          blue=ms1,
                          hue="tmp%s_hue" % pid,
                          intensity="tmp%s_int" % pid,
                          saturation="tmp%s_sat" % pid)

        # pan/intensity histogram matching using linear regression
        target = "tmp%s_int" % pid
        outname = "tmp%s_pan_int" % pid
        panmatch = matchhist(pan, target, outname)

        # substitute pan for intensity channel and transform back to RGB color space
        grass.message(_("Transforming back to RGB color space and sharpening..."))
        grass.run_command('i.his.rgb', overwrite=True,
                          hue="tmp%s_hue" % pid,
                          intensity="%s" % panmatch,
                          saturation="tmp%s_sat" % pid,
                          red="%s_red" % out,
                          green="%s_green" % out,
                          blue="%s_blue" % out)

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name=panmatch)

    elif sharpen == "pca":
        grass.verbose(_("Using PCA/inverse PCA algorithm"))
        grass.message(_("Creating PCA images and calculating eigenvectors..."))

        # initial PCA with RGB channels
        pca_out = grass.read_command('i.pca', quiet=True, rescale='0,0',
                                     input='%s,%s,%s' % (ms1, ms2, ms3),
                                     output='tmp%s.pca' % pid)
        if len(pca_out) < 1:
            grass.fatal(_("Input has no data. Check region settings."))

        b1evect = []
        b2evect = []
        b3evect = []
        for line in pca_out.replace('(', ',').replace(')', ',').splitlines():
            parts = line.split(',')
            b1evect.append(float(parts[1]))
            b2evect.append(float(parts[2]))
            b3evect.append(float(parts[3]))

        # inverse PCA with hi res pan channel substituted for principal component 1
        pca1 = 'tmp%s.pca.1' % pid
        pca2 = 'tmp%s.pca.2' % pid
        pca3 = 'tmp%s.pca.3' % pid
        b1evect1 = b1evect[0]
        b1evect2 = b1evect[1]
        b1evect3 = b1evect[2]
        b2evect1 = b2evect[0]
        b2evect2 = b2evect[1]
        b2evect3 = b2evect[2]
        b3evect1 = b3evect[0]
        b3evect2 = b3evect[1]
        b3evect3 = b3evect[2]

        outname = 'tmp%s_pan' % pid
        panmatch = matchhist(pan, ms1, outname)

        grass.message(_("Performing inverse PCA ..."))

        stats1 = grass.parse_command("r.univar", map=ms1, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats2 = grass.parse_command("r.univar", map=ms2, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats3 = grass.parse_command("r.univar", map=ms3, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))

        b1mean = float(stats1['mean'])
        b2mean = float(stats2['mean'])
        b3mean = float(stats3['mean'])

        if sproc:
            # serial processing
            outr = '%s_red' % out
            outg = '%s_green' % out
            outb = '%s_blue' % out

            cmd1 = "$outb = (1.0 * $panmatch * $b1evect1) + ($pca2 * $b2evect1) + ($pca3 * $b3evect1) + $b1mean"
            cmd2 = "$outg = (1.0 * $panmatch * $b1evect2) + ($pca2 * $b2evect2) + ($pca3 * $b3evect2) + $b2mean"
            cmd3 = "$outr = (1.0 * $panmatch * $b1evect3) + ($pca2 * $b2evect3) + ($pca3 * $b3evect3) + $b3mean"

            cmd = '\n'.join([cmd1, cmd2, cmd3])

            grass.mapcalc(cmd, outb=outb, outg=outg, outr=outr,
                          panmatch=panmatch, pca2=pca2, pca3=pca3,
                          b1evect1=b1evect1, b2evect1=b2evect1, b3evect1=b3evect1,
                          b1evect2=b1evect2, b2evect2=b2evect2, b3evect2=b3evect2,
                          b1evect3=b1evect3, b2evect3=b2evect3, b3evect3=b3evect3,
                          b1mean=b1mean, b2mean=b2mean, b3mean=b3mean,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect1, pca2,
                                        b2evect1, pca3, b3evect1, b1mean),
                                     overwrite=True)

            pg = grass.mapcalc_start('%s_green = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect2, pca2,
                                        b2evect2, pca3, b3evect2, b2mean),
                                     overwrite=True)

            pr = grass.mapcalc_start('%s_red = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect3, pca2,
                                        b2evect3, pca3, b3evect3, b3mean),
                                     overwrite=True)

            pr.wait()
            pg.wait()
            pb.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type="raster",
                          pattern='tmp%s*,%s' % (pid, panmatch))

    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(_("Assigning grey equalized color tables to output images..."))
    # equalized grey scales give best contrast
    for ch in ['red', 'green', 'blue']:
        grass.run_command('r.colors', quiet=True, map="%s_%s" % (out, ch),
                          flags="e", color='grey')

    # Landsat output is too blue-ish because the panchromatic band is less
    # sensitive to blue light, so the output blue channel can be modified
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        rules = grass.tempfile()
        colors = open(rules, 'w')
        colors.write('5 0 0 0\n20 200 200 200\n40 230 230 230\n67 255 255 255 \n')
        colors.close()

        grass.run_command('r.colors', map="%s_blue" % out, rules=rules)
        os.remove(rules)

    # output notice
    grass.verbose(_("The following pan-sharpened output maps have been generated:"))
    for ch in ['red', 'green', 'blue']:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(_("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(_("If desired, combine channels into a single RGB map with 'r.composite'."))
    grass.verbose(_("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ['red', 'green', 'blue']:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three output maps
    grass.run_command('i.group', group=out,
                      input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.run_command('g.remove', flags="f", type="raster",
                      pattern="tmp%s*" % pid, quiet=True)
Example #36
0
def main():
    # Get the options
    input = options["input"]
    timestamp_column = options["timestamp_column"]
    columns = options["column"]
    layer = options["layer"]
    where = options["where"]
    strds = options["strds"]
    tempwhere = options["t_where"]
    i_flag = flags["i"]

    if where == "" or where == " " or where == "\n":
        where = None

    # overwrite = grass.overwrite()

    # Set verbosity level
    # quiet = True
    # if grass.verbosity() > 2:
    #     quiet = False

    grass.warning(_('This addon is experimental!'))

    # Check DB connection for input vector map
    dbcon = grass.vector_layer_db(input, layer)
    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")
    if len(column_names) != len(strds_names):
        grass.fatal(_('Number of columns and number of STRDS do not match.'))

    # Check type of timestamp column
    cols = grass.vector_columns(input, layer=layer)
    if timestamp_column not in cols.keys():
        grass.fatal(
            _('Could not find column {} in table connected to vector map {} '
              'at layer {}').format(timestamp_column, input, layer))
    if cols[timestamp_column]['type'] != 'DATE':
        if dbcon['driver'] != 'sqlite':
            # Note that SQLite does not have a DATE datatype and
            # an index does not significantly speed up the process
            # (at least not with a couple of 100 points)
            grass.warning(
                _('Timestamp column is of type {}. It is recommended '
                  'to use DATE type with an index.').format(
                      cols[timestamp_column]['type']))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # Determine the temporal extent of the points; it is needed below to
    # limit the selection of raster maps from each STRDS
    extent = []
    for stat in ('min', 'max'):
        tsql = "SELECT {}({}) FROM {}".format(stat, timestamp_column,
                                              dbcon['table'])
        extent.append(grass.read_command('db.select', flags='c',
                                         sql=tsql).strip())

    grass.verbose(
        _('Temporal extent of vector points map is '
          '{} to {}').format(extent[0], extent[1]))

    if tempwhere:
        tempwhere = '({}) AND '.format(tempwhere)

    # Loop over STRDS
    counter = 0
    for strds_name in strds_names:

        cur_strds = tgis.open_old_stds(strds_name, "strds", dbif)

        granu = cur_strds.get_granularity()
        start_time = tgis.datetime_math.check_datetime_string(extent[0])
        start_gran = tgis.datetime_math.adjust_datetime_to_granularity(
            start_time, granu).isoformat()
        # Build the selection condition per STRDS so that conditions do
        # not accumulate over loop iterations
        map_where = tempwhere + "(end_time > '{}' and start_time <= '{}')".format(
            start_gran, extent[1])

        # Get info on registered maps in STRDS
        rows = cur_strds.get_registered_maps("name,mapset,start_time,end_time",
                                             map_where, "start_time", dbif)

        # Check temporal type and
        # define sampling function to use
        # becomes relevant when temporal type relative gets implemented
        if cur_strds.is_time_relative():
            grass.fatal(
                _('Sorry, STRDS of relative temporal type is not (yet) supported'
                  ))
            sample = sample_relative
        else:
            sample = sample_absolute

        # Check if there are raster maps to sample from that fulfill
        # the temporal conditions
        if not rows and not tempwhere:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <{}> is empty").format(
                    cur_strds.get_id()))
        elif not rows and tempwhere:
            dbif.close()
            grass.fatal(
                _("No maps selected from space time raster dataset <{}>, "
                  "or dataset is empty").format(cur_strds.get_id()))

        # Include temporal condition into where clause
        where_clause = '({}) AND '.format(where) if where else ''

        # Loop over registered maps in STRDS
        row_number = 0
        for row in rows:
            # If r.what had a where option, r.what could be used to
            # collect raster values (without interpolation)
            # in a ParallelModuleQueue to collect values using multiple
            # cores and then upload results in one operation

            sample(input, layer, timestamp_column, column_names[counter], row,
                   where_clause, i_flag)

            row_number += 1
            grass.percent(row_number, len(rows), 3)
        counter = counter + 1

    dbif.close()
    grass.vector_history(input)
Example #37
0
def main():
    global temp_ng, temp_ncin, temp_ncout

    # we discard stderrs when not debugging
    # ideally stderrs should be printed when an exception was raised
    # this would be done easily with StringIO
    # but it doesn't work with subprocess
    if not grass.debug_level():
        nuldev = open(os.devnull, "w")
    else:
        nuldev = sys.stderr

    # Initialise temporary vector map names
    temp_ng = "v_lidar_mcc_tmp_ng_" + str(os.getpid())
    temp_ncin = "v_lidar_mcc_tmp_ncin_" + str(os.getpid())
    temp_ncout = "v_lidar_mcc_tmp_ncout_" + str(os.getpid())

    input = options["input"]
    g_output = options["ground"]
    ng_output = options["nonground"]

    # does map exist?
    if not grass.find_file(input, element="vector")["file"]:
        grass.fatal(_("Vector map <%s> not found") % input)

    # Count points in input map
    n_input = grass.vector_info(input)["points"]

    # does map contain points ?
    if not (n_input > 0):
        grass.fatal(_("Vector map <%s> does not contain points") % input)

    flag_n = flags["n"]

    ### Scale domain (l)
    # Evans & Hudak 2007 used scale domains 1 to 3
    l = int(1)
    l_stop = int(options["nl"])
    if l_stop < 1:
        grass.fatal("The minimum number of scale domains is 1.")

    ### Curvature tolerance threshold (t)
    # Evans & Hudak 2007 used a t-value of 0.3
    t = float(options["t"])
    ### Increase of curvature tolerance threshold for each scale domain
    ti = t / 3.0

    ### Convergence threshold (j)
    # Evans & Hudak 2007 used a convergence threshold of 0.3
    j = float(options["j"])
    if j <= 0:
        grass.fatal("The convergence threshold has to be > 0.")

    ### Tension parameter (f)
    # Evans & Hudak 2007 used a tension parameter 1.5
    f = float(options["f"])
    if f <= 0:
        grass.fatal("The tension parameter has to be > 0.")

    ### Spline steps parameter (s)
    # Evans & Hudak 2007 used the 12 nearest neighbors
    # (used spline steps $res * 5 before)
    s = int(options["s"])
    if s <= 0:
        grass.fatal("The spline step parameter has to be > 0.")

    ###Read desired resolution from region
    # Evans & Hudak 2007 used a desired resolution (delta) of 1.5
    gregion = grass.region()
    x_res_fin = gregion["ewres"]
    y_res_fin = gregion["nsres"]

    # Define resolution steps in iteration; integer division keeps the
    # original Python 2 semantics under Python 3
    n_res_steps = (l_stop + 1) // 2

    # Pass name of input map to v.outlier
    nc_points = input

    # controls first creation of the output map before patching
    ng_output_exists = False
    # append and do not build topology
    vpatch_flags = "ab"

    # 7.x requires topology to see z coordinate
    # 7.1 v.patch has flags to use z even without topology
    # see #2433 on Trac and r66822 in Subversion
    build_before_patch = True
    unused, gver_minor, unused = grass.version()["version"].split(".")
    if int(gver_minor) >= 1:
        build_before_patch = False
        # do not expect topology and expect z
        vpatch_flags += "nz"

    # Loop through scale domaines
    while l <= l_stop:
        i = 1
        convergence = 100
        if l < ((l_stop + 1) // 2):
            xres = x_res_fin / (n_res_steps - (l - 1))
            yres = y_res_fin / (n_res_steps - (l - 1))
        elif l == ((l_stop + 1) // 2):
            xres = x_res_fin
            yres = y_res_fin
        else:
            xres = x_res_fin * ((l + 1) - n_res_steps)
            yres = y_res_fin * ((l + 1) - n_res_steps)
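
        # Worked example (illustrative): with l_stop = 3, n_res_steps is 2,
        # so scale domain 1 runs at half the target resolution
        # (xres = x_res_fin / 2), domain 2 at the target resolution and
        # domain 3 at twice the target resolution (xres = x_res_fin * 2).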

        grass.use_temp_region()
        grass.run_command(
            "g.region",
            s=gregion["s"],
            w=gregion["w"],
            nsres=yres,
            ewres=xres,
            flags="a",
        )
        xs_s = xres * s
        ys_s = yres * s
        grass.message("Processing scale domain " + str(l) + "...")
        # Repeat application of v.outlier until convergence level is reached
        while convergence > j:
            grass.verbose("Number of input points in iteration " + str(i) +
                          ": " + str(n_input))
            # Run v.outlier; the n flag selects negative instead of
            # positive outliers
            outlier_filter = "negative" if flag_n else "positive"
            grass.run_command(
                "v.outlier",
                input=nc_points,
                output=temp_ncout,
                outlier=temp_ng,
                ew_step=xs_s,
                ns_step=ys_s,
                lambda_=f,
                threshold=t,
                filter=outlier_filter,
                overwrite=True,
                quiet=True,
                stderr=nuldev,
            )

            # Get information about results for calculating convergence level
            ng = grass.vector_info(temp_ng)["points"]
            nc = n_input - ng
            n_input = nc
            grass.run_command(
                "g.remove",
                flags="f",
                type="vector",
                name=temp_ncin,
                quiet=True,
                stderr=nuldev,
            )
            grass.run_command(
                "g.rename",
                vector=temp_ncout + "," + temp_ncin,
                quiet=True,
                stderr=nuldev,
            )
            nc_points = temp_ncin
            # Give information on process status
            grass.verbose("Unclassified points after iteration " + str(i) +
                          ": " + str(nc))
            grass.verbose("Points classified as non ground after iteration " +
                          str(i) + ": " + str(ng))
            # Set convergence level
            if nc > 0:
                convergence = float(float(ng) / float(nc))
                if build_before_patch:
                    grass.run_command("v.build", map=temp_ng, stderr=nuldev)
                # Patch non-ground points to non-ground output map
                if ng_output_exists:
                    grass.run_command(
                        "v.patch",
                        input=temp_ng,
                        output=ng_output,
                        flags=vpatch_flags,
                        overwrite=True,
                        quiet=True,
                        stderr=nuldev,
                    )
                else:
                    grass.run_command("g.copy",
                                      vector=(temp_ng, ng_output),
                                      stderr=nuldev)
                    ng_output_exists = True
            else:
                convergence = 0
            # Give information on convergence level
            grass.verbose("Convergence level after run " + str(i) +
                          " in scale domain " + str(l) + ": " +
                          str(round(convergence, 3)))
            # Increase iterator
            i = i + 1
        # Adjust curvature tolerance and reset scale domain
        t = t + ti
        l = l + 1
        # Delete temporary region
        grass.del_temp_region()

    # Rename temporary map of points which have not been classified as
    # non-ground to the output vector map containing ground points
    grass.run_command("g.rename",
                      vector=nc_points + "," + g_output,
                      quiet=True,
                      stderr=nuldev)
Example #38
0
def main():
    """Do the real work
    """
    # Parse remaining variables
    network_map = options['input']
    # strip a possible @mapset part for use in messages
    network = network_map.split('@')[0]
    suffix = options['suffix']
    layer = options['layer']
    corridor_tolerance = options['corridor_tolerance']
    cores = int(options['cores'])
    where = None if options['where'] == '' else options['where']
    weights = options['weights'].split(',')
    s_flag = flags['s']
    d_flag = flags['d']
    r_flag = flags['r']

    # getrlimit() returns a (soft, hard) tuple; use the soft limit
    ulimit = resource.getrlimit(resource.RLIMIT_NOFILE)[0]

    net_hist_str = grass.read_command('v.info', map=network_map,
                                      flags='h').split('\n')[0].split(': ')[1]

    dist_cmd_dict = task.cmdstring_to_tuple(net_hist_str)

    dist_prefix = dist_cmd_dict[1]['prefix']
    #network_prefix = dist_cmd_dict[1]['prefix']

    #print(where)

    # in_vertices = dist_cmd_dict[1]['input']

    # Check if db-connection for edge map exists
    con = vect.vector_db(network_map)[int(layer)]
    if not con:
        grass.fatal("Database connection for map {} "
                    "is not defined for layer {}.".format(network, layer))

    #Check if required columns exist and are of required type
    required_columns = ['con_id_u', 'from_p', 'to_p', 'cd_u']
    if weights:
        required_columns += weights

    in_columns = vect.vector_columns(network_map, layer=layer)

    missing_columns = np.setdiff1d(required_columns, in_columns.keys())

    if missing_columns:
        grass.fatal("Cannot find the following required/requested "
                    "column(s) {} in vector map "
                    "{}.".format(', '.join(missing_columns), network))

    weight_types = []
    # Check that each required column is of a numeric type
    for col in required_columns:
        if in_columns[col]['type'] not in [
                'INTEGER', 'DOUBLE PRECISION', 'REAL'
        ]:
            grass.fatal("Column {} is of type {}. \
                         Only numeric types (integer, \
                         real or double precision) \
                         allowed!".format(col, in_columns[col]['type']))

        if col in weights:
            weight_types.append(in_columns[col]['type'])

    # Extract necessary information on edges from attribute table of
    # edge map
    table_io = StringIO(
        grass.read_command('v.db.select',
                           flags='c',
                           map=network_map,
                           columns=required_columns,
                           separator=',',
                           where=where))

    try:
        table_extract = np.genfromtxt(table_io,
                                      delimiter=',',
                                      dtype=None,
                                      names=required_columns)
    except Exception:
        grass.fatal('No edges selected to compute corridors for...')

    # Output result of where-clause and exit (if requested)
    if s_flag:
        print(table_extract)
        #grass.message("con_id_u|from_p|to_p")
        #for fid in $selected_edges_ud:
        #    message_text = $(echo $table_extract | tr ' ' '\n' |
        # tr ',' ' ' | awk -v FID=$fid '{if($1==FID) print $1 "|" $2 "|"
        #  $3}' | head -n 1)
        #    grass.message(message_text)
        sys.exit(0)

    #Get unique identifiers for the selected undirected edges
    selected_patches = np.unique(
        np.append(table_extract['from_p'], table_extract['to_p']))

    selected_edges = np.unique(table_extract['con_id_u'])

    # activate z-flag if more maps have to be aggregated than ulimit
    z_flag = None if len(selected_edges) < ulimit else 'z'
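    # r.series keeps all input maps open at once unless the z flag is
    # given; -z avoids hitting the open-file limit at some speed cost,
    # hence the switch on ulimit above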

    #Check if cost distance raster maps exist
    pattern = "{}_patch_*_cost_dist".format(dist_prefix)
    patchmaps = grass.read_command('g.list', pattern=pattern,
                                   type='raster').rstrip('\n').split('\n')

    for patch in selected_patches:
        #Check if cost distance raster maps exist
        patchmap = "{}_patch_{}_cost_dist".format(dist_prefix, patch)
        if patchmap not in patchmaps:
            grass.fatal("Cannot find raster map {}.".format(patchmap))

    #Create mapcalculator expressions for cost distance corridors,
    # assigning distance values
    corridormaps = {}
    if d_flag:
        pattern = "{}_corridor_*_cost_dist".format(dist_prefix)
        corridor_base = 'dist'
    else:
        pattern = "{}_corridor_[0-9]+$".format(dist_prefix)
        corridor_base = 'id'

    corridormaps[corridor_base] = grass.read_command(
        'g.list', flags='e', pattern=pattern,
        type='raster').rstrip('\n').split('\n')
    for weight in weights:
        pattern = "{}_corridor_[0-9]+_{}".format(dist_prefix, weight)
        corridormaps[weight] = grass.read_command(
            'g.list', flags='e', pattern=pattern,
            type='raster').rstrip('\n').split('\n')

    # Setup GRASS modules for raster processing
    mapcalc = Module("r.mapcalc", quiet=True, run_=False)
    reclass = Module("r.reclass", rules='-', quiet=True, run_=False)
    recode = Module("r.recode", rules='-', quiet=True, run_=False)

    # Setup parallel module queue if parallel processing is requested
    #print(weight_types)
    if cores > 1:
        mapcalc_queue = ParallelModuleQueue(nprocs=cores)

        if 'INTEGER' in weight_types:
            reclass_queue = ParallelModuleQueue(nprocs=cores)

        if 'REAL' in weight_types or 'DOUBLE PRECISION' in weight_types:
            recode_queue = ParallelModuleQueue(nprocs=cores)

    corridor_list = []
    for edge_id in selected_edges:
        edge = table_extract[table_extract['con_id_u'] == edge_id][0]
        #print(e.dtype.names)
        if d_flag:
            corridor = "{}_corridor_{}_cost_dist".format(dist_prefix, edge_id)
            #corridor_list.append(corridor)
            mc_expression = "{prefix}_corridor_{CON_ID}_cost_dist=if( \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist) - \
            (({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist) * \
            {cor_tolerance}/100.0)<= \
            ({prefix}_patch_{FROM_P}_cost_dist + \
            {prefix}_patch_{TO_P}_cost_dist), \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist), \
            null())".format(prefix=dist_prefix,
                            CON_ID=edge['con_id_u'],
                            FROM_P=edge['from_p'],
                            TO_P=edge['to_p'],
                            cor_tolerance=corridor_tolerance)
        else:
            corridor = "{}_corridor_{}".format(dist_prefix, edge['con_id_u'])
            #corridor_list.append(corridor)
            # Create mapcalculator expressions for cost distance
            # corridors, assigning connection IDs for reclassification
            mc_expression = "{prefix}_corridor_{CON_ID}=if( \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist)- \
            (({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist)* \
            {cor_tolerance}/100.0)<={CD}, \
            {CON_ID}, null())".format(prefix=dist_prefix,
                                      CON_ID=edge['con_id_u'],
                                      FROM_P=edge['from_p'],
                                      TO_P=edge['to_p'],
                                      CD=edge['cd_u'],
                                      cor_tolerance=corridor_tolerance)
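
        # In words (ID branch): a cell belongs to the corridor between
        # from_p and to_p when the sum of the two cost-distance surfaces
        # exceeds the least-cost distance cd_u by no more than
        # cor_tolerance percent; qualifying cells receive the connection
        # ID, all other cells become null().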

        corridor_list.append(corridor)
        #print(corridor)
        #print(corridormaps)

        if r_flag or corridor not in corridormaps[corridor_base]:
            new_mapcalc = copy.deepcopy(mapcalc)

            if cores > 1:
                calc = new_mapcalc(expression=mc_expression)
                mapcalc_queue.put(calc)
            else:
                calc = new_mapcalc(expression=mc_expression,
                                   region='intersect')
                calc.run()

        for weight in weights:
            if r_flag or corridor not in corridormaps[weight]:
                in_map = corridor
                out_map = '{}_{}'.format(in_map, weight)
                if in_columns[weight]['type'] == 'INTEGER':
                    new_reclass = copy.deepcopy(reclass)
                    reclass_rule = "{} = {}".format(edge['con_id_u'],
                                                    edge[weight])
                    rcl = new_reclass(input=in_map,
                                      output=out_map,
                                      stdin_=reclass_rule)

                    if cores > 1:
                        reclass_queue.put(rcl)
                    else:
                        rcl.run()

                if in_columns[weight]['type'] in ['REAL', 'DOUBLE PRECISION']:
                    new_recode = copy.deepcopy(recode)
                    recode_rule = "{0}:{0}:{1}:{1}".format(
                        edge['con_id_u'], edge[weight])
                    rco = new_recode(input=in_map,
                                     output=out_map,
                                     stdin_=recode_rule)
                    if cores > 1:
                        recode_queue.put(rco)
                    else:
                        rco.run()

    if cores > 1:
        mapcalc_queue.wait()
        if 'INTEGER' in weight_types:
            reclass_queue.wait()
        if 'REAL' in weight_types or 'DOUBLE PRECISION' in weight_types:
            recode_queue.wait()

    grass.verbose('Aggregating corridor maps...')

    if d_flag:
        grass.run_command('r.series',
                          flags=z_flag,
                          quiet=True,
                          input=','.join(corridor_list),
                          output='{}_corridors_min_cost_dist_{}'.format(
                              dist_prefix, suffix),
                          method='minimum')
    else:
        #Summarize corridors
        if not weights:
            # print(','.join(corridor_list))  # debug output
            output_map = '{}_corridors_count_{}'.format(dist_prefix, suffix)
            grass.run_command('r.series',
                              flags=z_flag,
                              quiet=True,
                              input=','.join(corridor_list),
                              output=output_map,
                              method='count')
            write_raster_history(output_map)

        else:
            #Weight corridors according to user requested weights
            for weight in weights:
                # Generate corridor map list
                corridor_map_list = (cm + '_{}'.format(weight)
                                     for cm in corridor_list)
                output_map = '{}_corridors_{}_sum_{}'.format(
                    dist_prefix, weight, suffix)
                #Summarize corridors using r.series
                grass.run_command('r.series',
                                  flags=z_flag,
                                  quiet=True,
                                  input=corridor_map_list,
                                  output=output_map,
                                  method='sum')
                write_raster_history(output_map)
Example #39
0
def main():
    indsn = options['input']
    inlayer = options['layer']
    inwhere = options['where']
    inenc = options['encoding']
    inkey = options['key']
    ingeom = options['geometry']
    listlayers = flags['l']

    min_area = options['min_area']

    outdsn = options['output']
    outformat = options['format']
    outclean = "%s_clean" % inlayer
    outoverlaps = "%s_overlaps" % inlayer

    overwrite = grass.overwrite()

    # list input layers
    if listlayers:
        try:
            grass.run_command('v.in.ogr', input=indsn, flags='l')
        except CalledModuleError:
            grass.fatal(_("Unable to list layers in OGR datasource <%s>") % indsn)
        return 0

    # list output formats
    if flags['f']:
        grass.run_command('v.out.ogr', flags='l')
        return 0

    # import options
    vopts = {}
    if options['encoding']:
        vopts['encoding'] = options['encoding']
    if options['where']:
        vopts['where'] = options['where']
    if options['geometry']:
        vopts['geometry'] = options['geometry']
    if options['key']:
        vopts['key'] = options['key']
    if options['snap']:
        vopts['snap'] = options['snap']

    # create temp location from input without import
    grassenv = grass.gisenv()
    tgtloc = grassenv['LOCATION_NAME']
    tgtmapset = grassenv['MAPSET']
    GISDBASE = grassenv['GISDBASE']
    tgtgisrc = os.environ['GISRC']
    SRCGISRC = grass.tempfile()

    TMPLOC = 'temp_import_location_' + str(os.getpid())

    f = open(SRCGISRC, 'w')
    f.write('MAPSET: PERMANENT\n')
    f.write('GISDBASE: %s\n' % GISDBASE)
    f.write('LOCATION_NAME: %s\n' % TMPLOC)
    f.write('GUI: text\n')
    f.close()

    grass.verbose(_("Creating temporary location for <%s>...") % indsn)
    try:
        grass.run_command('v.in.ogr', input=indsn,
                          location=TMPLOC, flags='i', quiet=True, overwrite=overwrite, **vopts)
    except CalledModuleError:
        grass.fatal(_("Unable to create location from OGR datasource <%s>") % indsn)

    # switch to temp location
    os.environ['GISRC'] = str(SRCGISRC)

    outvect = 'vector_clean'
    outvect_tmp = 'vector_clean'
    if float(min_area) > 0:
        outvect_tmp = 'vector_clean_import'

    # import into temp location
    grass.message(_("Importing <%s>, layer <%s> ...") % (indsn, inlayer))
    try:
        grass.run_command('v.in.ogr', input=indsn, layer=inlayer,
                          output=outvect_tmp, overwrite=overwrite, **vopts)
    except CalledModuleError:
        grass.fatal(_("Unable to import OGR datasource <%s>") % indsn)

    # remove small areas
    if float(min_area) > 0:
        grass.message(_("Removing small areas in data source <%s>, layer <%s> ...") % (indsn, inlayer))
        try:
            grass.run_command('v.clean', input=outvect_tmp, output=outvect,
                      type='area', tool='rmarea', threshold=min_area, overwrite=overwrite)
        except CalledModuleError:
            grass.fatal(_("Removing small areas in data source <%s>, layer <%s> failed") % (indsn, inlayer))

    # export
    oflags = 'sm'
    if flags['u']:
        oflags = 'smu'
        overwrite = True

    outlayer = '%s_clean' % inlayer
    grass.message = (_("Exporting cleaned layer as <%s>") % outlayer)
    try:
        grass.run_command('v.out.ogr', input=outvect, layer='1', output=outdsn,
                        output_layer=outlayer, format=outformat, flags=oflags,
                        overwrite=overwrite)
    except CalledModuleError:
        grass.fatal(_("Unable to export to OGR datasource <%s>") % outdsn)

    # export any overlaps
    outlayers = grass.read_command('v.category', input=outvect, option='layers')

    nlayers = len(outlayers.splitlines())

    if nlayers == 2:
        outlayer = '%s_overlaps' % inlayer
        oflags = 'smu'
        grass.message = (_("Exporting overlaps as <%s>") % outlayer)
        try:
            grass.run_command('v.out.ogr', input=outvect, layer='2', output=outdsn,
                      output_layer=outlayer, format=outformat, flags=oflags,
                      overwrite=True)
        except CalledModuleError:
            grass.fatal(_("Unable to export to OGR datasource <%s>") % outdsn)

    # switch to target location
    os.environ['GISRC'] = str(tgtgisrc)

    return 0
Example #40
0
def start_connection_backend(options, backends):
    """Create and start a connection using a requested or available backend"""

    session = None
    for backend in backends:
        if backend == "paramiko":
            try:
                # Lazy-import to reduce import-time dependencies.
                # pylint: disable=import-outside-toplevel
                from friendlyssh import Connection

                session = Connection(
                    username=options["user"],
                    host=options["server"],
                    password=options["password"],
                    port=options["port"],
                )
                gs.verbose(_("Using Paramiko backend"))
                break
            except ImportError as error:
                gs.verbose(
                    _("Tried Paramiko backend but"
                      " it is not available (%s)" % error))
                continue
        elif backend == "fabric":
            try:
                # Lazy-import to reduce import-time dependencies.
                # pylint: disable=import-outside-toplevel
                from fabricbackend import FabricConnection

                session = FabricConnection(
                    user=options["user"],
                    host=options["server"],
                    connect_kwargs={"password": options["password"]},
                    port=options["port"],
                )
                gs.verbose(_("Using Fabric backend"))
                break
            except ImportError as error:
                gs.verbose(
                    _("Tried Fabric backend but it is not available: {}".
                      format(error)))
                continue
        elif backend == "simple":
            try:
                # Lazy-import to reduce import-time dependencies.
                # pylint: disable=import-outside-toplevel
                from simplessh import SshConnection as Connection

                # TODO: support password and port (or warn they are missing)
                session = Connection(user=options["user"],
                                     host=options["server"])
                gs.verbose(_("Using simple (ssh and scp) backend"))
                break
            except ImportError as error:
                gs.verbose(
                    _("Tried simple (ssh and scp) backend but"
                      " it is not available (%s)" % error))
                continue
        elif backend == "pexpect":
            try:
                # Lazy-import to reduce import-time dependencies.
                # pylint: disable=import-outside-toplevel
                from pexpectssh import SshSession as Connection

                # TODO: support port (or warn it's missing)
                session = Connection(
                    user=options["user"],
                    host=options["server"],
                    logfile="gcloudsshiface.log",
                    verbose=1,
                    password=options["password"],
                )
                gs.verbose(_("Using Pexpect (with ssh and scp) backend"))
                break
            except ImportError as error:
                gs.verbose(
                    _("Tried Pexpect (ssh, scp and pexpect)"
                      " backend but it is not available"
                      " (%s)" % error))
                continue
        elif backend == "local":
            try:
                # Lazy-import to reduce import-time dependencies.
                # pylint: disable=import-outside-toplevel
                from localsession import LocalConnection as Connection

                session = Connection()
                gs.verbose(_("Using local host backend"))
                break
            except ImportError as error:
                gs.verbose(
                    _("Tried local host"
                      " backend but it is not available"
                      " (%s)" % error))
                continue
    return session
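
A possible call site for the function above (a sketch; the backend order shown and the surrounding option parsing are assumptions, not part of this example):

    session = start_connection_backend(
        options, backends=["paramiko", "fabric", "simple", "pexpect", "local"])
    if session is None:
        gs.fatal(_("No usable connection backend found"))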
Example #41
0
    def filter(
        self,
        area,
        area_relation,
        clouds=None,
        producttype=None,
        limit=None,
        query=None,
        start=None,
        end=None,
        sortby=None,
        asc=True,
        relativeorbitnumber=None,
    ):
        args = {}
        if clouds:
            args["cloudcoverpercentage"] = (0, int(clouds))
        if relativeorbitnumber:
            args["relativeorbitnumber"] = relativeorbitnumber
            if producttype.startswith("S2") and int(relativeorbitnumber) > 143:
                gs.warning("This relative orbit number is out of range")
            elif int(relativeorbitnumber) > 175:
                gs.warning(_("This relative orbit number is out of range"))
        if producttype:
            args["producttype"] = producttype
            if producttype.startswith("S2"):
                args["platformname"] = "Sentinel-2"
            else:
                args["platformname"] = "Sentinel-1"
        if not start:
            start = "NOW-60DAYS"
        else:
            start = start.replace("-", "")
        if not end:
            end = "NOW"
        else:
            end = end.replace("-", "")
        if query:
            redefined = [
                value for value in args.keys() if value in query.keys()
            ]
            if redefined:
                gs.warning(
                    _("Query overrides already defined options ({})").format(
                        ",".join(redefined)))
            args.update(query)
        gs.verbose(
            _("Query: area={} area_relation={} date=({}, {}) args={}").format(
                area, area_relation, start, end, args))
        products = self._api.query(area=area,
                                   area_relation=area_relation,
                                   date=(start, end),
                                   **args)
        products_df = self._api.to_dataframe(products)
        if len(products_df) < 1:
            gs.message(_("No product found"))
            return

        # sort and limit to first sorted product
        if sortby:
            self._products_df_sorted = products_df.sort_values(
                sortby, ascending=[asc] * len(sortby))
        else:
            self._products_df_sorted = products_df

        if limit:
            self._products_df_sorted = self._products_df_sorted.head(
                int(limit))

        gs.message(
            _("{} Sentinel product(s) found").format(
                len(self._products_df_sorted)))
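
An illustrative call of the method above (argument values are hypothetical and the surrounding downloader object is not shown in this example):

    downloader.filter(area=aoi_wkt, area_relation="Intersects",
                      clouds=30, producttype="S2MSI2A",
                      limit=5, sortby=["cloudcoverpercentage"], asc=True)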
Example #42
0
def main():
    if not hasNumPy:
        grass.fatal(_("Required dependency NumPy not found. Exiting."))

    sharpen = options['method']  # sharpening algorithm
    ms1 = options['blue']  # blue channel
    ms2 = options['green']  # green channel
    ms3 = options['red']  # red channel
    pan = options['pan']  # high res pan channel
    out = options['output']  # prefix for output RGB maps
    bladjust = flags['l']  # adjust blue channel
    sproc = flags['s']  # serial processing

    outb = grass.core.find_file('%s_blue' % out)
    outg = grass.core.find_file('%s_green' % out)
    outr = grass.core.find_file('%s_red' % out)

    if (outb['name'] != '' or outg['name'] != '' or outr['name'] != '') and not grass.overwrite():
        grass.warning(_('Maps with selected output prefix names already exist.'
                        ' Delete them or use overwrite flag'))
        return

    pid = str(os.getpid())

    # get PAN resolution:
    kv = grass.raster_info(map=pan)
    nsres = kv['nsres']
    ewres = kv['ewres']
    panres = (nsres + ewres) / 2

    # clone current region
    grass.use_temp_region()

    grass.run_command('g.region', res=panres, align=pan)

    grass.message(_("Performing pan sharpening with hi res pan image: %f" % panres))

    if sharpen == "brovey":
        grass.verbose(_("Using Brovey algorithm"))

        # pan/intensity histogram matching using linear regression
        outname = 'tmp%s_pan1' % pid
        panmatch1 = matchhist(pan, ms1, outname)

        outname = 'tmp%s_pan2' % pid
        panmatch2 = matchhist(pan, ms2, outname)

        outname = 'tmp%s_pan3' % pid
        panmatch3 = matchhist(pan, ms3, outname)

        outr = '%s_red' % out
        outg = '%s_green' % out
        outb = '%s_blue' % out

        # calculate brovey transformation
        grass.message(_("Calculating Brovey transformation..."))

        if sproc:
            # serial processing
            e = '''eval(k = "$ms1" + "$ms2" + "$ms3")
                "$outr" = 1.0 * "$ms3" * "$panmatch3" / k
                "$outg" = 1.0 * "$ms2" * "$panmatch2" / k
                "$outb" = 1.0 * "$ms1" * "$panmatch1" / k'''
            grass.mapcalc(e, outr=outr, outg=outg, outb=outb,
                          panmatch1=panmatch1, panmatch2=panmatch2,
                          panmatch3=panmatch3, ms1=ms1, ms2=ms2, ms3=ms3,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms1, panmatch1, ms1, ms2, ms3),
                                     overwrite=True)
            pg = grass.mapcalc_start('%s_green = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms2, panmatch2, ms1, ms2, ms3),
                                     overwrite=True)
            pr = grass.mapcalc_start('%s_red = (1.0 * %s * %s) / (%s + %s + %s)' %
                                     (out, ms3, panmatch3, ms1, ms2, ms3),
                                     overwrite=True)

            pb.wait()
            pg.wait()
            pr.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name='%s,%s,%s' % (panmatch1, panmatch2, panmatch3))

    elif sharpen == "ihs":
        grass.verbose(_("Using IHS<->RGB algorithm"))
        # transform RGB channels into IHS color space
        grass.message(_("Transforming to IHS color space..."))
        grass.run_command('i.rgb.his', overwrite=True,
                          red=ms3,
                          green=ms2,
                          blue=ms1,
                          hue="tmp%s_hue" % pid,
                          intensity="tmp%s_int" % pid,
                          saturation="tmp%s_sat" % pid)

        # pan/intensity histogram matching using linear regression
        target = "tmp%s_int" % pid
        outname = "tmp%s_pan_int" % pid
        panmatch = matchhist(pan, target, outname)

        # substitute pan for intensity channel and transform back to RGB color space
        grass.message(_("Transforming back to RGB color space and sharpening..."))
        grass.run_command('i.his.rgb', overwrite=True,
                          hue="tmp%s_hue" % pid,
                          intensity="%s" % panmatch,
                          saturation="tmp%s_sat" % pid,
                          red="%s_red" % out,
                          green="%s_green" % out,
                          blue="%s_blue" % out)

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type='raster',
                          name=panmatch)

    elif sharpen == "pca":
        grass.verbose(_("Using PCA/inverse PCA algorithm"))
        grass.message(_("Creating PCA images and calculating eigenvectors..."))

        # initial PCA with RGB channels
        pca_out = grass.read_command('i.pca', quiet=True, rescale='0,0',
                                     input='%s,%s,%s' % (ms1, ms2, ms3),
                                     output='tmp%s.pca' % pid)
        if len(pca_out) < 1:
            grass.fatal(_("Input has no data. Check region settings."))

        b1evect = []
        b2evect = []
        b3evect = []
        for l in pca_out.replace('(', ',').replace(')', ',').splitlines():
            b1evect.append(float(l.split(',')[1]))
            b2evect.append(float(l.split(',')[2]))
            b3evect.append(float(l.split(',')[3]))

        # inverse PCA with hi res pan channel substituted for principal component 1
        pca1 = 'tmp%s.pca.1' % pid
        pca2 = 'tmp%s.pca.2' % pid
        pca3 = 'tmp%s.pca.3' % pid
        b1evect1 = b1evect[0]
        b1evect2 = b1evect[1]
        b1evect3 = b1evect[2]
        b2evect1 = b2evect[0]
        b2evect2 = b2evect[1]
        b2evect3 = b2evect[2]
        b3evect1 = b3evect[0]
        b3evect2 = b3evect[1]
        b3evect3 = b3evect[2]

        outname = 'tmp%s_pan' % pid
        panmatch = matchhist(pan, ms1, outname)

        grass.message(_("Performing inverse PCA ..."))

        stats1 = grass.parse_command("r.univar", map=ms1, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats2 = grass.parse_command("r.univar", map=ms2, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))
        stats3 = grass.parse_command("r.univar", map=ms3, flags='g',
                                     parse=(grass.parse_key_val,
                                            {'sep': '='}))

        b1mean = float(stats1['mean'])
        b2mean = float(stats2['mean'])
        b3mean = float(stats3['mean'])

        if sproc:
            # serial processing
            outr = '%s_red' % out
            outg = '%s_green' % out
            outb = '%s_blue' % out

            cmd1 = "$outb = (1.0 * $panmatch * $b1evect1) + ($pca2 * $b2evect1) + ($pca3 * $b3evect1) + $b1mean"
            cmd2 = "$outg = (1.0 * $panmatch * $b1evect2) + ($pca2 * $b2evect1) + ($pca3 * $b3evect2) + $b2mean"
            cmd3 = "$outr = (1.0 * $panmatch * $b1evect3) + ($pca2 * $b2evect3) + ($pca3 * $b3evect3) + $b3mean"

            cmd = '\n'.join([cmd1, cmd2, cmd3])

            grass.mapcalc(cmd, outb=outb, outg=outg, outr=outr,
                          panmatch=panmatch, pca2=pca2, pca3=pca3,
                          b1evect1=b1evect1, b2evect1=b2evect1, b3evect1=b3evect1,
                          b1evect2=b1evect2, b2evect2=b2evect2, b3evect2=b3evect2,
                          b1evect3=b1evect3, b2evect3=b2evect3, b3evect3=b3evect3,
                          b1mean=b1mean, b2mean=b2mean, b3mean=b3mean,
                          overwrite=True)
        else:
            # parallel processing
            pb = grass.mapcalc_start('%s_blue = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect1, pca2,
                                        b2evect1, pca3, b3evect1, b1mean),
                                     overwrite=True)

            pg = grass.mapcalc_start('%s_green = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect2, pca2,
                                        b2evect2, pca3, b3evect2, b2mean),
                                     overwrite=True)

            pr = grass.mapcalc_start('%s_red = (%s * %f) + (%s * %f) + (%s * %f) + %f'
                                     % (out, panmatch, b1evect3, pca2,
                                        b2evect3, pca3, b3evect3, b3mean),
                                     overwrite=True)

            pr.wait()
            pg.wait()
            pb.wait()

        # Cleanup
        grass.run_command('g.remove', flags='f', quiet=True, type="raster",
                          pattern='tmp%s*,%s' % (pid, panmatch))

    # Could add other sharpening algorithms here, e.g. wavelet transformation

    grass.message(_("Assigning grey equalized color tables to output images..."))
    # equalized grey scales give best contrast
    for ch in ['red', 'green', 'blue']:
        grass.run_command('r.colors', quiet=True, map="%s_%s" % (out, ch),
                          flags="e", color='grey')

    # Landsat output is too blue-ish because the panchromatic band is less
    # sensitive to blue light, so the output blue channel can be modified
    if bladjust:
        grass.message(_("Adjusting blue channel color table..."))
        rules = grass.tempfile()
        colors = open(rules, 'w')
        colors.write('5 0 0 0\n20 200 200 200\n40 230 230 230\n67 255 255 255 \n')
        colors.close()

        grass.run_command('r.colors', map="%s_blue" % out, rules=rules)
        os.remove(rules)

    # output notice
    grass.verbose(_("The following pan-sharpened output maps have been generated:"))
    for ch in ['red', 'green', 'blue']:
        grass.verbose(_("%s_%s") % (out, ch))

    grass.verbose(_("To visualize output, run: g.region -p raster=%s_red" % out))
    grass.verbose(_("d.rgb r=%s_red g=%s_green b=%s_blue" % (out, out, out)))
    grass.verbose(_("If desired, combine channels into a single RGB map with 'r.composite'."))
    grass.verbose(_("Channel colors can be rebalanced using i.colors.enhance."))

    # write cmd history:
    for ch in ['red', 'green', 'blue']:
        grass.raster_history("%s_%s" % (out, ch))

    # create a group with the three output maps
    grass.run_command('i.group', group=out,
                      input="{n}_red,{n}_blue,{n}_green".format(n=out))

    # Cleanup
    grass.run_command('g.remove', flags="f", type="raster",
                      pattern="tmp%s*" % pid, quiet=True)
Example #43
0
def main():
    """Process command line parameters and update the table"""
    options, flags = gs.parser()

    vector = options["map"]
    layer = options["layer"]
    where = options["where"]
    column = options["column"]
    expression = options["expression"]
    condition = options["condition"]
    functions_file = options["functions"]

    # Map needs to be in the current mapset
    mapset = gs.gisenv()["MAPSET"]
    if not gs.find_file(vector, element="vector", mapset=mapset)["file"]:
        gs.fatal(
            _("Vector map <{vector}> does not exist or is not in the current mapset"
              "(<{mapset}>) and therefore it cannot be modified").format(
                  **locals()))

    # Map+layer needs to have a table connected
    try:
        # TODO: Support @OGR vector maps? Probably not supported by db.execute anyway.
        db_info = gs.vector_db(vector)[int(layer)]
    except KeyError:
        gs.fatal(
            _("There is no table connected to map <{vector}> (layer <{layer}>)."
              " Use v.db.connect or v.db.addtable to add it.").format(
                  **locals()))
    table = db_info["table"]
    database = db_info["database"]
    driver = db_info["driver"]
    columns = gs.vector_columns(vector, layer)

    # Check that column exists
    try:
        column_info = columns[column]
    except KeyError:
        gs.fatal(
            _("Column <{column}> not found. Use v.db.addcolumn to create it.").
            format(column=column))
    column_type = column_info["type"]

    # Check that optional function file exists
    if functions_file:
        if not os.access(functions_file, os.R_OK):
            gs.fatal(_("File <{file}> not found").format(file=functions_file))

    # Define Python functions
    # Here we need the full-deal eval and exec functions and can't use less
    # general alternatives such as ast.literal_eval.
    def expression_function(**kwargs):
        return eval(expression, globals(), kwargs)  # pylint: disable=eval-used

    def condition_function(**kwargs):
        return eval(condition, globals(), kwargs)  # pylint: disable=eval-used

    # TODO: Add error handling for failed imports.
    if options["packages"]:
        packages = options["packages"].split(",")
        for package in packages:
            # pylint: disable=exec-used
            exec(f"import {package}", globals(), globals())
            if flags["s"]:
                exec(f"from {package} import *", globals(), globals())

    # TODO: Add error handling for invalid syntax.
    if functions_file:
        with open(functions_file) as file:
            exec(file.read(), globals(), globals())  # pylint: disable=exec-used

    # Get table contents
    if not where:
        # The condition needs to be None, an empty string is passed through.
        where = None
    if gs.version()["version"] < "7.9":
        sep = "|"  # Only one char sep for Python csv package.
        null = "NULL"
        csv_text = gs.read_command(
            "v.db.select",
            map=vector,
            layer=layer,
            separator=sep,
            null=null,
            where=where,
        )
        table_contents = csv_loads(csv_text, delimeter=sep, null=null)
    else:
        # TODO: XXX is a workaround for a bug in v.db.select -j
        json_text = gs.read_command("v.db.select",
                                    map=vector,
                                    layer=layer,
                                    flags="j",
                                    null="XXX",
                                    where=where)
        table_contents = json.loads(json_text)

    cmd = python_to_transaction(
        table=table,
        table_contents=table_contents,
        column=column,
        column_type=column_type,
        expression=expression,
        expression_function=expression_function,
        condition=condition,
        condition_function=condition_function,
        ensure_lowercase=not flags["u"],
    )

    # Messages
    if len(cmd) == 2:
        gs.message(
            "No rows to update. Try a different SQL where or Python condition."
        )
    elif len(cmd) > 2:
        # First and last statement
        gs.verbose(f'Using SQL: "{cmd[1]}...{cmd[-2]}"')

    # The newline is needed for successful execution/reading of many statements.
    # TODO: Add error handling when there is a syntax error due to wrongly
    # generated SQL statement and/or sanitize the value in update more.
    gs.write_command("db.execute",
                     input="-",
                     database=database,
                     driver=driver,
                     stdin="\n".join(cmd))

    gs.vector_history(vector)
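
An illustrative way to call the module from a Python session, assuming this main() implements the v.db.pyupdate addon (map, column and expression values are hypothetical):

    gs.run_command("v.db.pyupdate", map="roads", column="label",
                   expression="name.upper()", condition="name is not None")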
Example #44
0
def main():
    vector = options['map']
    table = options['table']
    layer = options['layer']
    columns = options['columns']
    key = options['key']

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    if not grass.find_file(vector, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    map_name = vector.split('@')[0]

    if not table:
        if layer == '1':
            grass.verbose(_("Using vector map name as table name: <%s>") % map_name)
            table = map_name
        else:
            # to avoid tables with identical names on higher layers
            table = "%s_%s" % (map_name, layer)
            grass.verbose(
                _("Using vector map name extended by layer number as table name: <%s>") %
                table)
    else:
        grass.verbose(_("Using user specified table name: %s") % table)

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags='c')
    grass.verbose(_("Creating new DB connection based on default mapset settings..."))
    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    schema = kv['schema']

    # maybe there is already a table linked to the selected layer?
    nuldev = open(os.devnull, 'w')
    try:
        grass.vector_db(map_name, stderr=nuldev)[int(layer)]
        grass.fatal(_("There is already a table linked to layer <%s>") % layer)
    except KeyError:
        pass

    # maybe there is already a table with that name?
    tables = grass.read_command('db.tables', flags='p', database=database, driver=driver,
                                stderr=nuldev)

    if table not in tables.splitlines():
        if columns:
            column_def = [x.strip().lower() for x in columns.strip().split(',')]
        else:
            column_def = []

        # if not existing, create it:
        column_def_key = "%s integer" % key
        if column_def_key not in column_def:
            column_def.insert(0, column_def_key)
        column_def = ','.join(column_def)

        grass.verbose(_("Creating table with columns (%s)...") % column_def)

        sql = "CREATE TABLE %s (%s)" % (table, column_def)
        try:
            grass.run_command('db.execute',
                              database=database, driver=driver, sql=sql)
        except CalledModuleError:
            grass.fatal(_("Unable to create table <%s>") % table)

    # connect the map to the DB:
    if schema:
        table = '{schema}.{table}'.format(schema=schema, table=table)
    grass.run_command('v.db.connect', quiet=True,
                      map=map_name, database=database, driver=driver,
                      layer=layer, table=table, key=key)

    # finally we have to add cats into the attribute DB to make modules such as v.what.rast happy:
    # (creates new row for each vector line):
    grass.run_command('v.to.db', map=map_name, layer=layer,
                      option='cat', column=key, qlayer=layer)

    grass.verbose(_("Current attribute table links:"))
    if grass.verbosity() > 2:
        grass.run_command('v.db.connect', flags='p', map=map_name)

    # write cmd history:
    grass.vector_history(map_name)

    return 0
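
For illustration, assuming a hypothetical call with key="cat" and
columns="name varchar(20),value double precision", the code above
assembles and executes this statement (the key column is prepended
automatically when missing):

sql = "CREATE TABLE mymap (cat integer,name varchar(20),value double precision)"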
Example #45
0
    def _getBarrier(self, fieldblock, barrier):
        formula = "$barrier = if(isnull($fieldblock),1,0)"
        # "g" and "quiet" are assumed to be defined at module level
        g.mapcalc(formula, barrier=barrier, fieldblock=fieldblock, quiet=quiet)
        g.verbose('Raster map barrier is in "%s"' % barrier)
        return barrier
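
The same map algebra as a standalone call, sketched with hypothetical map
names: cells that are null in the field-block map become 1 (barrier), all
other cells become 0.

import grass.script as gs

gs.mapcalc("$barrier = if(isnull($fieldblock), 1, 0)",
           barrier="barrier", fieldblock="fieldblock", quiet=True)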
Example #46
0
def cleanup():
    # No cleanup is done here
    # see end of main()
    # kept for later
    grass.verbose(_("Module cleanup"))
Example #47
0
    def filter(self,
               area,
               area_relation,
               clouds=None,
               producttype=None,
               limit=None,
               query=None,
               start=None,
               end=None,
               sortby=None,
               asc=True):
        args = {}
        if clouds:
            args['cloudcoverpercentage'] = (0, int(clouds))
        if producttype:
            args['producttype'] = producttype
            if producttype.startswith('S2'):
                args['platformname'] = 'Sentinel-2'
            else:
                args['platformname'] = 'Sentinel-1'
        if not start:
            start = 'NOW-60DAYS'
        else:
            start = start.replace('-', '')
        if not end:
            end = 'NOW'
        else:
            end = end.replace('-', '')
        if query:
            redefined = [
                value for value in args.keys() if value in query.keys()
            ]
            if redefined:
                gs.warning(
                    "Query overrides already defined options ({})".format(
                        ','.join(redefined)))
            args.update(query)
        gs.verbose(
            "Query: area={} area_relation={} date=({}, {}) args={}".format(
                area, area_relation, start, end, args))
        products = self._api.query(area=area,
                                   area_relation=area_relation,
                                   date=(start, end),
                                   **args)
        products_df = self._api.to_dataframe(products)
        if len(products_df) < 1:
            gs.message(_('No product found'))
            return

        # sort and limit to first sorted product
        if sortby:
            self._products_df_sorted = products_df.sort_values(
                sortby, ascending=[asc] * len(sortby))
        else:
            self._products_df_sorted = products_df

        if limit:
            self._products_df_sorted = self._products_df_sorted.head(
                int(limit))

        gs.message(
            _('{} Sentinel product(s) found').format(
                len(self._products_df_sorted)))
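
A hedged usage sketch for the method above; "downloader" stands in for an
instance of the (unnamed) wrapper class, and the AOI is a WKT polygon as
accepted by the underlying sentinelsat-style API:

aoi = "POLYGON((12.4 48.9, 12.6 48.9, 12.6 49.1, 12.4 49.1, 12.4 48.9))"
downloader.filter(
    area=aoi,
    area_relation="Intersects",
    clouds=30,              # keep products with at most 30% cloud cover
    producttype="S2MSI2A",  # implies platformname='Sentinel-2'
    start="2023-01-01",     # hyphens are stripped internally
    end="2023-03-31",
    sortby=["cloudcoverpercentage"],
    limit=5,
)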
Example #48
0
def main():
    # The following declarations may be used in the future.
    global GISDBASE, LAYERCOUNT, LASTFILE

    # Check if ImageMagick is available since it is essential
    if os.name == "nt":
        if grass.find_program("magick", "-version"):
            grass.verbose(_("printws: ImageMagick is available: OK!"))
        else:
            grass.fatal(
                "ImageMagick is not accessible. See documentation of m.printws module for details."
            )
    else:
        if grass.find_program("convert", "-version"):
            grass.verbose(_("printws: ImageMagick is available: OK!"))
        else:
            grass.fatal(
                "ImageMagick is not accessible. See documentation of m.printws module for details."
            )

    textmacros = {}
    #%nam% macros are kept for backward compatibility
    textmacros["%TIME24%"] = time.strftime("%H:%M:%S")
    textmacros["%DATEYMD%"] = time.strftime("%Y.%m.%d")
    textmacros["%DATEMDY%"] = time.strftime("%m/%d/%Y")
    if not hasPwd:
        textmacros["%USERNAME%"] = "(user unknown)"
    else:
        textmacros["%USERNAME%"] = pwd.getpwuid(os.getuid())[0]
    # using $ for macros in the future. New items should be created
    # exclusively as $macros later on
    textmacros["\$TIME24"] = textmacros["%TIME24%"]
    textmacros["\$DATEYMD"] = textmacros["%DATEYMD%"]
    textmacros["\$DATEMDY"] = textmacros["%DATEMDY%"]
    textmacros["\$USERNAME"] = textmacros["%USERNAME%"]

    textmacros["\$SPC"] = "\\u00A0"  # ?? d.text won't display this at string end hmmm

    # save the current region so it can be restored at the end
    # (using the official method):
    grass.use_temp_region()

    # getting/setting screen/print dpi ratio

    if len(options["dpi"]) > 0:
        dpioption = float(options["dpi"])
    else:
        dpioption = 150.0

    if len(options["screendpi"]) > 0:
        screendpioption = float(options["screendpi"])
    else:
        screendpioption = 100.0

    global UPSIZE
    UPSIZE = float(dpioption) / float(screendpioption)

    if len(options["input"]) > 0:
        displays = readworkspace(options["input"])
    else:
        quit()

    textmacros["%GXW%"] = options["input"]
    textmacros["\$GXW"] = textmacros["%GXW%"]

    displaycounter = 0

    # there could be multiple displays in a workspace so we loop them
    # each display is a whole and independent file assembly
    for key in displays:
        textmacros["%DISPLAY%"] = key
        textmacros["\$DISPLAY"] = key
        grass.verbose(_("printws: rendering display: " + key))
        displaycounter = displaycounter + 1
        layers = copy.deepcopy(displays[key])

        # extract extent information from the layers dict and erase the item
        # extents[0-5] w s e n minz maxz ;  extents [6-9] window x y w h
        extents = layers[0]
        grass.verbose(
            "m.printws: EXTENTS from workspace:" + str(extents)
        )  # was debug message
        del layers[0]

        regionmode = ""
        if len(options["region"]) > 0:
            grass.run_command("g.region", region=options["region"])
            regionmode = "region"
        else:
            grass.run_command(
                "g.region", w=extents[0], s=extents[1], e=extents[2], n=extents[3]
            )
            regionmode = "window"

        # setting GRASS rendering environment

        # dummy file name is defined since the following lines
        # when switching on the cairo driver would create
        # an empty map.png in the current directory
        os.environ["GRASS_RENDER_FILE"] = os.path.join(
            TMPDIR, str(os.getpid()) + "_DIS_" + str(00) + "_GEN_" + str(00) + ".png"
        )
        os.environ["GRASS_RENDER_IMMEDIATE"] = "cairo"
        os.environ["GRASS_RENDER_FILE_READ"] = "TRUE"
        os.environ["GRASS_RENDER_TRANSPARENT"] = "TRUE"
        os.environ["GRASS_RENDER_FILE_COMPRESSION"] = "0"
        os.environ["GRASS_RENDER_FILE_MAPPED"] = "TRUE"

        # reading further options and setting defaults

        if len(options["page"]) > 0:
            pageoption = options["page"]
        else:
            pageoption = "A4landscape"

        # parsing titles, etc.
        if len(options["font"]) > 0:
            isAsterisk = options["font"].find("*")
            if isAsterisk > 0:
                titlefont = getfontbypattern(options["font"].replace("*", ""))
            else:
                titlefont = options["font"]
        else:
            titlefont = getfontbypattern("Open")  # try to find something UTF-8
        grass.verbose(_("printws: titlefont: " + titlefont))

        if len(options["titlecolor"]) > 0:
            titlecolor = options["titlecolor"]
        else:
            titlecolor = black

        if len(options["maintitlesize"]) > 0:
            maintitlesize = converttommfrom(
                float(options["maintitlesize"]), options["layunits"]
            )
        else:
            maintitlesize = 10.0

        if len(options["subtitlesize"]) > 0:
            subtitlesize = converttommfrom(
                float(options["subtitlesize"]), options["layunits"]
            )
        else:
            subtitlesize = 7.0

        if len(options["pssize"]) > 0:
            pssize = converttommfrom(float(options["pssize"]), options["layunits"])
        else:
            pssize = 5.0

        # Please fasten your seatbelts :) Calculations start here.
        # -------------------------------------------------------------------

        pagesizes = getpagesizes(pageoption)
        pagesizesindots = dictodots(pagesizes, dpioption)

        # Leave space for titles up and ps down - still in mm !!
        upperspace = 0
        subtitletop = 0
        titletop = 0
        if len(options["maintitle"]) > 0:
            titletop = 0.4 * maintitlesize
            upperspace = upperspace + titletop + maintitlesize
        if len(options["subtitle"]) > 0:
            subtitletop = upperspace + 0.4 * subtitlesize
            upperspace = subtitletop + subtitlesize + 1
        lowerspace = 0
        if (
            (len(options["psundercentral"]) > 0)
            or (len(options["psunderright"]) > 0)
            or (len(options["psunderleft"]) > 0)
        ):
            lowerspace = lowerspace + pssize + 2

        os.environ["GRASS_RENDER_WIDTH"] = str(pagesizesindots["w"])
        os.environ["GRASS_RENDER_HEIGHT"] = str(pagesizesindots["h"])

        pagemargins = getpagemargins(options["pagemargin"], options["layunits"])
        pagemarginsindots = dictodots(pagemargins, dpioption)

        # Getting max drawing area in dots
        mxfd = getmaxframeindots(pagemarginsindots, pagesizesindots)
        maxframe = (
            str(mxfd["t"])
            + ","
            + str(mxfd["b"])
            + ","
            + str(mxfd["l"])
            + ","
            + str(mxfd["r"])
        )

        # convert font size in mm to percentage for d.text
        mxfmm = dictomm(mxfd, dpioption)
        maintitlesize = float(maintitlesize) / (mxfmm["b"] - mxfmm["t"]) * 100.0
        subtitlesize = float(subtitlesize) / (mxfmm["b"] - mxfmm["t"]) * 100.0

        pssize = float(pssize) / (mxfmm["r"] - mxfmm["l"]) * 100.0
        # subtitle location is another issue
        subtitletoppercent = 100.0 - subtitletop / (mxfmm["b"] - mxfmm["t"]) * 100.0
        titletoppercent = 100.0 - titletop / (mxfmm["b"] - mxfmm["t"]) * 100.0

        mapul = getmapUL(options["mapupperleft"], options["layunits"])
        mapulindots = dictodots(mapul, dpioption)

        mapsizes = getmapsizes(options["mapsize"], options["layunits"])
        mapsizesindots = dictodots(mapsizes, dpioption)

        # Correcting map area ratio to ratio of region edges
        # OR screen window edges depending on "regionmode"
        # for later:     grass.use_temp_region()
        ISLATLONG = False
        s = grass.read_command("g.region", flags="p")
        kv = grass.parse_key_val(s, sep=":")
        regioncols = float(kv["cols"].strip())
        regionrows = float(kv["rows"].strip())
        ewrestemp = kv["ewres"].strip()
        nsrestemp = kv["nsres"].strip()
        if ewrestemp.find(":") > 0:
            ISLATLONG = True
            ewrestemp = ewrestemp.split(":")
            ewres = (
                float(ewrestemp[0])
                + float(ewrestemp[1]) / 60.0
                + float(ewrestemp[2]) / 3600.0
            )
            nsrestemp = nsrestemp.split(":")
            nsres = (
                float(nsrestemp[0])
                + float(nsrestemp[1]) / 60.0
                + float(nsrestemp[2]) / 3600.0
            )
        else:
            ewres = float(ewrestemp)
            nsres = float(nsrestemp)

        sizex = regioncols * ewres
        sizey = regionrows * nsres

        grass.verbose(_("printws: sizex " + str(sizex)))
        grass.verbose(_("printws: sizey " + str(sizey)))

        if regionmode == "region":
            hregionratio = float(sizex) / float(sizey)
            grass.verbose(_("printws: REGION MODE -> region "))
        else:  # surprisingly doing the SAME
            # using screen window ratio for map area
            # next line was a test for this but didn't help on gadgets positioning
            # hregionratio = float(extents[8]) / float(extents[9])
            hregionratio = float(sizex) / float(sizey)
            grass.verbose(_("printws: REGION MODE -> window"))
        hmapratio = mapsizes["w"] / mapsizes["h"]

        grass.verbose(_("printws: raw mapsizes: " + str(mapsizesindots)))
        grass.verbose(_("printws: hr: " + str(hregionratio)))
        grass.verbose(_("printws: hm: " + str(hmapratio)))
        if hregionratio > hmapratio:
            grass.verbose(
                _("printws: Map area height correction / " + str(hregionratio))
            )
            mapsizes["h"] = mapsizes["w"] / hregionratio
        elif hregionratio < hmapratio:
            grass.verbose(
                _("printws: Map area width correction * " + str(hregionratio))
            )
            mapsizes["w"] = mapsizes["h"] * hregionratio
        mapsizesindots = dictodots(mapsizes, dpioption)

        # changing region resolution to match print resolution
        # to eliminate unnecessary CPU heating/data transfer
        # so as to make it faster
        # with only invisible detail loss.
        colsregiontomap = float(mapsizesindots["w"]) / regioncols
        rowsregiontomap = float(mapsizesindots["h"]) / regionrows

        newewres = ewres
        newnsres = nsres

        # if colsregiontomap < 1:
        # CHANGE: also enables raising of resolution to prevent
        # pixelation because of low resolution setting...
        newewres = ewres / colsregiontomap
        # if rowsregiontomap < 1:
        newnsres = nsres / rowsregiontomap

        # WOW - not necessary to convert back to DMS for nsres / ewres
        # if ISLATLONG:
        #    newewresstr=decdeg2dms(newewres)
        #    newnsresstr=decdeg2dms(newnsres)
        # else:
        newewresstr = str(newewres)
        newnsresstr = str(newnsres)

        grass.run_command("g.region", ewres=newewresstr, nsres=newnsresstr)

        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # it seems that d.wms uses the GRASS_REGION from region info
        # others may also do so we set it
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        kv2 = {}
        kv2["e"] = kv["east"]
        kv2["n"] = kv["north"]
        kv2["s"] = kv["south"]
        kv2["w"] = kv["west"]
        kv2["ewres"] = newewresstr
        kv2["nsres"] = newnsresstr
        # kv2['rows']    #- autocalculated to resolution - no need to set explicitly
        # kv2['cols']    #- autocalculated to resolution - no need to set explicitly
        # grass.message(str(kv2))
        # grass.message(grass.region_env(**kv2))
        # grass.message(s)
        os.environ["GRASS_REGION"] = grass.region_env(**kv2)

        # Getting mapping area in dots
        # Correcting mxfd to leave space for title and subscript
        pagemarginstitles = copy.deepcopy(pagemargins)
        pagemarginstitles["t"] = pagemarginstitles["t"] + upperspace
        pagemarginstitles["b"] = pagemarginstitles["b"] + lowerspace
        pagemarginsindotstitles = dictodots(pagemarginstitles, dpioption)
        mxfdtitles = getmaxframeindots(pagemarginsindotstitles, pagesizesindots)

        mpfd = getmapframeindots(mapulindots, mapsizesindots, mxfdtitles)
        if pageoption == "Flexi":
            # For 'Flexi' page we modify the setup to create
            # a page containing only the map without margins
            grass.verbose(_("printws: pre Flexi mapframe: " + str(mpfd)))
            mpfd["b"] = mpfd["b"] - mpfd["t"]
            mpfd["t"] = 0
            mpfd["r"] = mpfd["r"] - mpfd["l"]
            mpfd["l"] = 0
            os.environ["GRASS_RENDER_WIDTH"] = str(mpfd["r"])
            os.environ["GRASS_RENDER_HEIGHT"] = str(mpfd["b"])
            grass.verbose(_("printws: post Flexi mapframe: " + str(mpfd)))
        mapframe = (
            str(mpfd["t"])
            + ","
            + str(mpfd["b"])
            + ","
            + str(mpfd["l"])
            + ","
            + str(mpfd["r"])
        )

        grass.verbose(_("printws: DOT VALUES ARE:"))
        grass.verbose(_("printws: maxframe: " + str(mxfd)))
        grass.verbose(_("printws: maxframe: " + maxframe))
        grass.verbose(_("printws: mapframe: " + str(mpfd)))
        grass.verbose(_("printws: mapframe: " + mapframe))
        grass.verbose(_("printws: page: " + str(pagesizesindots)))
        grass.verbose(_("printws: margins: " + str(pagemarginsindots)))
        grass.verbose(_("printws: mapUL: " + str(mapulindots)))
        grass.verbose(_("printws: mapsizes (corrected): " + str(mapsizesindots)))
        grass.verbose(_("printws: ewres (corrected): " + str(newewres)))
        grass.verbose(_("printws: nsres (corrected): " + str(newnsres)))

        # quit()

        # ------------------- INMAP -------------------

        # Do not limit -map. It was: -limit map 720000000 before...
        # So we can grow on disk as long as it lasts
        imcommand = (
            "convert  -limit memory 720000000 -units PixelsPerInch -density "
            + str(int(dpioption))
            + " "
        )

        if os.name == "nt":
            imcommand = "magick " + imcommand

        os.environ["GRASS_RENDER_FRAME"] = mapframe

        grass.verbose(_("printws: Rendering: the following layers: "))
        lastopacity = "-1"

        for lay in layers:
            grass.verbose(_(lay[1] + " at: " + lay[0] + " opacity"))
            if lay[0] == "1":
                if lastopacity != "1":
                    LASTFILE = os.path.join(
                        TMPDIR,
                        str(os.getpid())
                        + "_DIS_"
                        + str(displaycounter)
                        + "_GEN_"
                        + str(LAYERCOUNT)
                        + "."
                        + TMPFORMAT,
                    )
                    os.environ["GRASS_RENDER_FILE"] = LASTFILE
                    LAYERCOUNT = LAYERCOUNT + 2
                    imcommand = imcommand + " " + LASTFILE
                    lastopacity = "1"
                render(lay[1], lay[2], lay[3])
            else:
                lastopacity = lay[0]
                LASTFILE = os.path.join(
                    TMPDIR,
                    str(os.getpid())
                    + "_DIS_"
                    + str(displaycounter)
                    + "_GEN_"
                    + str(LAYERCOUNT)
                    + "."
                    + TMPFORMAT,
                )
                LAYERCOUNT = LAYERCOUNT + 2
                os.environ["GRASS_RENDER_FILE"] = LASTFILE
                grass.verbose("LAY: " + str(lay))
                render(lay[1], lay[2], lay[3])
                imcommand = (
                    imcommand
                    + " \( "
                    + LASTFILE
                    + " -channel a -evaluate multiply "
                    + lay[0]
                    + " +channel \)"
                )

        # setting resolution back to pre-script state since map rendering is
        # finished
        # CHANGE: not necessary anymore since we use temp_region now
        # However, since we did set GRASS_REGION, let's redo it here

        os.environ.pop("GRASS_REGION")

        # ------------------- OUTSIDE MAP texts, etc -------------------
        if pageoption == "Flexi":
            grass.verbose(
                _("m.printws: WARNING! Felxi mode, will not create titles, etc...")
            )
        else:
            os.environ["GRASS_RENDER_FRAME"] = maxframe

            dict = {}
            dict["task"] = "d.text"
            dict["color"] = titlecolor
            dict["font"] = titlefont
            dict["charset"] = "UTF-8"

            if len(options["maintitle"]) > 1:
                dict["text"] = decodetextmacros(options["maintitle"], textmacros)
                dict["at"] = "50," + str(titletoppercent)
                dict["align"] = "uc"
                dict["size"] = str(maintitlesize)
                render(str(dict), dict, {})

            if len(options["subtitle"]) > 1:
                dict["text"] = decodetextmacros(options["subtitle"], textmacros)
                dict["at"] = "50," + str(subtitletoppercent)
                dict["align"] = "uc"
                dict["size"] = str(subtitlesize)
                render(str(dict), dict, {})

            dict["size"] = str(pssize)

            if len(options["psundercentral"]) > 1:
                dict["text"] = decodetextmacros(options["psundercentral"], textmacros)
                dict["at"] = "50,1"
                dict["align"] = "lc"
                render(str(dict), dict, {})
            if len(options["psunderleft"]) > 1:
                dict["text"] = decodetextmacros(options["psunderleft"], textmacros)
                dict["at"] = "0,1"
                dict["align"] = "ll"
                render(str(dict), dict, {})
            if len(options["psunderright"]) > 1:
                dict["text"] = decodetextmacros(options["psunderright"], textmacros)
                dict["at"] = "100,1"
                dict["align"] = "lr"
                render(str(dict), dict, {})

        # ------------------- GENERATING OUTPUT FILE -------------------

        if len(options["output"]) > 1:
            output = options["output"]
        else:
            output = "map_" + str(os.getpid())

        # remove extension AND display number and naming if any
        output = os.path.splitext(output)[0]
        output = re.sub("_DISPLAY_[0-9]+_.*", "", output)

        if len(options["format"]) > 1:
            extension = options["format"]
        else:
            extension = "pdf"

        displaypart = ""
        if len(displays) > 1:
            displaypart = "_DISPLAY_" + str(displaycounter) + "_" + key

        pagedata = getpagedata(pageoption)
        # params= ' -extent '+str(pagesizesindots['w'])+'x'+str(pagesizesindots['h'])+' -gravity center -compress jpeg -page '+pagedata['page']+' '+pagedata['parameters']+' -units PixelsPerInch -density '+str(dpioption)+'x'+str(dpioption)+' '
        params = (
            " -compress jpeg -quality 92 "
            + pagedata["parameters"]
            + " -units PixelsPerInch -density "
            + str(int(dpioption))
            + " "
        )

        imcommand = (
            imcommand
            + " -layers flatten "
            + params
            + '"'
            + output
            + displaypart
            + "."
            + extension
            + '"'
        )

        grass.verbose(_("printws: And the imagemagick command is... " + imcommand))
        os.system(imcommand)

    if not flags["d"]:
        grass.verbose(_("printws: Doing graceful cleanup..."))
        os.system("rm " + os.path.join(TMPDIR, str(os.getpid()) + "*_GEN_*"))
        if REMOVE_TMPDIR:
            try_rmdir(TMPDIR)
        else:
            grass.message(
                "\n%s\n" % _("printws: Temp dir remove failed. Do it yourself, please:")
            )
            sys.stderr.write("%s\n" % TMPDIR % " <---- this")

    # restoring pre-script region
    # - not necessary as we are using grass.use_temp_region() in the future

    return 0
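
The core of the map-frame fitting above, isolated as a sketch with
hypothetical numbers: the frame is shrunk along one axis so its w/h ratio
matches the region's east-west / north-south extent ratio.

region_ratio = 3000.0 / 2000.0       # sizex / sizey from g.region
mapsizes = {"w": 180.0, "h": 150.0}  # requested map size in mm
map_ratio = mapsizes["w"] / mapsizes["h"]
if region_ratio > map_ratio:
    mapsizes["h"] = mapsizes["w"] / region_ratio   # region is wider: cut height
elif region_ratio < map_ratio:
    mapsizes["w"] = mapsizes["h"] * region_ratio   # region is taller: cut width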
Example #49
0
def main():
    raster = options["raster"]
    maskcats = options["maskcats"]
    vector = options["vector"]
    layer = options["layer"]
    cats = options["cats"]
    where = options["where"]
    remove = flags["r"]
    invert = flags["i"]

    if not remove and not raster and not vector:
        grass.fatal(_("Either parameter <raster> ot parameter <vector> is required"))

    mapset = grass.gisenv()["MAPSET"]
    exists = bool(grass.find_file("MASK", element="cell", mapset=mapset)["file"])

    if remove:
        # -> remove
        if exists:
            grass.run_command("g.remove", flags="f", quiet=True, type="rast", pattern="MASK")
            grass.message(_("Raster MASK removed"))
        else:
            grass.fatal(_("No existing MASK to remove"))
    else:
        # -> create
        if exists:
            if not grass.overwrite():
                grass.fatal(_("MASK already found in current mapset. Delete first or overwrite."))
            else:
                grass.warning(_("MASK already exists and will be overwritten"))
                grass.run_command("g.remove", flags="f", quiet=True, type="rast", pattern="MASK")

        if raster:
            # check if input raster exists
            if not grass.find_file(raster)["file"]:
                grass.fatal(_("Raster map <%s> not found") % raster)

            if maskcats != "*" and not remove:
                if grass.raster_info(raster)["datatype"] != "CELL":
                    grass.fatal(
                        _(
                            "The raster map <%s> must be integer (CELL type) "
                            " in order to use the 'maskcats' parameter"
                        )
                        % raster
                    )

            p = grass.feed_command("r.reclass", input=raster, output="MASK", overwrite=True, rules="-")
            p.stdin.write("%s = 1" % maskcats)
            p.stdin.close()
            p.wait()
        elif vector:
            vector_name = grass.find_file(vector, "vector")["fullname"]
            if not vector_name:
                grass.fatal(_("Vector map <%s> not found") % vector)

            # parser bug?
            if len(cats) == 0:
                cats = None
            if len(where) == 0:
                where = None

            if grass.vector_info_topo(vector_name)["areas"] < 1:
                grass.warning(_("No area found in vector map <%s>. " "Creating a convex hull for MASK.") % vector_name)
                global tmp_hull
                tmp_hull = "tmp_hull_%d" % os.getpid()
                to_rast_input = tmp_hull
                # force 'flat' convex hull for 3D vector maps
                if 0 != grass.run_command(
                    "v.hull",
                    flags="f",
                    quiet=True,
                    input=vector_name,
                    output=tmp_hull,
                    layer=layer,
                    cats=cats,
                    where=where,
                ):
                    grass.fatal(_("Unable to create a convex hull for vector map <%s>") % vector_name)
            else:
                to_rast_input = vector_name

            env = os.environ.copy()
            if grass.verbosity() > 1:
                env["GRASS_VERBOSE"] = "1"
            grass.run_command(
                "v.to.rast",
                input=to_rast_input,
                layer=layer,
                output="MASK",
                use="val",
                val="1",
                type="area",
                cats=cats,
                where=where,
                env=env,
            )

        if invert:
            global tmp
            tmp = "r_mask_%d" % os.getpid()
            grass.run_command("g.rename", rast=("MASK", tmp), quiet=True)
            grass.message(_("Creating inverted raster MASK..."))
            grass.mapcalc("MASK = if(isnull($tmp), 1, null())", tmp=tmp)
            grass.verbose(_("Inverted raster MASK created"))
        else:
            grass.verbose(_("Raster MASK created"))

        grass.message(
            _(
                "All subsequent raster operations will be limited to "
                "the MASK area. Removing or renaming raster map named "
                "'MASK' will restore raster operations to normal."
            )
        )
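
A hedged usage sketch for the module implemented above (r.mask), with a
hypothetical integer raster "landuse": mask raster operations to selected
categories, then remove the mask again.

import grass.script as grass

grass.run_command("r.mask", raster="landuse", maskcats="1 2 3")
grass.run_command("r.mask", flags="r")  # remove the MASK when done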
Example #50
0
def one_point_per_col_output(separator, output_files, output_time_list,
                             output, write_header, site_input):
    """Write one point per col
       output is of type: 
       start,end,point_1 value,point_2 value,...,point_n value
       
       Each row represents a single raster map, hence a single time stamp
    """
    # open the output file for writing
    out_file = open(output, 'w') if output != "-" else sys.stdout
        
    first = True
    for count in range(len(output_files)):
        file_name = output_files[count]
        gscript.verbose(_("Transforming r.what output file %s"%(file_name)))
        map_list = output_time_list[count]
        in_file = open(file_name, "r")
        lines = in_file.readlines()
        
        matrix = []
        for line in lines:
            matrix.append(line.split(separator))
            
        num_cols = len(matrix[0])
        
        if first is True:
            if write_header is True:
                out_file.write("start%(sep)send"%({"sep":separator}))
                if site_input:
                    for row in matrix:
                        x = row[0]
                        y = row[1]
                        site = row[2]
                        out_file.write("%(sep)s%(x)10.10f;%(y)10.10f;%(site_name)s"\
                                   %({"sep":separator,
                                      "x":float(x), 
                                      "y":float(y),
                                      "site_name":str(site)}))
                else:
                    for row in matrix:
                        x = row[0]
                        y = row[1]
                        out_file.write("%(sep)s%(x)10.10f;%(y)10.10f"\
                                   %({"sep":separator,
                                      "x":float(x), 
                                      "y":float(y)}))

                out_file.write("\n")

        first = False

        for col in range(num_cols - 3):
            start, end = output_time_list[count][col].get_temporal_extent_as_tuple()
            time_string = "%(start)s%(sep)s%(end)s"\
                               %({"start":str(start), "end":str(end),
                                  "sep":separator})
            out_file.write(time_string)
            for row in range(len(matrix)):
                value = matrix[row][col + 3]
                out_file.write("%(sep)s%(value)s"\
                                   %({"sep":separator,
                                      "value":value.strip()}))
            out_file.write("\n")

        in_file.close()
    if out_file is not sys.stdout:
        out_file.close()
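
A sketch of the r.what line layout the function above relies on
(separator "|", no site names; the values are hypothetical):
easting|northing|label|value_map_1|value_map_2|...

line = "634567.5|223456.5||124|137"
fields = line.split("|")
x, y = float(fields[0]), float(fields[1])
values = [v.strip() for v in fields[3:]]  # one value per queried map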
Example #51
0
def main():
    raster = options['raster']
    maskcats = options['maskcats']
    vector = options['vector']
    layer = options['layer']
    cats = options['cats']
    where = options['where']
    remove = flags['r']
    invert = flags['i']

    if not remove and not raster and not vector:
        grass.fatal(_("Either parameter <raster> ot parameter <vector> is required"))

    mapset = grass.gisenv()['MAPSET']
    exists = bool(grass.find_file('MASK', element='cell', mapset=mapset)['file'])

    if remove:
        # -> remove
        if exists:
            if sys.platform == 'win32':
                grass.run_command('g.remove', flags='if', quiet=True,
                                  type='raster', name='MASK')
            else:
                grass.run_command('g.remove', flags='f', quiet=True,
                                  type='raster', name='MASK')
            grass.message(_("Raster MASK removed"))
        else:
            grass.fatal(_("No existing MASK to remove"))
    else:
        # -> create
        if exists:
            if not grass.overwrite():
                grass.fatal(_("MASK already found in current mapset. Delete first or overwrite."))
            else:
                grass.warning(_("MASK already exists and will be overwritten"))
                grass.run_command('g.remove', flags='f', quiet=True,
                                  type='raster', name='MASK')

        if raster:
            # check if input raster exists
            if not grass.find_file(raster)['file']:
                grass.fatal(_("Raster map <%s> not found") % raster)

            if maskcats != '*' and not remove:
                if grass.raster_info(raster)['datatype'] != "CELL":
                    grass.fatal(_("The raster map <%s> must be integer (CELL type) "
                                  " in order to use the 'maskcats' parameter") % raster)

            p = grass.feed_command(
                'r.reclass',
                input=raster,
                output='MASK',
                overwrite=True,
                rules='-')
            p.stdin.write("%s = 1" % maskcats)
            p.stdin.close()
            p.wait()
        elif vector:
            vector_name = grass.find_file(vector, 'vector')['fullname']
            if not vector_name:
                grass.fatal(_("Vector map <%s> not found") % vector)

            # parser bug?
            if len(cats) == 0:
                cats = None
            if len(where) == 0:
                where = None

            if grass.vector_info_topo(vector_name)['areas'] < 1:
                grass.warning(_("No area found in vector map <%s>. "
                                "Creating a convex hull for MASK.") % vector_name)
                global tmp_hull
                tmp_hull = "tmp_hull_%d" % os.getpid()
                to_rast_input = tmp_hull
                # force 'flat' convex hull for 3D vector maps
                try:
                    grass.run_command('v.hull', flags='f', quiet=True,
                                      input=vector_name, output=tmp_hull,
                                      layer=layer, cats=cats, where=where)
                except CalledModuleError:
                    grass.fatal(
                        _("Unable to create a convex hull for vector map <%s>") %
                        vector_name)
            else:
                to_rast_input = vector_name

            env = os.environ.copy()
            if grass.verbosity() > 1:
                env['GRASS_VERBOSE'] = '1'
            grass.run_command('v.to.rast', input=to_rast_input, layer=layer,
                              output='MASK', use='val', val='1',
                              type='area', cats=cats, where=where, env=env)

        if invert:
            global tmp
            tmp = "r_mask_%d" % os.getpid()
            grass.run_command('g.rename', raster=('MASK', tmp), quiet=True)
            grass.message(_("Creating inverted raster MASK..."))
            grass.mapcalc("MASK = if(isnull($tmp), 1, null())", tmp=tmp)
            grass.verbose(_("Inverted raster MASK created"))
        else:
            grass.verbose(_("Raster MASK created"))

        grass.message(_("All subsequent raster operations will be limited to "
                        "the MASK area. Removing or renaming raster map named "
                        "'MASK' will restore raster operations to normal."))
Example #52
0
def import_stds(input, output, directory, title=None, descr=None, location=None,
                link=False, exp=False, overr=False, create=False,
                stds_type="strds", base=None, set_current_region=False):
    """Import space time datasets of type raster and vector

        :param input: Name of the input archive file
        :param output: The name of the output space time dataset
        :param directory: The extraction directory
        :param title: The title of the newly created space time dataset
        :param descr: The description of the newly created
                     space time dataset
        :param location: The name of the location that should be created,
                        maps are imported into this location
        :param link: Switch to link raster maps instead of importing them
        :param exp: Extend location extents based on new dataset
        :param overr: Override projection (use location's projection)
        :param create: Create the location specified by the "location"
                      parameter and exit.
                      Do not import the space time datasets.
        :param stds_type: The type of the space time dataset that
                         should be imported
        :param base: The base name of the newly imported maps; it will be
                     extended using a numerical index.
    """

    global raise_on_error
    old_state = gscript.raise_on_error
    gscript.set_raise_on_error(True)

    # Check if the input file and the extraction directory exist
    if not os.path.exists(input):
        gscript.fatal(_("Space time raster dataset archive <%s> not found")
                      % input)
    if not create and not os.path.exists(directory):
        gscript.fatal(_("Extraction directory <%s> not found") % directory)

    tar = tarfile.open(name=input, mode='r')

    # Check for important files
    msgr = get_tgis_message_interface()
    msgr.message(_("Checking validity of input file (size: %0.1f MB). Make take a while..."
        % (os.path.getsize(input)/(1024*1024.0))))
    members = tar.getnames()
    # Make sure that the basenames of the files are used for comparison
    member_basenames = [os.path.basename(name) for name in members]

    if init_file_name not in member_basenames:
        gscript.fatal(_("Unable to find init file <%s>") % init_file_name)
    if list_file_name not in member_basenames:
        gscript.fatal(_("Unable to find list file <%s>") % list_file_name)
    if proj_file_name not in member_basenames:
        gscript.fatal(_("Unable to find projection file <%s>") % proj_file_name)

    msgr.message(_("Extracting data..."))
    tar.extractall(path=directory)
    tar.close()

    # We use a new list file name for map registration
    new_list_file_name = list_file_name + "_new"
    # Save current working directory path
    old_cwd = os.getcwd()

    # Switch into the data directory
    os.chdir(directory)

    # Check projection information
    if not location:
        temp_name = gscript.tempfile()
        temp_file = open(temp_name, "w")
        proj_name = os.path.abspath(proj_file_name)

        # We need to convert projection strings generated
        # by programs other than g.proj into
        # newline format so that the GRASS file comparison function
        # can be used to compare the projections
        proj_name_tmp = temp_name + "_in_projection"
        proj_file = open(proj_name, "r")
        proj_content = proj_file.read()
        proj_content = proj_content.replace(" +", "\n+")
        proj_content = proj_content.replace("\t+", "\n+")
        proj_file.close()

        proj_file = open(proj_name_tmp, "w")
        proj_file.write(proj_content)
        proj_file.close()

        p = gscript.start_command("g.proj", flags="j", stdout=temp_file)
        p.communicate()
        temp_file.close()

        if not gscript.compare_key_value_text_files(temp_name, proj_name_tmp,
                                                    sep="="):
            if overr:
                gscript.warning(_("Projection information does not match. "
                                  "Proceeding..."))
            else:
                diff = ''.join(gscript.diff_files(temp_name, proj_name))
                gscript.warning(_("Difference between PROJ_INFO file of "
                                  "imported map and of current location:"
                                  "\n{diff}").format(diff=diff))
                gscript.fatal(_("Projection information does not match. "
                                "Aborting."))

    # Create a new location based on the projection information and switch
    # into it
    old_env = gscript.gisenv()
    if location:
        try:
            proj4_string = open(proj_file_name, 'r').read()
            gscript.create_location(dbase=old_env["GISDBASE"],
                                    location=location,
                                    proj4=proj4_string)
            # Just create a new location and return
            if create:
                os.chdir(old_cwd)
                return
        except Exception as e:
            gscript.fatal(_("Unable to create location %(l)s. Reason: %(e)s")
                          % {'l': location, 'e': str(e)})
        # Switch to the new created location
        try:
            gscript.run_command("g.mapset", mapset="PERMANENT",
                                location=location,
                                dbase=old_env["GISDBASE"])
        except CalledModuleError:
            gscript.fatal(_("Unable to switch to location %s") % location)
        # create default database connection
        try:
            gscript.run_command("t.connect", flags="d")
        except CalledModuleError:
            gscript.fatal(_("Unable to create default temporal database "
                            "in new location %s") % location)

    try:
        # Make sure the temporal database exists
        factory.init()

        fs = "|"
        maplist = []
        mapset = get_current_mapset()
        list_file = open(list_file_name, "r")
        new_list_file = open(new_list_file_name, "w")

        # get number of lines to correctly form the suffix
        max_count = -1
        for max_count, l in enumerate(list_file):
            pass
        max_count += 1
        list_file.seek(0)

        # Read the map list from file
        line_count = 0
        while True:
            line = list_file.readline()
            if not line:
                break

            line_list = line.split(fs)

            # The filename is actually the base name of the map
            # that must be extended by the file suffix
            filename = line_list[0].strip().split(":")[0]
            if base:
                mapname = "%s_%s" % (base, gscript.get_num_suffix(line_count + 1,
                                                                  max_count))
                mapid = "%s@%s" % (mapname, mapset)
            else:
                mapname = filename
                mapid = mapname + "@" + mapset

            row = {}
            row["filename"] = filename
            row["name"] = mapname
            row["id"] = mapid
            row["start"] = line_list[1].strip()
            row["end"] = line_list[2].strip()

            new_list_file.write("%s%s%s%s%s\n" % (mapname, fs, row["start"],
                                                  fs, row["end"]))

            maplist.append(row)
            line_count += 1

        list_file.close()
        new_list_file.close()

        # Read the init file
        fs = "="
        init = {}
        init_file = open(init_file_name, "r")
        while True:
            line = init_file.readline()
            if not line:
                break

            kv = line.split(fs)
            init[kv[0]] = kv[1].strip()

        init_file.close()

        if "temporal_type" not in init or \
           "semantic_type" not in init or \
           "number_of_maps" not in init:
            gscript.fatal(_("Key words %(t)s, %(s)s or %(n)s not found in init"
                            " file.") % {'t': "temporal_type",
                                         's': "semantic_type",
                                         'n': "number_of_maps"})

        if line_count != int(init["number_of_maps"]):
            gscript.fatal(_("Number of maps mismatch in init and list file."))

        format_ = "GTiff"
        type_ = "strds"

        if "stds_type" in init:
            type_ = init["stds_type"]
        if "format" in init:
            format_ = init["format"]

        if stds_type != type_:
            gscript.fatal(_("The archive file is of wrong space time dataset"
                            " type"))

        # Check the existence of the files
        if format_ == "GTiff":
            for row in maplist:
                filename = row["filename"] + ".tif"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GeoTIFF raster file "
                                    "<%s> in archive.") % filename)
        elif format_ == "AAIGrid":
            for row in maplist:
                filename = row["filename"] + ".asc"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find AAIGrid raster file "
                                    "<%s> in archive.") % filename)
        elif format_ == "GML":
            for row in maplist:
                filename = row["filename"] + ".xml"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GML vector file "
                                    "<%s> in archive.") % filename)
        elif format_ == "pack":
            for row in maplist:
                if type_ == "stvds":
                    filename = str(row["filename"].split(":")[0]) + ".pack"
                else:
                    filename = row["filename"] + ".pack"
                if not os.path.exists(filename):
                    gscript.fatal(_("Unable to find GRASS package file "
                                    "<%s> in archive.") % filename)
        else:
            gscript.fatal(_("Unsupported input format"))

        # Check the space time dataset
        id = output + "@" + mapset
        sp = dataset_factory(type_, id)
        if sp.is_in_db() and gscript.overwrite() is False:
            gscript.fatal(_("Space time %(t)s dataset <%(sp)s> is already in"
                            " the database. Use the overwrite flag.") %
                          {'t': type_, 'sp': sp.get_id()})

        # Import the maps
        if type_ == "strds":
            if format_ == "GTiff" or format_ == "AAIGrid":
                _import_raster_maps_from_gdal(maplist, overr, exp, location,
                                              link, format_, set_current_region)
            if format_ == "pack":
                _import_raster_maps(maplist, set_current_region)
        elif type_ == "stvds":
            if format_ == "GML":
                _import_vector_maps_from_gml(
                    maplist, overr, exp, location, link)
            if format_ == "pack":
                _import_vector_maps(maplist)

        # Create the space time dataset
        if sp.is_in_db() and gscript.overwrite() is True:
            gscript.info(_("Overwrite space time %(sp)s dataset "
                           "<%(id)s> and unregister all maps.") %
                         {'sp': sp.get_new_map_instance(None).get_type(),
                          'id': sp.get_id()})
            sp.delete()
            sp = sp.get_new_instance(id)

        temporal_type = init["temporal_type"]
        semantic_type = init["semantic_type"]
        relative_time_unit = None
        if temporal_type == "relative":
            if "relative_time_unit" not in init:
                gscript.fatal(_("Key word %s not found in init file.") %
                              ("relative_time_unit"))
            relative_time_unit = init["relative_time_unit"]
            sp.set_relative_time_unit(relative_time_unit)

        gscript.verbose(_("Create space time %s dataset.") %
                        sp.get_new_map_instance(None).get_type())

        sp.set_initial_values(temporal_type=temporal_type,
                              semantic_type=semantic_type, title=title,
                              description=descr)
        sp.insert()

        # register the maps
        fs = "|"
        register_maps_in_space_time_dataset(
            type=sp.get_new_map_instance(None).get_type(),
            name=output, file=new_list_file_name, start="file",
            end="file", unit=relative_time_unit, dbif=None, fs=fs,
            update_cmd_list=False)

        os.chdir(old_cwd)

    # Make sure the location is switched back correctly
    finally:
        if location:
            # Switch to the old location
            try:
                gscript.run_command("g.mapset", mapset=old_env["MAPSET"],
                                    location=old_env["LOCATION_NAME"],
                                    gisdbase=old_env["GISDBASE"])
            except CalledModuleError:
                grass.warning(_("Switching to original location failed"))

        gscript.set_raise_on_error(old_state)
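
A sketch of the init file format parsed above: plain key=value lines. The
three keys checked first are mandatory; stds_type and format fall back to
"strds" and "GTiff". The values here are hypothetical.

init_text = """\
stds_type=strds
format=GTiff
temporal_type=absolute
semantic_type=mean
number_of_maps=12
"""
init = dict(line.split("=", 1) for line in init_text.splitlines() if line)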
Example #53
0
def main():
    color = options['color']
    column = options['column']
    layer = options['layer']
    map = options['map']
    range = options['range']
    raster = options['raster']
    rgb_column = options['rgb_column']
    rules = options['rules']
    flip = flags['n']

    global tmp, tmp_colr, tmp_vcol
    pid = os.getpid()
    tmp = tmp_colr = tmp_vcol = None

    mapset = grass.gisenv()['MAPSET']
    gisbase = os.getenv('GISBASE')

    # does map exist in CURRENT mapset?
    kv = grass.find_file(map, element='vector', mapset=mapset)
    if not kv['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    vector = map.split('@', 1)

    # sanity check mutually exclusive color options
    if not options['color'] and not options['raster'] and not options['rules']:
        grass.fatal(_("Pick one of color, rules, or raster options"))

    if color:
        #### check the color rule is valid
        color_opts = os.listdir(os.path.join(gisbase, 'etc', 'colors'))
        color_opts += ['random', 'grey.eq', 'grey.log', 'rules']
        if color not in color_opts:
            grass.fatal(
                _("Invalid color rule <%s>\n") % color +
                _("Valid options are: %s") % ' '.join(color_opts))
    elif raster:
        if not grass.find_file(raster)['name']:
            grass.fatal(_("Raster raster map <%s> not found") % raster)
    elif rules:
        if not os.access(rules, os.R_OK):
            grass.fatal(_("Unable to read color rules file <%s>") % rules)

    # column checks
    # check input data column
    cols = grass.vector_columns(map, layer=layer)
    if column not in cols:
        grass.fatal(_("Column <%s> not found") % column)
    ncolumn_type = cols[column]['type']
    if ncolumn_type not in ["INTEGER", "DOUBLE PRECISION"]:
        grass.fatal(
            _("Column <%s> is not numeric but %s") % (column, ncolumn_type))

    # check if GRASSRGB column exists, make it if it doesn't
    table = grass.vector_db(map)[int(layer)]['table']
    if rgb_column not in cols:
        # RGB Column not found, create it
        grass.message(_("Creating column <%s>...") % rgb_column)
        try:
            grass.run_command('v.db.addcolumn',
                              map=map,
                              layer=layer,
                              column="%s varchar(11)" % rgb_column)
        except CalledModuleError:
            grass.fatal(_("Creating color column"))
    else:
        column_type = cols[rgb_column]['type']
        if column_type not in ["CHARACTER", "TEXT"]:
            grass.fatal(
                _("Column <%s> is not of compatible type (found %s)") %
                (rgb_column, column_type))
        else:
            num_chars = dict([
                (v[0], int(v[2])) for v in grass.db_describe(table)['cols']
            ])[rgb_column]
            if num_chars < 11:
                grass.fatal(
                    _("Color column <%s> is not wide enough (needs 11 characters)")
                    % rgb_column)

    cvals = grass.vector_db_select(map, layer=int(layer),
                                   columns=column)['values'].values()

    # find data range
    if range:
        # order doesn't matter
        vals = [float(x) for x in range.split(',')]
    else:
        grass.message(_("Scanning values..."))
        vals = [float(x[0]) for x in cvals]

    minval = min(vals)
    maxval = max(vals)

    grass.verbose(_("Range: [%s, %s]") % (minval, maxval))
    if minval is None or maxval is None:
        grass.fatal(_("Scanning data range"))

    # setup internal region
    grass.use_temp_region()
    grass.run_command('g.region', rows=2, cols=2)

    tmp_colr = "tmp_colr_%d" % pid

    # create dummy raster map
    if ncolumn_type == "INTEGER":
        grass.mapcalc("$tmp_colr = int(if(row() == 1, $minval, $maxval))",
                      tmp_colr=tmp_colr,
                      minval=minval,
                      maxval=maxval)
    else:
        grass.mapcalc("$tmp_colr = double(if(row() == 1, $minval, $maxval))",
                      tmp_colr=tmp_colr,
                      minval=minval,
                      maxval=maxval)

    if color:
        color_cmd = {'color': color}
    elif raster:
        color_cmd = {'raster': raster}
    elif rules:
        color_cmd = {'rules': rules}

    if flip:
        flip_flag = 'n'
    else:
        flip_flag = ''

    grass.run_command('r.colors',
                      map=tmp_colr,
                      flags=flip_flag,
                      quiet=True,
                      **color_cmd)

    tmp = grass.tempfile()

    # calculate colors and write SQL command file
    grass.message(_("Looking up colors..."))

    f = open(tmp, 'w')
    p = grass.feed_command('r.what.color', flags='i', input=tmp_colr, stdout=f)
    lastval = None
    for v in sorted(vals):
        if v == lastval:
            continue
        lastval = v
        p.stdin.write('%f\n' % v)
    p.stdin.close()
    p.wait()
    f.close()

    tmp_vcol = "%s_vcol.sql" % tmp
    fi = open(tmp, 'r')
    fo = open(tmp_vcol, 'w')
    t = string.Template(
        "UPDATE $table SET $rgb_column = '$colr' WHERE $column = $value;\n")
    found = 0
    for line in fi:
        [value, colr] = line.split(': ')
        colr = colr.strip()
        if len(colr.split(':')) != 3:
            continue
        fo.write(
            t.substitute(table=table,
                         rgb_column=rgb_column,
                         colr=colr,
                         column=column,
                         value=value))
        found += 1
    fi.close()
    fo.close()

    if not found:
        grass.fatal(_("No values found in color range"))

    # apply SQL commands to update the table with values
    grass.message(_("Writing %s colors...") % found)

    try:
        grass.run_command('db.execute', input=tmp_vcol)
    except CalledModuleError:
        grass.fatal(_("Processing SQL transaction"))

    if flags['s']:
        vcolors = "vcolors_%d" % pid
        grass.run_command('g.rename', raster=(tmp_colr, vcolors), quiet=True)
        grass.message(
            _("Raster map containing color rules saved to <%s>") % vcolors)
        # TODO save full v.colors command line history
        grass.run_command(
            'r.support',
            map=vcolors,
            history="",
            source1="vector map = %s" % map,
            source2="column = %s" % column,
            title=_("Dummy raster to use as thematic vector legend"),
            description="generated by v.colors using r.mapcalc")
        grass.run_command('r.support',
                          map=vcolors,
                          history=_("RGB saved into <%s> using <%s%s%s>") %
                          (rgb_column, color, raster, rules))
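
A sketch of the "value: R:G:B" lines the script reads back from
r.what.color and turns into SQL updates; the line content is hypothetical
and the parsing mirrors the loop above (rows whose color part does not
split into three components are skipped).

line = "250000: 255:127:0"
value, colr = line.split(": ")
colr = colr.strip()                 # "255:127:0", stored in the GRASSRGB column
assert len(colr.split(":")) == 3    # skip rule used by the loop above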
Example #54
0
def main():
    global tmp
    tmp = gscript.tempfile()

    extend = flags["e"]
    shellstyle = flags["g"]
    table = options["table"]
    column = options["column"]
    database = options["database"]
    driver = options["driver"]
    where = options["where"]
    perc = options["percentile"]

    perc = [float(p) for p in perc.split(",")]

    desc_table = gscript.db_describe(table, database=database, driver=driver)
    if not desc_table:
        gscript.fatal(_("Unable to describe table <%s>") % table)
    found = False
    for cname, ctype, cwidth in desc_table["cols"]:
        if cname == column:
            found = True
            if ctype not in ("INTEGER", "DOUBLE PRECISION"):
                gscript.fatal(_("Column <%s> is not numeric") % cname)
    if not found:
        gscript.fatal(_("Column <%s> not found in table <%s>") % (column, table))

    if not shellstyle:
        gscript.verbose(
            _("Calculation for column <%s> of table <%s>...") % (column, table)
        )
        gscript.message(_("Reading column values..."))

    sql = "SELECT %s FROM %s" % (column, table)
    if where:
        sql += " WHERE " + where

    if not database:
        database = None

    if not driver:
        driver = None

    tmpf = open(tmp, "w")
    gscript.run_command(
        "db.select",
        flags="c",
        table=table,
        database=database,
        driver=driver,
        sql=sql,
        stdout=tmpf,
    )
    tmpf.close()

    # check if result is empty
    tmpf = open(tmp)
    if tmpf.read(1) == "":
        tmpf.close()
        gscript.fatal(_("Table <%s> contains no data.") % table)
    tmpf.close()

    # calculate statistics
    if not shellstyle:
        gscript.verbose(_("Calculating statistics..."))

    N = 0
    sum = 0.0
    sum2 = 0.0
    sum3 = 0.0
    minv = 1e300
    maxv = -1e300

    tmpf = open(tmp)
    for line in tmpf:
        line = line.rstrip("\r\n")
        if len(line) == 0:
            continue
        x = float(line)
        N += 1
        sum += x
        sum2 += x * x
        sum3 += abs(x)
        maxv = max(maxv, x)
        minv = min(minv, x)
    tmpf.close()

    if N <= 0:
        gscript.fatal(_("No non-null values found"))

    if not shellstyle:
        sys.stdout.write("Number of values: %d\n" % N)
        sys.stdout.write("Minimum: %.15g\n" % minv)
        sys.stdout.write("Maximum: %.15g\n" % maxv)
        sys.stdout.write("Range: %.15g\n" % (maxv - minv))
        sys.stdout.write("Mean: %.15g\n" % (sum / N))
        sys.stdout.write("Arithmetic mean of absolute values: %.15g\n" % (sum3 / N))
        if ((sum2 - sum * sum / N) / N) >= 0:
            sys.stdout.write("Variance: %.15g\n" % ((sum2 - sum * sum / N) / N))
            sys.stdout.write(
                "Standard deviation: %.15g\n" % (math.sqrt((sum2 - sum * sum / N) / N))
            )
            sys.stdout.write(
                "Coefficient of variation: %.15g\n"
                % ((math.sqrt((sum2 - sum * sum / N) / N)) / (math.sqrt(sum * sum) / N))
            )
        else:
            sys.stdout.write("Variance: 0\n")
            sys.stdout.write("Standard deviation: 0\n")
            sys.stdout.write("Coefficient of variation: 0\n")
        sys.stdout.write("Sum: %.15g\n" % sum)
    else:
        sys.stdout.write("n=%d\n" % N)
        sys.stdout.write("min=%.15g\n" % minv)
        sys.stdout.write("max=%.15g\n" % maxv)
        sys.stdout.write("range=%.15g\n" % (maxv - minv))
        sys.stdout.write("mean=%.15g\n" % (sum / N))
        sys.stdout.write("mean_abs=%.15g\n" % (sum3 / N))
        if ((sum2 - sum * sum / N) / N) >= 0:
            sys.stdout.write("variance=%.15g\n" % ((sum2 - sum * sum / N) / N))
            sys.stdout.write("stddev=%.15g\n" % (math.sqrt((sum2 - sum * sum / N) / N)))
            sys.stdout.write(
                "coeff_var=%.15g\n"
                % ((math.sqrt((sum2 - sum * sum / N) / N)) / (math.sqrt(sum * sum) / N))
            )
        else:
            sys.stdout.write("variance=0\n")
            sys.stdout.write("stddev=0\n")
            sys.stdout.write("coeff_var=0\n")
        sys.stdout.write("sum=%.15g\n" % sum)

    if not extend:
        return

    # preparations:
    sortfile(tmp, tmp + ".sort")

    odd = N % 2
    eostr = ["even", "odd"][odd]

    q25pos = round(N * 0.25)
    if q25pos == 0:
        q25pos = 1
    q50apos = round(N * 0.50)
    if q50apos == 0:
        q50apos = 1
    q50bpos = q50apos + (1 - odd)
    q75pos = round(N * 0.75)
    if q75pos == 0:
        q75pos = 1

    ppos = {}
    pval = {}
    for i in range(len(perc)):
        ppos[i] = round(N * perc[i] / 100)
        if ppos[i] == 0:
            ppos[i] = 1
        pval[i] = 0

    inf = open(tmp + ".sort")
    l = 1
    for line in inf:
        line = line.rstrip("\r\n")
        if len(line) == 0:
            continue
        if l == q25pos:
            q25 = float(line)
        if l == q50apos:
            q50a = float(line)
        if l == q50bpos:
            q50b = float(line)
        if l == q75pos:
            q75 = float(line)
        for i in range(len(ppos)):
            if l == ppos[i]:
                pval[i] = float(line)
        l += 1

    q50 = (q50a + q50b) / 2

    if not shellstyle:
        sys.stdout.write("1st Quartile: %.15g\n" % q25)
        sys.stdout.write("Median (%s N): %.15g\n" % (eostr, q50))
        sys.stdout.write("3rd Quartile: %.15g\n" % q75)
        for i in range(len(perc)):
            if perc[i] == int(perc[i]):  # integer
                if int(perc[i]) % 10 == 1 and int(perc[i]) != 11:
                    sys.stdout.write(
                        "%dst Percentile: %.15g\n" % (int(perc[i]), pval[i])
                    )
                elif int(perc[i]) % 10 == 2 and int(perc[i]) != 12:
                    sys.stdout.write(
                        "%dnd Percentile: %.15g\n" % (int(perc[i]), pval[i])
                    )
                elif int(perc[i]) % 10 == 3 and int(perc[i]) != 13:
                    sys.stdout.write(
                        "%drd Percentile: %.15g\n" % (int(perc[i]), pval[i])
                    )
                else:
                    sys.stdout.write(
                        "%dth Percentile: %.15g\n" % (int(perc[i]), pval[i])
                    )
            else:
                sys.stdout.write("%.15g Percentile: %.15g\n" % (perc[i], pval[i]))
    else:
        sys.stdout.write("first_quartile=%.15g\n" % q25)
        sys.stdout.write("median=%.15g\n" % q50)
        sys.stdout.write("third_quartile=%.15g\n" % q75)
        for i in range(len(perc)):
            percstr = "%.15g" % perc[i]
            percstr = percstr.replace(".", "_")
            sys.stdout.write("percentile_%s=%.15g\n" % (percstr, pval[i]))
Example #55
0
def one_point_per_timerow_output(separator, output_files, output_time_list,
                             output, write_header, site_input):
    """Use the original layout of the r.waht output and print instead of 
       the raster names, the time stamps as header
       
       One point per line for all time stamps:
        x|y|1991-01-01 00:00:00;1991-01-02 00:00:00|1991-01-02 00:00:00;1991-01-03 00:00:00|1991-01-03 00:00:00;1991-01-04 00:00:00|1991-01-04 00:00:00;1991-01-05 00:00:00
        3730731.49590371|5642483.51236521|6|8|7|7
        3581249.04638104|5634411.97526282|5|8|7|7
    """
    out_file = open(output, 'w') if output != "-" else sys.stdout

    matrix = []
    header = ""

    first = True
    for count in range(len(output_files)):
        file_name = output_files[count]
        gscript.verbose("Transforming r.what output file %s"%(file_name))
        map_list = output_time_list[count]
        in_file = open(file_name, "r")

        if write_header:
            if first is True:
                if site_input:
                    header = "x%(sep)sy%(sep)ssite"%({"sep":separator})
                else:
                    header = "x%(sep)sy"%({"sep":separator})
            for map in map_list:
                start, end = map.get_temporal_extent_as_tuple()
                time_string = "%(sep)s%(start)s;%(end)s"\
                              %({"start":str(start), "end":str(end),
                                 "sep":separator})
                header += time_string

        lines = in_file.readlines()

        for i in range(len(lines)):
            cols = lines[i].split(separator)

            if first is True:
                if site_input:
                    matrix.append(cols[:3])
                else:
                    matrix.append(cols[:2])

            matrix[i] = matrix[i] + cols[3:]

        first = False

        in_file.close()

    out_file.write(header + "\n")

    gscript.verbose(_("Writing the output file <%s>"%(output)))
    for row in matrix:
        first = True
        for col in row:
            value = col.strip()
            
            if first is False:
                out_file.write("%s"%(separator))
            out_file.write(value)
            
            first = False

        out_file.write("\n")
    if out_file is not sys.stdout:
        out_file.close()
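
To make the column merge above concrete, here is a toy illustration with
hypothetical data: each r.what output file contributes x|y|label|value
columns; the coordinate columns are kept from the first file only, and one
value column is appended per time step.

file_a = ["10.0|20.0||5\n", "11.0|21.0||7\n"]   # values for the first map
file_b = ["10.0|20.0||6\n", "11.0|21.0||8\n"]   # values for the second map

matrix = []
for i, line in enumerate(file_a):
    cols = line.rstrip("\n").split("|")
    matrix.append(cols[:2] + cols[3:])          # keep x, y; append first value
for i, line in enumerate(file_b):
    cols = line.rstrip("\n").split("|")
    matrix[i] = matrix[i] + cols[3:]            # append the next value column

print(matrix)  # [['10.0', '20.0', '5', '6'], ['11.0', '21.0', '7', '8']]

Example #56
0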
def main():
    """
    Main program
    """

    # Temporary filenames

    # The following three are meant for a test step-by-step cwv estimation, see
    # unused functions!

    # tmp_ti_mean = tmp_map_name('ti_mean')  # for cwv
    # tmp_tj_mean = tmp_map_name('tj_mean')  # for cwv
    # tmp_ratio = tmp_map_name('ratio')  # for cwv

    tmp_avg_lse = tmp_map_name('avg_lse')
    tmp_delta_lse = tmp_map_name('delta_lse')
    tmp_cwv = tmp_map_name('cwv')
    #tmp_lst = tmp_map_name('lst')

    # basic equation for mapcalc
    global equation, citation_lst
    equation = "{result} = {expression}"

    # user input
    mtl_file = options['mtl']

    if not options['prefix']:
        b10 = options['b10']
        b11 = options['b11']
        t10 = options['t10']
        t11 = options['t11']

        if not options['clouds']:
            qab = options['qab']
            cloud_map = False

        else:
            qab = False
            cloud_map = options['clouds']

    elif options['prefix']:
        prefix = options['prefix']
        b10 = prefix + '10'
        b11 = prefix + '11'

        if not options['clouds']:
            qab = prefix + 'QA'
            cloud_map = False

        else:
            cloud_map = options['clouds']
            qab = False

    qapixel = options['qapixel']
    lst_output = options['lst']

    # save Brightness Temperature maps?
    global brightness_temperature_prefix
    if options['prefix_bt']:
        brightness_temperature_prefix = options['prefix_bt']
    else:
        brightness_temperature_prefix = None

    global cwv_output
    cwv_window_size = int(options['window'])
    assertion_for_cwv_window_size_msg = ('A spatial window smaller than 7x7 is '
                                         'not recommended. Please select a larger '
                                         "window. Refer to the manual's notes "
                                         'for details.')
    assert cwv_window_size >= 7, assertion_for_cwv_window_size_msg
    cwv_output = options['cwv']

    # optional maps
    average_emissivity_map = options['emissivity']
    delta_emissivity_map = options['delta_emissivity']

    # output for in-between maps?
    global emissivity_output, delta_emissivity_output
    emissivity_output = options['emissivity_out']
    delta_emissivity_output = options['delta_emissivity_out']

    global landcover_map, emissivity_class
    landcover_map = options['landcover']
    emissivity_class = options['emissivity_class']

    # flags
    global info, null
    info = flags['i']
    scene_extent = flags['e']
    timestamping = flags['t']
    null = flags['n']

    global rounding
    rounding = flags['r']

    global celsius
    celsius = flags['c']

    # ToDo:
    # shell = flags['g']

    #
    # Pre-production actions
    #

    # Set Region
    if scene_extent:
        grass.use_temp_region()  # safely modify the region
        msg = "\n|! Matching region extent to map {name}"

        # ToDo: check if extent-B10 == extent-B11? Unnecessary?
        # Improve below!

        if b10:
            run('g.region', rast=b10, align=b10)
            msg = msg.format(name=b10)

        elif t10:
            run('g.region', rast=t10, align=t10)
            msg = msg.format(name=t10)

        g.message(msg)

    else:
        grass.warning(_('Operating on current region'))

    #
    # 1. Mask clouds
    #

    if cloud_map:
        # user-fed cloud map?
        msg = '\n|i Using {cmap} as a MASK'.format(cmap=cloud_map)
        g.message(msg)
        r.mask(raster=cloud_map, flags='i', overwrite=True)

    else:
        # using the quality assessment band and a "QA" pixel value
        mask_clouds(qab, qapixel)

    #
    # 2. TIRS > Brightness Temperatures
    #

    if mtl_file:

        # if MTL and b10 given, use it to compute at-satellite temperature t10
        if b10:
            # convert DNs to at-satellite temperatures
            t10 = tirs_to_at_satellite_temperature(b10, mtl_file)

        # likewise for b11 -> t11
        if b11:
            # convert DNs to at-satellite temperatures
            t11 = tirs_to_at_satellite_temperature(b11, mtl_file)

    #
    # Initialise a SplitWindowLST object
    #

    split_window_lst = SplitWindowLST(emissivity_class)
    citation_lst = split_window_lst.citation

    #
    # 3. Land Surface Emissivities
    #

    # use given fixed class?
    if emissivity_class:

        if split_window_lst.landcover_class is False:
            # replace with a meaningful error
            grass.warning('Unknown land cover class string! Note, this string '
                          'input option is case sensitive.')

        if emissivity_class == 'Random':
            msg = "\n|! Random emissivity class selected > " + \
                split_window_lst.landcover_class + ' '

        else:
            msg = '\n|! Retrieving average emissivities *only* for {eclass} '

        if info:
            msg += '| Average emissivities (channels 10, 11): '
            msg += str(split_window_lst.emissivity_t10) + ', ' + \
                str(split_window_lst.emissivity_t11)

        msg = msg.format(eclass=split_window_lst.landcover_class)
        g.message(msg)

    # use the FROM-GLC map
    elif landcover_map:

        if average_emissivity_map:
            tmp_avg_lse = average_emissivity_map

        if not average_emissivity_map:
            determine_average_emissivity(tmp_avg_lse, landcover_map,
                                         split_window_lst.average_lse_mapcalc)
            if options['emissivity_out']:
                tmp_avg_lse = options['emissivity_out']

        if delta_emissivity_map:
            tmp_delta_lse = delta_emissivity_map

        if not delta_emissivity_map:
            determine_delta_emissivity(tmp_delta_lse, landcover_map,
                                       split_window_lst.delta_lse_mapcalc)
            if options['delta_emissivity_out']:
                tmp_delta_lse = options['delta_emissivity_out']

    #
    # 4. Modified Split-Window Variance-Covariance Matrix > Column Water Vapor
    #

    if info:
        msg = '\n|i Spatial window of size {n} for Column Water Vapor estimation: '
        msg = msg.format(n=cwv_window_size)
        g.message(msg)

    cwv = Column_Water_Vapor(cwv_window_size, t10, t11)
    citation_cwv = cwv.citation
    estimate_cwv_big_expression(tmp_cwv, t10, t11, cwv._big_cwv_expression())
    if cwv_output:
        tmp_cwv = cwv_output

    #
    # 5. Estimate Land Surface Temperature
    #

    if info and emissivity_class == 'Random':
        msg = '\n|* Will pick a random emissivity class!'
        grass.verbose(msg)

    estimate_lst(lst_output, t10, t11,
                 tmp_avg_lse, tmp_delta_lse, tmp_cwv,
                 split_window_lst.sw_lst_mapcalc)

    #
    # Post-production actions
    #

    # remove MASK
    r.mask(flags='r', verbose=True)

    # time-stamping
    if timestamping:
        add_timestamp(mtl_file, lst_output)

        if cwv_output:
            add_timestamp(mtl_file, cwv_output)

    # Apply color table
    if celsius:
        run('r.colors', map=lst_output, color='celsius')
    else:
        # color table for kelvin
        run('r.colors', map=lst_output, color='kelvin')

    # ToDo: helper function for r.support
    # strings for metadata
    history_lst = '\n' + citation_lst
    history_lst += '\n\n' + citation_cwv
    history_lst += '\n\nSplit-Window model: '
    history_lst += split_window_lst._equation  # sw_lst_mapcalc
    description_lst = ('Land Surface Temperature derived from a split-window algorithm. ')

    if celsius:
        title_lst = 'Land Surface Temperature (C)'
        units_lst = 'Celsius'

    else:
        title_lst = 'Land Surface Temperature (K)'
        units_lst = 'Kelvin'

    landsat8_metadata = Landsat8_MTL(mtl_file)
    source1_lst = landsat8_metadata.scene_id
    source2_lst = landsat8_metadata.origin

    # history entry
    run("r.support", map=lst_output, title=title_lst,
        units=units_lst, description=description_lst,
        source1=source1_lst, source2=source2_lst,
        history=history_lst)

    # (re)name the LST product
    #run("g.rename", rast=(tmp_lst, lst_output))

    # restore region
    if scene_extent:
        grass.del_temp_region()  # restoring previous region settings
        g.message("|! Original Region restored")

    # print citation
    if info:
        print('\nSource: ' + citation_lst)
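
The tmp_map_name() helper used at the top of main() is not part of this
excerpt. A plausible sketch, under the assumption that it only has to derive
a process-unique temporary raster name:

import os

def tmp_map_name(name):
    """Return a process-unique temporary map name (sketch of an assumed helper)."""
    return 'tmp.{pid}.{name}'.format(pid=os.getpid(), name=name)
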
Example #57
0
def get_session(options):
    """Based on a dictionary and available backends create a remote session"""
    requested_backend = options['backend']
    if requested_backend:
        backends = [requested_backend]
    else:
        # on Windows there is little chance of ssh being available, but try anyway
        # pexpect is used only upon request; it is specific and insufficiently tested
        backends = ['paramiko', 'simple']
    session = None
    ensure_nones(options, ['port', 'password'])
    to_ints(options, ['port'])

    # TODO: provide a flag (or default) for reading the file or params
    # from some standardized location or variable (so we have shorter
    # command lines)
    config_name = options['config']
    if config_name:
        gscript.debug("Config file supplied for login")
        check_config_file(config_name)
        with open(config_name, 'r') as config_file:
            config = config_file.read()
            # split using whitespace
            # (supposing no spaces in user name and password)
            values = config.split()
            if len(values) == 2:
                gscript.verbose(_("Using values for login from config file"))
                options['user'] = values[0]
                options['password'] = values[1]
            else:
                gscript.fatal(_("The config file <%s> is not well-formed."
                                " It should contain user name and password"
                                " separated by whitespace"
                                " (newlines, spaces or tabs)" % config_name))

    # get access to wrappers
    from grass.pygrass.utils import set_path
    set_path('g.remote')

    for backend in backends:
        if backend == 'paramiko':
            try:
                from friendlyssh import Connection
                session = Connection(
                    username=options['user'], host=options['server'],
                    password=options['password'], port=options['port'])
                gscript.verbose(_("Using Paramiko backend"))
                break
            except ImportError as error:
                gscript.verbose(_("Tried Paramiko backend but"
                                  " it is not available (%s)" % error))
                continue
        elif backend == 'simple':
            try:
                from simplessh import SshConnection as Connection
                # TODO: support password and port (or warn they are missing)
                session = Connection(
                    user=options['user'], host=options['server'])
                gscript.verbose(_("Using simple (ssh and scp) backend"))
                break
            except ImportError as error:
                gscript.verbose(_("Tried simple (ssh and scp) backend but"
                                  " it is not available (%s)" % error))
                continue
        elif backend == 'pexpect':
            try:
                from pexpectssh import SshSession as Connection
                # TODO: support port (or warn it's missing)
                session = Connection(
                    user=options['user'], host=options['server'],
                    logfile='gcloudsshiface.log', verbose=1,
                    password=options['password'])
                gscript.verbose(_("Using Pexpect (with ssh and scp) backend"))
                break
            except ImportError as error:
                gscript.verbose(_("Tried Pexpect (ssh, scp and pexpect)"
                                  " backend but it is not available"
                                  " (%s)" % error))
                continue
        elif backend == 'local':
            try:
                from localsession import LocalConnection as Connection
                session = Connection()
                gscript.verbose(_("Using local host backend"))
                break
            except ImportError as error:
                gscript.verbose(_("Tried local host"
                                  " backend but it is not available"
                                  " (%s)" % error))
                continue
    if session is None:
        hint = _("Please install Paramiko Python package"
                 " or ssh and scp tools.")
        verbose_message = _("Use --verbose flag to get more information.")
        if sys.platform.startswith('win'):
            platform_hint = _("Note that the ssh is generally not available"
                              " for MS Windows. Paramiko should be accessible"
                              " through python pip but you have to make it"
                              " available to GRASS GIS (or OSGeo4W) Python.")
        else:
            platform_hint = _("All should be in the software repositories."
                              " If Paramiko is not in the repository use pip.")
        gscript.fatal(_(
            "No backend available. {general_hint} {platform_hint}"
            " {verbose}").format(
                general_hint=hint, platform_hint=platform_hint,
                verbose=verbose_message))
    return session
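
The ensure_nones() and to_ints() helpers called near the top are assumed
rather than shown. A minimal sketch of the behaviour the calls imply:

def ensure_nones(dictionary, keys):
    """Replace empty option values with None so they can be tested directly
    (sketch of an assumed helper)."""
    for key in keys:
        if not dictionary[key]:
            dictionary[key] = None

def to_ints(dictionary, keys):
    """Cast non-empty option values to int, e.g. the ssh port
    (sketch of an assumed helper)."""
    for key in keys:
        if dictionary[key]:
            dictionary[key] = int(dictionary[key])
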
Example #58
0
def main():
    vector = options['map']
    layer = options['layer']
    column = options['column']
    value = options['value']
    qcolumn = options['query_column']
    where = options['where']
    sqlitefile = options['sqliteextra']

    mapset = grass.gisenv()['MAPSET']

    # does map exist in CURRENT mapset?
    if not grass.find_file(vector, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. '
              'Run v.db.connect or v.db.addtable first.'))

    table = f['table']
    database = f['database']
    driver = f['driver']

    # check for SQLite backend for extra functions
    if sqlitefile and driver != "sqlite":
        grass.fatal(_("Use of libsqlitefunctions only with SQLite backend"))
    if driver == "sqlite" and sqlitefile:
        if not os.access(sqlitefile, os.R_OK):
            grass.fatal(_("File <%s> not found") % sqlitefile)

    # checking column types
    try:
        coltype = grass.vector_columns(vector, layer)[column]['type']
    except KeyError:
        grass.fatal(_('Column <%s> not found') % column)

    if qcolumn:
        if value:
            grass.fatal(_('<value> and <qcolumn> are mutually exclusive'))
        # special case: we copy from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_('Either <value> or <qcolumn> must be given'))
        # we insert a value
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    # SQLite: preload extra functions from extension lib if provided by user
    if sqlitefile:
        sqliteload = "SELECT load_extension('%s');\n" % sqlitefile
        cmd = sqliteload + cmd

    grass.verbose("SQL: \"%s\"" % cmd)
    grass.write_command('db.execute',
                        input='-',
                        database=database,
                        driver=driver,
                        stdin=cmd)

    # write cmd history:
    grass.vector_history(vector)

    return 0
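
As an illustration of the statement assembled above (hypothetical values):
for a non-numeric column the value is single-quoted, while INTEGER or DOUBLE
PRECISION values are passed through as-is.

table, column = "roads", "label"          # assumed names, for illustration
value = "'%s'" % "primary"                # TEXT column -> value gets quoted
cmd = "UPDATE %s SET %s=%s" % (table, column, value)
cmd += " WHERE " + "cat > 100"
print(cmd)  # UPDATE roads SET label='primary' WHERE cat > 100
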
Example #59
0
def main():
    # old connection
    old_database = options['old_database']
    old_schema = options['old_schema']
    # new connection
    default_connection = grass.db_connection()
    if options['new_driver']:
        new_driver = options['new_driver']
    else:
        new_driver = default_connection['driver']
    if options['new_database']:
        new_database = options['new_database']
    else:
        new_database = default_connection['database']
    if options['new_schema']:
        new_schema = options['new_schema']
    else:
        new_schema = default_connection['schema']

    if old_database == '':
        old_database = None
    old_database_subst = None
    if old_database is not None:
        old_database_subst = substitute_db(old_database)

    new_database_subst = substitute_db(new_database)

    if old_database_subst == new_database_subst and old_schema == new_schema:
        grass.fatal(_("Old and new database connections are identical. Nothing to do."))
    
    mapset = grass.gisenv()['MAPSET']

    vectors = grass.list_grouped('vect')[mapset]
    num_vectors = len(vectors)

    if flags['c']:
        # create new database if not existing
        create_db(new_driver, new_database)

    i = 0
    for vect in vectors:
        vect = "%s@%s" % (vect, mapset)
        i += 1
        grass.message(_("%s\nReconnecting vector map <%s> (%d of %d)...\n%s") %
                      ('-' * 80, vect, i, num_vectors, '-' * 80))
        for f in grass.vector_db(vect, stderr=nuldev).values():
            layer = f['layer']
            schema_table = f['table']
            key = f['key']
            database = f['database']
            driver = f['driver']
            
            # split schema.table
            if '.' in schema_table:
                schema, table = schema_table.split('.', 1)
            else:
                schema = ''
                table = schema_table
            
            if new_schema:
                new_schema_table = "%s.%s" % (new_schema, table)
            else:
                new_schema_table = table
            
            grass.debug("DATABASE = '%s' SCHEMA = '%s' TABLE = '%s' ->\n"
                        "      NEW_DATABASE = '%s' NEW_SCHEMA_TABLE = '%s'" % \
                            (old_database, schema, table, new_database, new_schema_table))

            do_reconnect = True
            if old_database_subst is not None:
                if database != old_database_subst:
                    do_reconnect = False
            if database == new_database_subst:
                do_reconnect = False
            if schema != old_schema:
                do_reconnect = False

            if do_reconnect:
                grass.verbose(_("Reconnecting layer %d...") % layer)
                                          
                if flags['c']:
                    # check if table exists in new database
                    copy_tab(driver, database, schema_table,
                             new_driver, new_database, new_schema_table)
                
                # drop original table if required
                if flags['d']:
                    drop_tab(vect, layer, schema_table, driver, substitute_db(database))

                # reconnect tables (don't use substituted new_database)
                # NOTE: v.db.connect creates an index on the key column
                try:
                    grass.run_command('v.db.connect', flags='o', quiet=True, map=vect,
                                      layer=layer, driver=new_driver, database=new_database,
                                      table=new_schema_table, key=key)
                except CalledModuleError:
                    grass.warning(_("Unable to connect table <%s> to vector <%s> on layer <%s>") %
                                  (table, vect, str(layer)))

            else:
                if database != new_database_subst:
                    grass.warning(_("Layer <%d> will not be reconnected because "
                                    "the database or schema does not match.") % layer)

    return 0
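
The substitute_db() helper is referenced but not defined in this excerpt. A
minimal sketch, assuming its job is to expand GRASS variables in a database
path so that two connection strings can be compared literally:

def substitute_db(database):
    """Expand $GISDBASE, $LOCATION_NAME and $MAPSET in a database path
    (sketch of an assumed helper)."""
    gisenv = grass.gisenv()
    expanded = database.replace('$GISDBASE', gisenv['GISDBASE'])
    expanded = expanded.replace('$LOCATION_NAME', gisenv['LOCATION_NAME'])
    return expanded.replace('$MAPSET', gisenv['MAPSET'])
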
Example #60
0
def main():
    global GISDBASE

    #-------------------------------------------------
    #------- GETTING PARAMETERS ----------------------
    #------ because of the Smalltalk migration, variable
    #------ names with mixed capitals are kept
    hRes = float(ensureopt('hres', 10))
    vRes = float(ensureopt('vres', 1))
    pointDist = float(ensureopt('pointdist', 20))
    endHeight = float(ensureopt('endheight', 100))
    depth = float(ensureopt('depth', 5))
    startHeight = float(ensureopt('startheight', 1000))
    basedtm = ensureopt('dtm', "...")
    limitRunLayers = int(ensureopt('limitrunlayers', 2000))
    chWidth = float(ensureopt('bottomwidth', 100))
    chMaxWidth = float(ensureopt('maxwidth', 300))
    # forced calculation
    sideRatio = float(chMaxWidth - chWidth) / 2 / depth
    reliefRes = float(ensureopt('dtmres', 10))
    linevector = ensureopt('linevector', '...')  # mandatory, so guaranteed to be set
    workPath = TMPDIR  # base directory for os.path.join()
    # In the Smalltalk original, startCover was the line vector name;
    # using the basename is better:

    if len(options['basename']) < 1:
        startCover = linevector.split("@")[0]
    else:
        startCover = options['basename']

    #-------------------------------------------------
    #--------------END OF GETTING PARAMS--------------
    #-------------------------------------------------
    #-------------------------------------------------
    #-------------------------------------------------

    grass.run_command("g.region", vector=linevector, overwrite=True)
    grass.run_command("g.region",
                      n="n+1000",
                      w="w-1000",
                      e="e+1000",
                      s="s-1000",
                      res=hRes,
                      overwrite=True)
    grass.run_command("g.region",
                      save='l_work_' + startCover + '.region',
                      overwrite=True)
    grass.run_command("v.to.points",
                      input=linevector,
                      type="line",
                      output='l_' + startCover + "_points",
                      dmax=pointDist,
                      overwrite=True)

    filename = os.path.join(workPath, startCover + ".ascii")
    grass.run_command("v.out.ascii",
                      input='l_' + startCover + "_points",
                      layer=2,
                      type="point",
                      output=filename,
                      columns="cat,lcat,along",
                      format="point",
                      overwrite=True)

    lines = []
    inFile = open(filename, 'r')
    for line in inFile.read().splitlines():
        lines.append(line.split("|"))
    inFile.close()

    length = float(lines[-1][4])

    grass.verbose("Path length: " + str(length))

    filename = os.path.join(workPath, startCover + '_' + 'profileXY.csv')
    grass.verbose("Profile: " + str(filename))

    outFile = open(filename, "w")
    for each in lines:
        tmp = (each[0]) + ',' + (each[1]) + "\n"
        outFile.write(tmp)
    outFile.close()

    # The next step should be more exact: a full trapezoid needs a wider
    # buffer, but at this point we do not yet know how deep the deepest
    # cut will be.
    #
    grass.run_command('v.buffer',
                      overwrite=True,
                      input=linevector,
                      type="line",
                      output='l_' + startCover + '_maxbuf',
                      distance=str(float(chMaxWidth) / float(2)))
    grass.run_command('r.mask',
                      overwrite=True,
                      vector='l_' + startCover + '_maxbuf')
    grass.run_command('r.mapcalc',
                      expression='l_' + startCover + '_maxbuf = ' + basedtm,
                      overwrite=True)

    s = grass.read_command('r.univar',
                           overwrite=True,
                           map='l_' + startCover + '_maxbuf')
    kv = grass.parse_key_val(s, sep=':')
    maxH = float(kv['maximum'])
    maxH = maxH + vRes
    grass.verbose("Maximum height: " + str(maxH))

    grass.run_command('r.mask', flags="r")

    vLevels = int(round(((maxH - endHeight) / vRes))) + 2
    grass.verbose("Number of levels: " + str(vLevels))

    hSeries = []

    # remove stale point files from previous runs (portable, unlike calling 'rm')
    import glob
    for stale in glob.glob(os.path.join(workPath, 'l_*.pascii')):
        os.remove(stale)

    db = {}

    for n in range(1, vLevels):
        hSeries.append(round((n - 1) * vRes + endHeight))
        db[n] = []

    quo = (endHeight - startHeight) / length

    grass.verbose("Start height: " + str(startHeight))
    grass.verbose("End height: " + str(endHeight))
    grass.verbose("Slope ratio (in meters / meter): " + str(quo))

    for aLine in lines:
        tmp = (quo * float(aLine[4])) + startHeight
        level = int(round(((tmp - endHeight) / vRes) + 1))
        layer = hSeries[level - 1]  # python arrays run from 0
        #print "---------------"+str(aLine)+"  level: "+str(level)
        db[level].append([aLine[0], aLine[1], aLine[2], chWidth / 2])
        for levelUp in range(level + 1, vLevels):
            bufferWidth = ((chWidth / 2) +
                           ((levelUp - level) * vRes * sideRatio))
            if bufferWidth <= (chMaxWidth / 2):
                db[levelUp].append([aLine[0], aLine[1], aLine[2], bufferWidth])

    for aKey in db:
        #print "---------------"+str(aKey)
        filename = os.path.join(
            workPath, 'l_' + startCover + '_' + str(aKey).zfill(5) + '.pascii')
        outFile = open(filename, "w")
        for each in db[aKey]:
            tmp = str(each[0]) + '|' + str(each[1]) + '|' + str(
                each[2]) + '|' + str(each[3]) + "\n"
            outFile.write(tmp)
        outFile.close()

    grass.run_command('g.region', region='l_work_' + startCover + '.region')
    grass.run_command('g.region', res=str(hRes))

    #creating buffer for raster masking
    grass.run_command('v.buffer',
                      overwrite=True,
                      input=linevector,
                      type='line',
                      output='l_' + startCover + '_buf200',
                      distance=200)
    grass.run_command('r.mask',
                      overwrite=True,
                      vector='l_' + startCover + '_buf200')

    for n in range(1, min(vLevels, limitRunLayers)):
        if len(db[n]) > 0:
            basename = 'l_' + startCover + '_' + str(n).zfill(5)
            grass.run_command(
                'v.in.ascii',
                flags="n",
                overwrite=True,
                input=os.path.join(workPath, basename + '.pascii'),
                output=basename,
                columns="x double precision, y double precision, "
                        "cat int, width double precision",
                cat=3)
            grass.run_command('v.buffer',
                              flags="t",
                              overwrite=True,
                              input=basename,
                              layer=1,
                              type="point",
                              output=basename + '_buf',
                              column="width",
                              tolerance=0.01)
            grass.run_command('v.db.addcolumn',
                              map=basename + '_buf',
                              col='level int')
            grass.run_command('v.db.update',
                              map=basename + '_buf',
                              column="level",
                              value=str(hSeries[n - 1]))
            grass.run_command('v.to.rast',
                              overwrite=True,
                              input=basename + '_buf',
                              type="area",
                              output=basename + '_buf_diss',
                              use="attr",
                              attribute_column="level")

    #CALCULATING FINAL RESULT

    grass.run_command('r.mask', flags='r')
    grass.run_command('g.region', region='l_work_' + startCover + '.region')
    grass.run_command('g.region', res=str(hRes))
    grass.run_command('r.mapcalc',
                      expression='source = ' + basedtm,
                      overwrite=True)

    for n in range(1, min(vLevels, limitRunLayers)):
        if len(db[n]) > 0:
            basename = 'l_' + startCover + '_' + str(n).zfill(5)
            grass.verbose("Applying: " + basename)
            grass.run_command('r.mapcalc',
                              expression='temp = if (isnull(' + basename +
                              '_buf_diss),source,if ( ' + basename +
                              '_buf_diss < source , ' + basename +
                              '_buf_diss, source))',
                              overwrite=True)
            grass.run_command('g.rename', overwrite=True, raster='temp,source')
    grass.run_command('r.mapcalc',
                      expression='dtm_' + startCover +
                      ' = if (isnull(source),' + basedtm + ',source)',
                      overwrite=True)
    grass.run_command('r.colors', map='dtm_' + startCover, color='bgyr')
    grass.run_command('g.region', res=str(reliefRes))
    grass.run_command('r.relief',
                      overwrite=True,
                      input='dtm_' + startCover,
                      output='dtm_' + startCover + '_shaded',
                      altitude=60,
                      azimuth=45)

    grass.verbose("Calculating volume difference")

    grass.run_command('g.region', raster='dtm_' + startCover)
    grass.run_command('g.region', res=str(hRes))
    grass.run_command('r.mask',
                      overwrite=True,
                      vector='l_' + startCover + '_buf200')
    grass.run_command('r.mapcalc',
                      overwrite=True,
                      expression='diff_' + startCover + ' = ' + basedtm +
                      ' - dtm_' + startCover)

    s = grass.read_command('r.univar',
                           overwrite=True,
                           map='diff_' + startCover)
    kv = grass.parse_key_val(s, sep=':')
    sum = float(kv['sum'])

    grass.run_command('r.mask', flags="r")

    # WRITE LOG FILE

    filename = startCover + ".txt"

    s = grass.read_command('g.region', flags="p3")
    kv = grass.parse_key_val(s, sep=':')

    xres = float(kv['nsres'])
    yres = float(kv['ewres'])
    m3 = xres * yres * sum
    mt = m3 * 2.7 * 1000
    liebherr = mt / 350
    visontaev = mt / 1000 / 4200

    outFile = open(filename, "w")

    tmp = []
    tmp.append("Path: " + linevector + " >> " + startCover + "\n")
    tmp.append("M3: " + str(m3) + "\n")
    tmp.append("Limestone tons: " + str(mt) + "\n")
    tmp.append("Kt limestone: " + str(mt / 1000.0) + "\n")
    tmp.append("Liebherr T 282B: " + str(liebherr) + "\n")
    tmp.append("Visonta year " + str(visontaev) + "\n")

    for each in tmp:
        grass.message(each)
        outFile.write(each)

    outFile.close()

    grass.run_command('g.remove',
                      flags="f",
                      type="all",
                      pattern='l_*' + startCover + '*')

    return 0
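
The ensureopt() helper used throughout main() is not included in this
excerpt. A minimal sketch, assuming it simply falls back to a default when
the user left an option empty (relying on the module-level options dict):

def ensureopt(name, default):
    """Return options[name] if set, otherwise the given default
    (sketch of an assumed helper)."""
    if options.get(name):
        return options[name]
    return default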