Code example #1
File: v.db.renamecolumn.py  Project: caomw/grass
def main():
    map = options['map']
    layer = options['layer']
    column = options['column']

    mapset = grass.gisenv()['MAPSET']

    if not grass.find_file(map, element = 'vector', mapset = mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    f = grass.vector_layer_db(map, layer)

    table = f['table']
    keycol = f['key']
    database = f['database']
    driver = f['driver']

    if not table:
        grass.fatal(_("There is no table connected to the input vector map. Cannot rename any column"))

    cols = column.split(',')
    oldcol = cols[0]
    newcol = cols[1]

    if driver == "dbf":
        if len(newcol) > 10:
            grass.fatal(_("Column name <%s> too long. The DBF driver supports column names not longer than 10 characters") % newcol)

    if oldcol == keycol:
        grass.fatal(_("Cannot rename column <%s> as it is needed to keep table <%s> connected to the input vector map") % (oldcol, table))

    # describe old col
    oldcoltype = None
    for f in grass.db_describe(table)['cols']:
        if f[0] != oldcol:
            continue
        oldcoltype = f[1]
        oldcollength = f[2]

    # old col there?
    if not oldcoltype:
        grass.fatal(_("Column <%s> not found in table <%s>") % (oldcol, table))

    # some tricks
    if driver in ['sqlite', 'dbf']:
        if oldcoltype.upper() == "CHARACTER":
            colspec = "%s varchar(%s)" % (newcol, oldcollength)
        else:
            colspec = "%s %s" % (newcol, oldcoltype)

        grass.run_command('v.db.addcolumn', map = map, layer = layer, column = colspec)
        sql = "UPDATE %s SET %s=%s" % (table, newcol, oldcol)
        grass.write_command('db.execute', input = '-', database = database, driver = driver, stdin = sql)
        grass.run_command('v.db.dropcolumn', map = map, layer = layer, column = oldcol)
    else:
        sql = "ALTER TABLE %s RENAME %s TO %s" % (table, oldcol, newcol)
        grass.write_command('db.execute', input = '-', database = database, driver = driver, stdin = sql)

    # write cmd history:
    grass.vector_history(map)
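
For context, a minimal sketch of how this module could be called from another GRASS Python script through the same grass.script API used above; the map and column names ('roads', 'label', 'roadname') are hypothetical:

import grass.script as grass

# hypothetical call: rename column 'label' to 'roadname' on vector map 'roads'
# (the column option carries the pair 'oldname,newname', which main() splits above)
grass.run_command('v.db.renamecolumn', map='roads', layer='1',
                  column='label,roadname')
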
Code example #2
File: v.db.join.py  Project: AsherBond/MondocosmOS
def main():
    map = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['otable']
    ocolumn = options['ocolumn']

    f = grass.vector_layer_db(map, layer)

    maptable = f['table']
    database = f['database']
    driver = f['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(_("There is no table connected to this map. Unable to join any column."))

    if column not in grass.vector_columns(map, layer):
        grass.fatal(_("Column <%s> not found in table <%s> at layer <%s>") % (column, map, layer))

    all_cols_ot = grass.db_describe(otable, driver = driver, database = database)['cols']
    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in all_cols_ot:
        # Skip the vector column which is used for the join
        colname = col[0]
        if colname == column:
            continue
        # SQLite 3 does not support the precision number any more
        if len(col) > 2 and driver != "sqlite":
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # Add only the new column to the table
        if colname not in all_cols_tt:
            if grass.run_command('v.db.addcolumn', map = map, columns = colspec, layer = layer) != 0:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table = maptable, column = column,
                                   otable = otable, ocolumn = ocolumn,
                                   colname = colname)

        grass.verbose(_("Updating column <%s> of vector map <%s>...") % (colname, map))
        if grass.write_command('db.execute', stdin = stmt, input = '-', database = database, driver = driver) != 0:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history:
    grass.vector_history(map)
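
A minimal usage sketch for this older version, assuming a hypothetical vector map 'fields' and an external attribute table 'soils_data'; the option names otable and ocolumn correspond to the options dictionary read at the top of main():

import grass.script as grass

# hypothetical join: copy the columns of 'soils_data' into the table of 'fields',
# matching fields.soil_id against soils_data.soil_id
grass.run_command('v.db.join', map='fields', layer='1', column='soil_id',
                  otable='soils_data', ocolumn='soil_id')
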
Code example #3
def main():
    force = flags['f']
    map = options['map']
    table = options['table']
    layer = options['layer']

    # do some paranoia tests as well:
    f = grass.vector_layer_db(map, layer)
    
    if not table:
        # Removing table name connected to selected layer
        table = f['table']
        if not table:
            grass.fatal(_("No table assigned to layer <%s>") % layer)
    else:
        # Removing user specified table
        existingtable = f['table']
        if existingtable != table:
            grass.fatal(_("User selected table <%s> but the table <%s> is linked to layer <%s>")
                        % (table, existingtable, layer))

    # we use the DB settings of the selected layer
    database = f['database']
    driver = f['driver']

    grass.message(_("Removing table <%s> linked to layer <%s> of vector map <%s>")
                  % (table, layer, map))

    if not force:
        grass.message(_("You must use the -f (force) flag to actually remove the table. Exiting."))
        grass.message(_("Leaving map/table unchanged."))
        sys.exit(0)

    grass.message(_("Dropping table <%s>...") % table)

    try:
        grass.write_command('db.execute', stdin="DROP TABLE %s" % table,
                            input='-', database=database, driver=driver)
    except CalledModuleError:
        grass.fatal(_("An error occurred while running db.execute"))

    grass.run_command('v.db.connect', flags = 'd', map = map, layer = layer)

    grass.message(_("Current attribute table link(s):")) 
    # silently test first to avoid confusing error messages
    nuldev = open(os.devnull, 'w')
    try:
        grass.run_command('v.db.connect', flags='p', map=map, quiet=True,
                          stdout=nuldev, stderr=nuldev)
    except CalledModuleError:
        grass.message(_("(No database links remaining)"))
    else:
        grass.run_command('v.db.connect', flags='p', map=map)

    # write cmd history:
    grass.vector_history(map)
Code example #4
File: v.db.update.py  Project: AsherBond/MondocosmOS
def main():
    vector = options['map']
    layer = options['layer']
    column = options['column']
    value = options['value']
    qcolumn = options['qcolumn']
    where = options['where']

    mapset = grass.gisenv()['MAPSET']

    # does map exist in CURRENT mapset?
    if not grass.find_file(vector, element = 'vector', mapset = mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(_('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'))

    table = f['table']
    database = f['database']
    driver = f['driver']

    # checking column types
    try:
        coltype = grass.vector_columns(vector, layer)[column]['type']
    except KeyError:
        grass.fatal(_('Column <%s> not found') % column)

    if qcolumn:
        if value:
            grass.fatal(_('<value> and <qcolumn> are mutually exclusive'))
        # special case: we copy from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_('Either <value> or <qcolumn> must be given'))
        # we insert a value
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    grass.verbose("SQL: \"%s\"" % cmd)

    grass.write_command('db.execute', input = '-', database = database, driver = driver, stdin = cmd)

    # write cmd history:
    grass.vector_history(vector)

    return 0
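
The function assembles a single UPDATE statement and pipes it to db.execute. A minimal usage sketch with hypothetical map, column, and WHERE clause; value and qcolumn are mutually exclusive, as the checks above enforce:

import grass.script as grass

# hypothetical call: set 'status' to 'checked' for all features with cat < 100
grass.run_command('v.db.update', map='roads', layer='1', column='status',
                  value='checked', where='cat < 100')
# resulting SQL sent to db.execute (assuming the linked table is also named 'roads';
# the value is quoted because 'status' is not a numeric column):
#   UPDATE roads SET status='checked' WHERE cat < 100
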
Code example #5
File: v.dissolve.py  Project: GRASS-GIS/grass-ci
def main():
    global output, tmp

    input = options['input']
    output = options['output']
    layer = options['layer']
    column = options['column']

    # setup temporary file
    tmp = str(os.getpid())

    # does map exist?
    if not grass.find_file(input, element='vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % input)

    if not column:
        grass.warning(
            _("No '%s' option specified. Dissolving based on category values from layer <%s>.") %
            ("column", layer))
        grass.run_command('v.extract', flags='d', input=input,
                          output=output, type='area', layer=layer)
    else:
        if int(layer) == -1:
            grass.warning(_("Invalid layer number (%d). "
                            "Parameter '%s' specified, assuming layer '1'.") %
                          (int(layer), 'column'))
            layer = '1'
        try:
            coltype = grass.vector_columns(input, layer)[column]
        except KeyError:
            grass.fatal(_('Column <%s> not found') % column)

        if coltype['type'] not in ('INTEGER', 'SMALLINT', 'CHARACTER', 'TEXT'):
            grass.fatal(_("Key column must be of type integer or string"))

        f = grass.vector_layer_db(input, layer)

        table = f['table']

        tmpfile = '%s_%s' % (output, tmp)

        try:
            grass.run_command('v.reclass', input=input, output=tmpfile,
                              layer=layer, column=column)
            grass.run_command('v.extract', flags='d', input=tmpfile,
                              output=output, type='area', layer=layer)
        except CalledModuleError as e:
            grass.fatal(_("Final extraction steps failed."
                          " Check above error messages and"
                          " see following details:\n%s") % e)

    # write cmd history:
    grass.vector_history(output)
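
A minimal usage sketch with hypothetical map and column names; when a column is given, the script first reclassifies by that column and then extracts the dissolved areas, as shown above:

import grass.script as grass

# hypothetical call: merge adjacent polygons of 'municipalities' sharing the same 'region' value
grass.run_command('v.dissolve', input='municipalities', output='regions',
                  layer='1', column='region')
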
Code example #6
File: v.db.droprow.py  Project: AsherBond/MondocosmOS
def main():
    # delete vectors via reverse selection
    ret = grass.run_command('v.extract',
                            flags = 'r',
                            input = options['input'], layer = options['layer'],
                            output = options['output'], where = options['where'])
    if ret != 0:
        return 1

    # write cmd history:
    grass.vector_history(map = options['output'])

    return 0
Code example #7
File: v.db.droprow.py  Project: rashadkm/grass_cmake
def main():
    # delete vectors via reverse selection
    try:
        grass.run_command('v.extract',
                          flags='r',
                          input=options['input'], layer=options['layer'],
                          output=options['output'], where=options['where'])
    except CalledModuleError:
        return 1

    # write cmd history:
    grass.vector_history(map = options['output'])

    return 0
Code example #8
File: v.db.addcolumn.py  Project: GRASS-GIS/grass-ci
def main():
    map = options['map']
    layer = options['layer']
    columns = options['columns']
    columns = [col.strip() for col in columns.split(',')]

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    exists = bool(grass.find_file(map, element='vector', mapset=mapset)['file'])

    if not exists:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    try:
        f = grass.vector_db(map)[int(layer)]
    except KeyError:
        grass.fatal(
            _("There is no table connected to this map. Run v.db.connect or v.db.addtable first."))

    table = f['table']
    database = f['database']
    driver = f['driver']
    column_existing = grass.vector_columns(map, int(layer)).keys()

    for col in columns:
        if not col:
            grass.fatal(_("There is an empty column. Did you leave a trailing comma?"))
        col_name = col.split(' ')[0].strip()
        if col_name in column_existing:
            grass.error(_("Column <%s> is already in the table. Skipping.") % col_name)
            continue
        grass.verbose(_("Adding column <%s> to the table") % col_name)
        p = grass.feed_command('db.execute', input='-', database=database, driver=driver)
        p.stdin.write("ALTER TABLE %s ADD COLUMN %s" % (table, col))
        grass.debug("ALTER TABLE %s ADD COLUMN %s" % (table, col))
        p.stdin.close()
        if p.wait() != 0:
            grass.fatal(_("Unable to add column <%s>.") % col)

    # write cmd history:
    grass.vector_history(map)
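
A minimal usage sketch; the columns option takes a comma-separated list of "name type" definitions, which the loop above splits and feeds to ALTER TABLE one by one. The map and column names are hypothetical:

import grass.script as grass

# hypothetical call: add two new columns to the table linked to 'roads'
grass.run_command('v.db.addcolumn', map='roads', layer='1',
                  columns='length_km double precision,label varchar(20)')
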
Code example #9
File: v.dissolve.py  Project: AsherBond/MondocosmOS
def main():
    global output, tmp

    input = options['input']
    output = options['output']
    layer = options['layer']
    column = options['column']

    #### setup temporary file
    tmp = str(os.getpid())

    # does map exist?
    if not grass.find_file(input, element = 'vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % input)

    if not column:
        grass.run_command('v.extract', flags = 'd', input = input,
                          output = output, type = 'area', layer = layer)
    else:
        try:
            coltype = grass.vector_columns(input, layer)[column]
        except KeyError:
            grass.fatal(_('Column <%s> not found') % column)

        if coltype['type'] not in ('INTEGER', 'CHARACTER'):
            grass.fatal(_("Key column must be of type integer or string"))

        f = grass.vector_layer_db(input, layer)

        table = f['table']

        tmpfile = '%s_%s' % (output, tmp)

        if grass.run_command('v.reclass', input = input, output = tmpfile,
                             layer = layer, column = column) == 0:
            grass.run_command('v.extract', flags = 'd', input = tmpfile,
                              output = output, type = 'area', layer = layer)

    # write cmd history:
    grass.vector_history(output)
Code example #10
File: v.dissolve.py  Project: hellik/grass
def main():
    global output, tmp

    input = options["input"]
    output = options["output"]
    layer = options["layer"]
    column = options["column"]

    # setup temporary file
    tmp = str(os.getpid())

    # does map exist?
    if not grass.find_file(input, element="vector")["file"]:
        grass.fatal(_("Vector map <%s> not found") % input)

    if not column:
        grass.warning(
            _("No '%s' option specified. Dissolving based on category values from layer <%s>."
              ) % ("column", layer))
        grass.run_command("v.extract",
                          flags="d",
                          input=input,
                          output=output,
                          type="area",
                          layer=layer)
    else:
        if int(layer) == -1:
            grass.warning(
                _("Invalid layer number (%d). "
                  "Parameter '%s' specified, assuming layer '1'.") %
                (int(layer), "column"))
            layer = "1"
        try:
            coltype = grass.vector_columns(input, layer)[column]
        except KeyError:
            grass.fatal(_("Column <%s> not found") % column)

        if coltype["type"] not in ("INTEGER", "SMALLINT", "CHARACTER", "TEXT"):
            grass.fatal(_("Key column must be of type integer or string"))

        f = grass.vector_layer_db(input, layer)

        table = f["table"]

        tmpfile = "%s_%s" % (output, tmp)

        try:
            grass.run_command("v.reclass",
                              input=input,
                              output=tmpfile,
                              layer=layer,
                              column=column)
            grass.run_command(
                "v.extract",
                flags="d",
                input=tmpfile,
                output=output,
                type="area",
                layer=layer,
            )
        except CalledModuleError as e:
            grass.fatal(
                _("Final extraction steps failed."
                  " Check above error messages and"
                  " see following details:\n%s") % e)

    # write cmd history:
    grass.vector_history(output)
Code example #11
def main():
    force = flags["f"]
    map = options["map"]
    table = options["table"]
    layer = options["layer"]

    # We check for existence of the map in the current mapset before
    # doing any other operation.
    info = gscript.find_file(map, element="vector", mapset=".")
    if not info["file"]:
        mapset = gscript.gisenv()["MAPSET"]
        # Message is formulated in the way that it does not mislead
        # in case where a map of the same name is in another mapset.
        gscript.fatal(
            _("Vector map <{name}> not found"
              " in the current mapset ({mapset})").format(name=map,
                                                          mapset=mapset))

    # do some paranoia tests as well:
    f = gscript.vector_layer_db(map, layer)

    if not table:
        # Removing table name connected to selected layer
        table = f["table"]
        if not table:
            gscript.fatal(_("No table assigned to layer <%s>") % layer)
    else:
        # Removing user specified table
        existingtable = f["table"]
        if existingtable != table:
            gscript.fatal(
                _("User selected table <%s> but the table <%s> "
                  "is linked to layer <%s>") % (table, existingtable, layer))

    # we use the DB settings selected layer
    database = f["database"]
    driver = f["driver"]

    gscript.message(
        _("Removing table <%s> linked to layer <%s> of vector"
          " map <%s>") % (table, layer, map))

    if not force:
        gscript.message(
            _("You must use the -f (force) flag to actually "
              "remove the table. Exiting."))
        gscript.message(_("Leaving map/table unchanged."))
        sys.exit(0)

    gscript.message(_("Dropping table <%s>...") % table)

    try:
        gscript.write_command(
            "db.execute",
            stdin="DROP TABLE %s" % table,
            input="-",
            database=database,
            driver=driver,
        )
    except CalledModuleError:
        gscript.fatal(_("An error occurred while running db.execute"))

    gscript.run_command("v.db.connect", flags="d", map=map, layer=layer)

    gscript.message(_("Current attribute table link(s):"))
    # silently test first to avoid confusing error messages
    nuldev = open(os.devnull, "w")
    try:
        gscript.run_command("v.db.connect",
                            flags="p",
                            map=map,
                            quiet=True,
                            stdout=nuldev,
                            stderr=nuldev)
    except CalledModuleError:
        gscript.message(_("(No database links remaining)"))
    else:
        gscript.run_command("v.db.connect", flags="p", map=map)

    # write cmd history:
    gscript.vector_history(map)
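
A minimal usage sketch, assuming this is the v.db.droptable module (its options and messages match the script above) and using a hypothetical map name; without the -f flag the script only reports what it would do:

import grass.script as gscript

# hypothetical call: actually drop the attribute table linked to layer 1 of 'old_roads'
gscript.run_command('v.db.droptable', flags='f', map='old_roads', layer='1')
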
Code example #12
File: v.db.join.py  Project: hellik/grass
def main():
    map = options["map"]
    layer = options["layer"]
    column = options["column"]
    otable = options["other_table"]
    ocolumn = options["other_column"]
    if options["subset_columns"]:
        scolumns = options["subset_columns"].split(",")
    else:
        scolumns = None

    try:
        f = grass.vector_layer_db(map, layer)
    except CalledModuleError:
        sys.exit(1)

    maptable = f["table"]
    database = f["database"]
    driver = f["driver"]

    if driver == "dbf":
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(
            _("There is no table connected to this map. Unable to join any column."
              ))

    # check if column is in map table
    if column not in grass.vector_columns(map, layer):
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (column, maptable))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver=driver,
                                    database=database)["cols"]

    # check if ocolumn is on other table
    if ocolumn not in [ocol[0] for ocol in all_cols_ot]:
        grass.fatal(
            _("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine columns subset from other table
    if not scolumns:
        # select all columns from other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        # check if scolumns exists in the other table
        for scol in scolumns:
            found = False
            for col_ot in all_cols_ot:
                if scol == col_ot[0]:
                    found = True
                    cols_to_add.append(col_ot)
                    break
            if not found:
                grass.warning(
                    _("Column <%s> not found in table <%s>") % (scol, otable))

    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in cols_to_add:
        # skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue

        use_len = False
        if len(col) > 2:
            use_len = True
            # Sqlite 3 does not support the precision number any more
            if driver == "sqlite":
                use_len = False
            # MySQL - expect format DOUBLE PRECISION(M,D), see #2792
            elif driver == "mysql" and col[1] == "DOUBLE PRECISION":
                use_len = False

        if use_len:
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table
        if colname not in all_cols_tt:
            try:
                grass.run_command("v.db.addcolumn",
                                  map=map,
                                  columns=colspec,
                                  layer=layer)
            except CalledModuleError:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(
            table=maptable,
            column=column,
            otable=otable,
            ocolumn=ocolumn,
            colname=colname,
        )
        grass.debug(stmt, 1)
        grass.verbose(
            _("Updating column <%s> of vector map <%s>...") % (colname, map))
        try:
            grass.write_command("db.execute",
                                stdin=stmt,
                                input="-",
                                database=database,
                                driver=driver)
        except CalledModuleError:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(map)

    return 0
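
A minimal usage sketch for this newer version, with hypothetical map, table, and column names; other_table, other_column, and subset_columns match the options read at the top of main():

import grass.script as grass

# hypothetical join: copy only 'ph' and 'texture' from 'soils_data' into the table of 'fields'
grass.run_command('v.db.join', map='fields', layer='1', column='soil_id',
                  other_table='soils_data', other_column='soil_id',
                  subset_columns='ph,texture')
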
Code example #13
def main():
    inputraster = options['input']
    number_lines = int(options['number_lines'])
    edge_detection_algorithm = options['edge_detection']
    no_edge_friction = int(options['no_edge_friction'])
    lane_border_multiplier = int(options['lane_border_multiplier'])
    min_tile_size = None
    if options['min_tile_size']:
        min_tile_size = float(options['min_tile_size'])
    existing_cutlines = None
    if options['existing_cutlines']:
        existing_cutlines = options['existing_cutlines'].split(',')
    tiles = options['output']
    memory = int(options['memory'])
    tiled = False

    if options['tile_width']:
        tiled = True
        gscript.message(_("Using tiles processing for edge detection"))
        width = int(options['tile_width'])
        height = int(options['tile_height'])
        overlap = int(options['overlap'])

    processes = int(options['processes'])

    global temp_maps
    temp_maps = []
    r = 'raster'
    v = 'vector'

    if existing_cutlines:
        existingcutlinesmap = 'temp_icutlines_existingcutlinesmap_%i' % os.getpid()
        if len(existing_cutlines) > 1:
            gscript.run_command('v.patch',
                                input_=existing_cutlines,
                                output=existingcutlinesmap,
                                quiet=True,
                                overwrite=True)
            existing_cutlines = existingcutlinesmap

        gscript.run_command('v.to.rast',
                            input_=existing_cutlines,
                            output=existingcutlinesmap,
                            use='val',
                            type_='line,boundary',
                            overwrite=True,
                            quiet=True)

        temp_maps.append([existingcutlinesmap, r])

    temp_edge_map = "temp_icutlines_edgemap_%d" % os.getpid()
    temp_maps.append([temp_edge_map, r])

    gscript.message(
        _("Creating edge map using <%s> edgedetection algorithm") %
        edge_detection_algorithm)
    if edge_detection_algorithm == 'zc':
        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'width_': int(options['zc_width']),
            'threshold': float(options['zc_threshold']),
            'quiet': True
        }

        if tiled:
            grd = GridModule('i.zc',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.zc', **kwargs)

    elif edge_detection_algorithm == 'canny':
        if not gscript.find_program('i.edge', '--help'):
            message = _("You need to install the addon i.edge to use ")
            message += _("the Canny edge detector.\n")
            message += _(
                " You can install the addon with 'g.extension i.edge'")
            gscript.fatal(message)

        kwargs = {
            'input': inputraster,
            'output': temp_edge_map,
            'low_threshold': float(options['canny_low_threshold']),
            'high_threshold': float(options['canny_high_threshold']),
            'sigma': float(options['canny_sigma']),
            'quiet': True
        }

        if tiled:
            grd = GridModule('i.edge',
                             width=width,
                             height=height,
                             overlap=overlap,
                             processes=processes,
                             split=False,
                             flags='n',
                             **kwargs)
            grd.run()
        else:
            gscript.run_command('i.edge', flags='n', **kwargs)

    else:
        gscript.fatal(
            "Only zero-crossing and Canny available as edge detection algorithms."
        )

    region = gscript.region()
    gscript.message(_("Finding cutlines in both directions"))

    nsrange = float(region.n - region.s - region.nsres)
    ewrange = float(region.e - region.w - region.ewres)

    if nsrange > ewrange:
        hnumber_lines = number_lines
        vnumber_lines = max(int(number_lines * (ewrange / nsrange)), 1)
    else:
        vnumber_lines = number_lines
        hnumber_lines = max(int(number_lines * (nsrange / ewrange)), 1)

    # Create the lines in horizontal direction
    nsstep = float(region.n - region.s - region.nsres) / hnumber_lines
    hpointsy = [((region.n - i * nsstep) - region.nsres / 2.0)
                for i in range(0, hnumber_lines + 1)]
    hlanepointsy = [y - nsstep / 2.0 for y in hpointsy]
    hstartpoints = listzip([region.w + 0.2 * region.ewres] * len(hpointsy),
                           hpointsy)
    hstoppoints = listzip([region.e - 0.2 * region.ewres] * len(hpointsy),
                          hpointsy)
    hlanestartpoints = listzip([region.w + 0.2 * region.ewres] *
                               len(hlanepointsy), hlanepointsy)
    hlanestoppoints = listzip([region.e - 0.2 * region.ewres] *
                              len(hlanepointsy), hlanepointsy)

    hlanemap = 'temp_icutlines_hlanemap_%i' % os.getpid()
    temp_maps.append([hlanemap, v])
    temp_maps.append([hlanemap, r])

    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(hlanemap)
    new.open('w')
    for line in listzip(hlanestartpoints, hlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']

    gscript.run_command('v.to.rast',
                        input_=hlanemap,
                        output=hlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)

    hbasemap = 'temp_icutlines_hbasemap_%i' % os.getpid()
    temp_maps.append([hbasemap, r])

    # Building the cost maps using the following logic
    # - Any pixel not on an edge, nor on an existing cutline gets a
    # no_edge_friction cost, or no_edge_friction_cost x 10  if there are
    # existing cutlines
    # - Any pixel on an edge gets a cost of 1 if there are no existing cutlines,
    # and a cost of no_edge_friction if there are
    # - A lane line gets a very high cost (lane_border_multiplier x cost of no
    # edge pixel - the latter depending on the existence of cutlines).
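    # For illustration (hypothetical values): with no existing cutlines,
    # no_edge_friction = 5 and lane_border_multiplier = 10000, the expression
    # assembled below evaluates to:
    #   <hbasemap> = if(isnull(<hlanemap>), if(<temp_edge_map> == 0, 5, 1), 50000)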

    mapcalc_expression = "%s = " % hbasemap
    mapcalc_expression += "if(isnull(%s), " % hlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (existingcutlinesmap,
                                                           no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    hcumcost = 'temp_icutlines_hcumcost_%i' % os.getpid()
    temp_maps.append([hcumcost, r])
    hdir = 'temp_icutlines_hdir_%i' % os.getpid()
    temp_maps.append([hdir, r])

    # Create the lines in vertical direction
    ewstep = float(region.e - region.w - region.ewres) / vnumber_lines
    vpointsx = [((region.e - i * ewstep) - region.ewres / 2.0)
                for i in range(0, vnumber_lines + 1)]
    vlanepointsx = [x + ewstep / 2.0 for x in vpointsx]
    vstartpoints = listzip(vpointsx,
                           [region.n - 0.2 * region.nsres] * len(vpointsx))
    vstoppoints = listzip(vpointsx,
                          [region.s + 0.2 * region.nsres] * len(vpointsx))
    vlanestartpoints = listzip(vlanepointsx, [region.n - 0.2 * region.nsres] *
                               len(vlanepointsx))
    vlanestoppoints = listzip(vlanepointsx, [region.s + 0.2 * region.nsres] *
                              len(vlanepointsx))

    vlanemap = 'temp_icutlines_vlanemap_%i' % os.getpid()
    temp_maps.append([vlanemap, v])
    temp_maps.append([vlanemap, r])

    os.environ['GRASS_VERBOSE'] = '0'
    new = VectorTopo(vlanemap)
    new.open('w')
    for line in listzip(vlanestartpoints, vlanestoppoints):
        new.write(geom.Line(line), cat=1)
    new.close()
    del os.environ['GRASS_VERBOSE']

    gscript.run_command('v.to.rast',
                        input_=vlanemap,
                        output=vlanemap,
                        use='val',
                        type_='line',
                        overwrite=True,
                        quiet=True)

    vbasemap = 'temp_icutlines_vbasemap_%i' % os.getpid()
    temp_maps.append([vbasemap, r])
    mapcalc_expression = "%s = " % vbasemap
    mapcalc_expression += "if(isnull(%s), " % vlanemap
    if existing_cutlines:
        mapcalc_expression += "if(%s == 0 && isnull(%s), " % (
            temp_edge_map, existingcutlinesmap)
        mapcalc_expression += "%i, " % (no_edge_friction * 10)
        mapcalc_expression += "if(isnull(%s), %s, 1))," % (existingcutlinesmap,
                                                           no_edge_friction)
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction * 10)
    else:
        mapcalc_expression += "if(%s == 0, " % temp_edge_map
        mapcalc_expression += "%i, " % no_edge_friction
        mapcalc_expression += "1), "
        mapcalc_expression += "%i)" % (lane_border_multiplier *
                                       no_edge_friction)
    gscript.run_command('r.mapcalc',
                        expression=mapcalc_expression,
                        quiet=True,
                        overwrite=True)

    vcumcost = 'temp_icutlines_vcumcost_%i' % os.getpid()
    temp_maps.append([vcumcost, r])
    vdir = 'temp_icutlines_vdir_%i' % os.getpid()
    temp_maps.append([vdir, r])

    if processes > 1:
        pmemory = memory / 2.0
        rcv = gscript.start_command('r.cost',
                                    input_=vbasemap,
                                    startcoordinates=vstartpoints,
                                    stopcoordinates=vstoppoints,
                                    output=vcumcost,
                                    outdir=vdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)

        rch = gscript.start_command('r.cost',
                                    input_=hbasemap,
                                    startcoordinates=hstartpoints,
                                    stopcoordinates=hstoppoints,
                                    output=hcumcost,
                                    outdir=hdir,
                                    memory=pmemory,
                                    quiet=True,
                                    overwrite=True)
        rcv.wait()
        rch.wait()

    else:
        gscript.run_command('r.cost',
                            input_=vbasemap,
                            startcoordinates=vstartpoints,
                            stopcoordinates=vstoppoints,
                            output=vcumcost,
                            outdir=vdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)

        gscript.run_command('r.cost',
                            input_=hbasemap,
                            startcoordinates=hstartpoints,
                            stopcoordinates=hstoppoints,
                            output=hcumcost,
                            outdir=hdir,
                            memory=memory,
                            quiet=True,
                            overwrite=True)

    hlines = 'temp_icutlines_hlines_%i' % os.getpid()
    temp_maps.append([hlines, r])
    vlines = 'temp_icutlines_vlines_%i' % os.getpid()
    temp_maps.append([vlines, r])

    if processes > 1:
        rdh = gscript.start_command('r.drain',
                                    input_=hcumcost,
                                    direction=hdir,
                                    startcoordinates=hstoppoints,
                                    output=hlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)

        rdv = gscript.start_command('r.drain',
                                    input_=vcumcost,
                                    direction=vdir,
                                    startcoordinates=vstoppoints,
                                    output=vlines,
                                    flags='d',
                                    quiet=True,
                                    overwrite=True)

        rdh.wait()
        rdv.wait()

    else:
        gscript.run_command('r.drain',
                            input_=hcumcost,
                            direction=hdir,
                            startcoordinates=hstoppoints,
                            output=hlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)

        gscript.run_command('r.drain',
                            input_=vcumcost,
                            direction=vdir,
                            startcoordinates=vstoppoints,
                            output=vlines,
                            flags='d',
                            quiet=True,
                            overwrite=True)

    # Combine horizontal and vertical lines
    temp_raster_tile_borders = 'temp_icutlines_raster_tile_borders_%i' % os.getpid()
    temp_maps.append([temp_raster_tile_borders, r])
    gscript.run_command('r.patch',
                        input_=[hlines, vlines],
                        output=temp_raster_tile_borders,
                        quiet=True,
                        overwrite=True)

    gscript.message(_("Creating vector polygons"))

    # Create vector polygons

    # First we need to shrink the region a bit to make sure that all vector
    # points / lines fall within the raster
    gscript.use_temp_region()
    gscript.run_command('g.region',
                        s=region.s + region.nsres,
                        e=region.e - region.ewres,
                        quiet=True)

    region_map = 'temp_icutlines_region_map_%i' % os.getpid()
    temp_maps.append([region_map, v])
    temp_maps.append([region_map, r])
    gscript.run_command('v.in.region',
                        output=region_map,
                        type_='line',
                        quiet=True,
                        overwrite=True)

    gscript.del_temp_region()

    gscript.run_command('v.to.rast',
                        input_=region_map,
                        output=region_map,
                        use='val',
                        type_='line',
                        quiet=True,
                        overwrite=True)

    temp_raster_polygons = 'temp_icutlines_raster_polygons_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons, r])
    gscript.run_command('r.patch',
                        input_=[temp_raster_tile_borders, region_map],
                        output=temp_raster_polygons,
                        quiet=True,
                        overwrite=True)

    temp_raster_polygons_thin = 'temp_icutlines_raster_polygons_thin_%i' % os.getpid()
    temp_maps.append([temp_raster_polygons_thin, r])
    gscript.run_command('r.thin',
                        input_=temp_raster_polygons,
                        output=temp_raster_polygons_thin,
                        quiet=True,
                        overwrite=True)

    # Create a series of temporary map names as we have to go
    # through several steps until we reach the final map.
    temp_vector_polygons1 = 'temp_icutlines_vector_polygons1_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons1, v])
    temp_vector_polygons2 = 'temp_icutlines_vector_polygons2_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons2, v])
    temp_vector_polygons3 = 'temp_icutlines_vector_polygons3_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons3, v])
    temp_vector_polygons4 = 'temp_icutlines_vector_polygons4_%i' % os.getpid()
    temp_maps.append([temp_vector_polygons4, v])

    gscript.run_command('r.to.vect',
                        input_=temp_raster_polygons_thin,
                        output=temp_vector_polygons1,
                        type_='line',
                        flags='t',
                        quiet=True,
                        overwrite=True)

    # Erase all category values from the lines
    gscript.run_command('v.category',
                        input_=temp_vector_polygons1,
                        op='del',
                        cat='-1',
                        output=temp_vector_polygons2,
                        quiet=True,
                        overwrite=True)

    # Transform lines to boundaries
    gscript.run_command('v.type',
                        input_=temp_vector_polygons2,
                        from_type='line',
                        to_type='boundary',
                        output=temp_vector_polygons3,
                        quiet=True,
                        overwrite=True)

    # Add centroids
    gscript.run_command('v.centroids',
                        input_=temp_vector_polygons3,
                        output=temp_vector_polygons4,
                        quiet=True,
                        overwrite=True)

    # If a threshold is given erase polygons that are too small
    if min_tile_size:
        gscript.run_command('v.clean',
                            input_=temp_vector_polygons4,
                            tool=['rmdangle', 'rmarea'],
                            threshold=[-1, min_tile_size],
                            output=tiles,
                            quiet=True,
                            overwrite=True)
    else:
        gscript.run_command('g.copy',
                            vect=[temp_vector_polygons4, tiles],
                            quiet=True,
                            overwrite=True)

    gscript.vector_history(tiles)
Code example #14
File: v.db.addtable.py  Project: rashadkm/grass_cmake
def main():
    vector = options['map']
    table = options['table']
    layer = options['layer']
    columns = options['columns']
    key = options['key']
    
    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    if not grass.find_file(vector, element = 'vector', mapset = mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)
    
    map_name = vector.split('@')[0]
    
    if not table:
        if layer == '1':
            grass.verbose(_("Using vector map name as table name: <%s>") % map_name)
            table = map_name
        else:
            # to avoid tables with identical names on higher layers
            table = "%s_%s" % (map_name, layer)
            grass.verbose(_("Using vector map name extended by layer number as table name: <%s>") % table)
    else:
        grass.verbose(_("Using user specified table name: %s") % table)
    
    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags = 'c')
    grass.verbose(_("Creating new DB connection based on default mapset settings..."))
    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    schema = kv['schema']
    
    # maybe there is already a table linked to the selected layer?
    nuldev = open(os.devnull, 'w')
    try:
        grass.vector_db(map_name, stderr = nuldev)[int(layer)]
        grass.fatal(_("There is already a table linked to layer <%s>") % layer)
    except KeyError:
        pass
    
    # maybe there is already a table with that name?
    tables = grass.read_command('db.tables', flags = 'p', database = database, driver = driver,
                                stderr = nuldev)
    
    if table not in tables.splitlines():
        if columns:
            column_def = [x.strip().lower() for x in columns.strip().split(',')]
        else:
            column_def = []
        
        # if not existing, create it:
        column_def_key = "%s integer" % key
        if column_def_key not in column_def:
            column_def.insert(0, column_def_key)
        column_def = ','.join(column_def)
        
        grass.verbose(_("Creating table with columns (%s)...") % column_def)
        
        sql = "CREATE TABLE %s (%s)" % (table, column_def)
        try:
            grass.run_command('db.execute',
                              database=database, driver=driver, sql=sql)
        except CalledModuleError:
            grass.fatal(_("Unable to create table <%s>") % table)

    # connect the map to the DB:
    if schema:
        table = '{schema}.{table}'.format(schema=schema, table=table)
    grass.run_command('v.db.connect', quiet = True,
                      map = map_name, database = database, driver = driver,
                      layer = layer, table = table, key = key)
    
    # finally we have to add cats into the attribute DB to make modules such as v.what.rast happy:
    # (creates new row for each vector line):
    grass.run_command('v.to.db', map = map_name, layer = layer,
                      option = 'cat', column = key, qlayer = layer)
    
    grass.verbose(_("Current attribute table links:"))
    if grass.verbosity() > 2:
        grass.run_command('v.db.connect', flags = 'p', map = map_name)
    
    # write cmd history:
    grass.vector_history(map_name)
    
    return 0
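
A minimal usage sketch with hypothetical names; when table is omitted, the script derives the table name from the map name (extended by the layer number for layers other than 1), as shown above:

import grass.script as grass

# hypothetical call: create and link a new table with two user-defined columns
grass.run_command('v.db.addtable', map='plots', layer='1',
                  columns='owner varchar(40),area_ha double precision')
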
Code example #15
File: v.db.update.py  Project: sebastic/grass
def main():
    vector = options['map']
    layer = options['layer']
    column = options['column']
    value = options['value']
    qcolumn = options['query_column']
    where = options['where']
    sqlitefile = options['sqliteextra']

    mapset = grass.gisenv()['MAPSET']

    # does map exist in CURRENT mapset?
    if not grass.find_file(vector, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _('There is no table connected to this map. Run v.db.connect or v.db.addtable first.'
              ))

    table = f['table']
    database = f['database']
    driver = f['driver']

    # check for SQLite backend for extra functions
    if sqlitefile and driver != "sqlite":
        grass.fatal(_("Use of libsqlitefunctions only with SQLite backend"))
    if driver == "sqlite" and sqlitefile:
        if not os.access(sqlitefile, os.R_OK):
            grass.fatal(_("File <%s> not found") % sqlitefile)

    # checking column types
    try:
        coltype = grass.vector_columns(vector, layer)[column]['type']
    except KeyError:
        grass.fatal(_('Column <%s> not found') % column)

    if qcolumn:
        if value:
            grass.fatal(_('<value> and <qcolumn> are mutually exclusive'))
        # special case: we copy from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_('Either <value> or <qcolumn> must be given'))
        # we insert a value
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    # SQLite: preload extra functions from extension lib if provided by user
    if sqlitefile:
        sqliteload = "SELECT load_extension('%s');\n" % sqlitefile
        cmd = sqliteload + cmd

    grass.verbose("SQL: \"%s\"" % cmd)
    grass.write_command('db.execute',
                        input='-',
                        database=database,
                        driver=driver,
                        stdin=cmd)

    # write cmd history:
    grass.vector_history(vector)

    return 0
Code example #16
def main():
    """Process command line parameters and update the table"""
    options, flags = gs.parser()

    vector = options["map"]
    layer = options["layer"]
    where = options["where"]
    column = options["column"]
    expression = options["expression"]
    condition = options["condition"]
    functions_file = options["functions"]

    # Map needs to be in the current mapset
    mapset = gs.gisenv()["MAPSET"]
    if not gs.find_file(vector, element="vector", mapset=mapset)["file"]:
        gs.fatal(
            _("Vector map <{vector}> does not exist or is not in the current mapset"
              "(<{mapset}>) and therefore it cannot be modified").format(
                  **locals()))

    # Map+layer needs to have a table connected
    try:
        # TODO: Support @OGR vector maps? Probably not supported by db.execute anyway.
        db_info = gs.vector_db(vector)[int(layer)]
    except KeyError:
        gs.fatal(
            _("There is no table connected to map <{vector}> (layer <{layer}>)."
              " Use v.db.connect or v.db.addtable to add it.").format(
                  **locals()))
    table = db_info["table"]
    database = db_info["database"]
    driver = db_info["driver"]
    columns = gs.vector_columns(vector, layer)

    # Check that column exists
    try:
        column_info = columns[column]
    except KeyError:
        gs.fatal(
            _("Column <{column}> not found. Use v.db.addcolumn to create it.").
            format(column=column))
    column_type = column_info["type"]

    # Check that optional function file exists
    if functions_file:
        if not os.access(functions_file, os.R_OK):
            gs.fatal(_("File <{file}> not found").format(file=functions_file))

    # Define Python functions
    # Here we need the full-deal eval and exec functions and can't use less
    # general alternatives such as ast.literal_eval.
    def expression_function(**kwargs):
        return eval(expression, globals(), kwargs)  # pylint: disable=eval-used

    def condition_function(**kwargs):
        return eval(condition, globals(), kwargs)  # pylint: disable=eval-used

    # TODO: Add error handling for failed imports.
    if options["packages"]:
        packages = options["packages"].split(",")
        for package in packages:
            # pylint: disable=exec-used
            exec(f"import {package}", globals(), globals())
            if flags["s"]:
                exec(f"from {package} import *", globals(), globals())

    # TODO: Add error handling for invalid syntax.
    if functions_file:
        with open(functions_file) as file:
            exec(file.read(), globals(), globals())  # pylint: disable=exec-used

    # Get table contents
    if not where:
        # The condition needs to be None, as an empty string would be passed through.
        where = None
    if gs.version()["version"] < "7.9":
        sep = "|"  # Only one char sep for Python csv package.
        null = "NULL"
        csv_text = gs.read_command(
            "v.db.select",
            map=vector,
            layer=layer,
            separator=sep,
            null=null,
            where=where,
        )
        table_contents = csv_loads(csv_text, delimeter=sep, null=null)
    else:
        # TODO: XXX is a workaround for a bug in v.db.select -j
        json_text = gs.read_command("v.db.select",
                                    map=vector,
                                    layer=layer,
                                    flags="j",
                                    null="XXX",
                                    where=where)
        table_contents = json.loads(json_text)

    cmd = python_to_transaction(
        table=table,
        table_contents=table_contents,
        column=column,
        column_type=column_type,
        expression=expression,
        expression_function=expression_function,
        condition=condition,
        condition_function=condition_function,
        ensure_lowercase=not flags["u"],
    )

    # Messages
    if len(cmd) == 2:
        gs.message(
            "No rows to update. Try a different SQL where or Python condition."
        )
    elif len(cmd) > 2:
        # First and last statement
        gs.verbose(f'Using SQL: "{cmd[1]}...{cmd[-2]}"')

    # The newline is needed for successful execution/reading of many statements.
    # TODO: Add error handling when there is a syntax error due to wrongly
    # generated SQL statement and/or sanitize the value in update more.
    gs.write_command("db.execute",
                     input="-",
                     database=database,
                     driver=driver,
                     stdin="\n".join(cmd))

    gs.vector_history(vector)
Code example #17
def main():
    force = flags['f']
    map = options['map']
    table = options['table']
    layer = options['layer']

    # do some paranoia tests as well:
    f = gscript.vector_layer_db(map, layer)

    if not table:
        # Removing table name connected to selected layer
        table = f['table']
        if not table:
            gscript.fatal(_("No table assigned to layer <%s>") % layer)
    else:
        # Removing user specified table
        existingtable = f['table']
        if existingtable != table:
            gscript.fatal(
                _("User selected table <%s> but the table <%s> "
                  "is linked to layer <%s>") % (table, existingtable, layer))

    # we use the DB settings selected layer
    database = f['database']
    driver = f['driver']

    gscript.message(
        _("Removing table <%s> linked to layer <%s> of vector"
          " map <%s>") % (table, layer, map))

    if not force:
        gscript.message(
            _("You must use the -f (force) flag to actually "
              "remove the table. Exiting."))
        gscript.message(_("Leaving map/table unchanged."))
        sys.exit(0)

    gscript.message(_("Dropping table <%s>...") % table)

    try:
        gscript.write_command('db.execute',
                              stdin="DROP TABLE %s" % table,
                              input='-',
                              database=database,
                              driver=driver)
    except CalledModuleError:
        gscript.fatal(_("An error occurred while running db.execute"))

    gscript.run_command('v.db.connect', flags='d', map=map, layer=layer)

    gscript.message(_("Current attribute table link(s):"))
    # silently test first to avoid confusing error messages
    nuldev = open(os.devnull, 'w')
    try:
        gscript.run_command('v.db.connect',
                            flags='p',
                            map=map,
                            quiet=True,
                            stdout=nuldev,
                            stderr=nuldev)
    except CalledModuleError:
        gscript.message(_("(No database links remaining)"))
    else:
        gscript.run_command('v.db.connect', flags='p', map=map)

    # write cmd history:
    gscript.vector_history(map)
Code example #18
def main():
    vector = options["map"]
    layer = options["layer"]
    column = options["column"]
    value = options["value"]
    qcolumn = options["query_column"]
    where = options["where"]
    sqlitefile = options["sqliteextra"]

    mapset = grass.gisenv()["MAPSET"]

    # does map exist in CURRENT mapset?
    if not grass.find_file(vector, element="vector", mapset=mapset)["file"]:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(
            _(
                "There is no table connected to this map. Run v.db.connect or v.db.addtable first."
            )
        )

    table = f["table"]
    database = f["database"]
    driver = f["driver"]

    # check for SQLite backend for extra functions
    if sqlitefile and driver != "sqlite":
        grass.fatal(_("Use of libsqlitefunctions only with SQLite backend"))
    if driver == "sqlite" and sqlitefile:
        if not os.access(sqlitefile, os.R_OK):
            grass.fatal(_("File <%s> not found") % sqlitefile)

    # Check column existence and get its type.
    all_columns = grass.vector_columns(vector, layer)
    coltype = None
    for column_name, column_record in all_columns.items():
        if column.lower() == column_name.lower():
            coltype = column_record["type"]
            break
    if not coltype:
        grass.fatal(_("Column <%s> not found") % column)

    if qcolumn:
        if value:
            grass.fatal(_("<value> and <qcolumn> are mutually exclusive"))
        # special case: we copy from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_("Either <value> or <qcolumn> must be given"))
        # we insert a value
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    # SQLite: preload extra functions from extension lib if provided by user
    if sqlitefile:
        sqliteload = "SELECT load_extension('%s');\n" % sqlitefile
        cmd = sqliteload + cmd

    grass.verbose('SQL: "%s"' % cmd)
    grass.write_command(
        "db.execute", input="-", database=database, driver=driver, stdin=cmd
    )

    # write cmd history:
    grass.vector_history(vector)

    return 0
Code example #19
File: v.db.dropcolumn.py  Project: rkrug/grass-ci
def main():
    map = options['map']
    layer = options['layer']
    columns = options['columns'].split(',')

    mapset = grass.gisenv()['MAPSET']

    # does map exist in CURRENT mapset?
    if not grass.find_file(map, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    f = grass.vector_layer_db(map, layer)

    table = f['table']
    keycol = f['key']
    database = f['database']
    driver = f['driver']

    if not table:
        grass.fatal(_("There is no table connected to the input vector map. "
                      "Unable to delete any column. Exiting."))

    if keycol in columns:
        grass.fatal(_("Unable to delete <%s> column as it is needed to keep table <%s> "
                      "connected to the input vector map <%s>") %
                    (keycol, table, map))

    for column in columns:
        if column not in grass.vector_columns(map, layer):
            grass.warning(_("Column <%s> not found in table <%s>. Skipped") % (column, table))
            continue

        if driver == "sqlite":
            # echo "Using special trick for SQLite"
            # http://www.sqlite.org/faq.html#q11
            colnames = []
            coltypes = []
            for f in grass.db_describe(table, database=database, driver=driver)['cols']:
                if f[0] == column:
                    continue
                colnames.append(f[0])
                coltypes.append("%s %s" % (f[0], f[1]))

            colnames = ", ".join(colnames)
            coltypes = ", ".join(coltypes)

            cmds = [
                "BEGIN TRANSACTION",
                "CREATE TEMPORARY TABLE ${table}_backup(${coldef})",
                "INSERT INTO ${table}_backup SELECT ${colnames} FROM ${table}",
                "DROP TABLE ${table}",
                "CREATE TABLE ${table}(${coldef})",
                "INSERT INTO ${table} SELECT ${colnames} FROM ${table}_backup",
                "CREATE UNIQUE INDEX ${table}_cat ON ${table} (${keycol} )",
                "DROP TABLE ${table}_backup",
                "COMMIT"
            ]
            tmpl = string.Template(';\n'.join(cmds))
            sql = tmpl.substitute(table=table, coldef=coltypes, colnames=colnames, keycol=keycol)
        else:
            sql = "ALTER TABLE %s DROP COLUMN %s" % (table, column)

        try:
            grass.write_command('db.execute', input='-', database=database, driver=driver,
                                stdin=sql)
        except CalledModuleError:
            grass.fatal(_("Deleting column failed"))

    # write cmd history:
    grass.vector_history(map)
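For reference, a hedged usage sketch of the module whose main() is shown above; the map and column names are hypothetical and a running GRASS session with an attached attribute table is assumed:

import grass.script as grass

# drop two ordinary columns; the key column (usually "cat") is refused above
grass.run_command("v.db.dropcolumn", map="roads", layer="1",
                  columns="old_id,notes")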
Code example #20
def main():
    options, flags = gscript.parser()

    input_raster = options["input"]
    points = options["output"]
    if options["sampled"]:
        sampled_rasters = options["sampled"].split(",")
    else:
        sampled_rasters = []
    npoints = [int(num) for num in options["npoints"].split(",")]

    seed = None
    if options["random_seed"]:
        seed = int(options["random_seed"])
    flag_s = flags["s"]

    # we clean up mask too, so register after we know that mask is not present
    atexit.register(cleanup)

    temp_name = "tmp_r_sample_category_{}_".format(os.getpid())
    points_nocats = temp_name + "points_nocats"
    TMP.append(points_nocats)

    # input must be CELL
    rdescribe = gscript.read_command("r.stats",
                                     flags="lnc",
                                     input=input_raster,
                                     separator="pipe")
    catlab = rdescribe.splitlines()
    categories = list(map(int, [z.split("|")[0] for z in catlab]))
    pixlab = dict([z.split("|")[::2] for z in catlab])
    catlab = dict([z.split("|")[:2] for z in catlab])
    if len(npoints) == 1:
        npoints = npoints * len(categories)
    else:
        if len(categories) != len(npoints):
            gscript.fatal(
                _("Number of categories in raster does not match the number of provided sampling points numbers."
                  ))

    # Create sample points per category
    vectors = []
    for i, cat in enumerate(categories):
        # skip generating points if none are required
        if npoints[i] == 0:
            continue

        # Check number of cells in category
        nrc = int(pixlab[str(cat)])
        if nrc < npoints[i]:
            if flag_s:
                gscript.info(
                    _("Not enough points in category {cat}. Skipping").format(
                        cat=categories[i]))
                continue
            gscript.warning(
                _("Number of raster cells in category {cat} < {np}. Sampling {n} points"
                  ).format(cat=categories[i], np=npoints[i], n=nrc))
            npoints[i] = nrc

        gscript.info(
            _("Selecting {n} sampling locations at category {cat}...").format(
                n=npoints[i], cat=cat))

        # Create reclass map with only pixels of current category
        rc_rule = "{0} = {0}\n* = NULL".format(cat)
        gscript.write_command(
            "r.reclass",
            input=input_raster,
            output=temp_name,
            rules="-",
            stdin=rc_rule,
            overwrite=True,
            quiet=True,
        )

        if temp_name not in TMP:
            TMP.append(temp_name)

        # Create the points
        vector = temp_name + str(cat)
        vectors.append(vector)
        if seed is None:
            gscript.run_command(
                "r.random",
                input=temp_name,
                npoints=npoints[i],
                vector=vector,
                flags="s",
                quiet=True,
            )
        else:
            gscript.run_command(
                "r.random",
                input=temp_name,
                npoints=npoints[i],
                vector=vector,
                seed=seed,
                quiet=True,
            )
        TMP.append(vector)

    gscript.run_command("v.patch", input=vectors, output=points, quiet=True)
    # remove cats and add them again so that they are unique
    gscript.run_command(
        "v.category",
        input=points,
        option="del",
        cat=-1,
        output=points_nocats,
        quiet=True,
    )
    # overwrite to reuse the map
    gscript.run_command(
        "v.category",
        input=points_nocats,
        option="add",
        output=points,
        overwrite=True,
        quiet=True,
    )

    # Sample layers
    columns = []
    column_names = []
    sampled_rasters.insert(0, input_raster)
    for raster in sampled_rasters:
        column = escape_sql_column(strip_mapset(raster).lower())
        column_names.append(column)
        datatype = gscript.parse_command("r.info", flags="g",
                                         map=raster)["datatype"]
        if datatype == "CELL":
            datatype = "integer"
        else:
            datatype = "double precision"
        columns.append("{column} {datatype}".format(column=column,
                                                    datatype=datatype))
    gscript.run_command("v.db.addtable",
                        map=points,
                        columns=",".join(columns),
                        quiet=True)
    for raster, column in zip(sampled_rasters, column_names):
        gscript.info(_("Sampling raster map %s...") % raster)
        gscript.run_command(
            "v.what.rast",
            map=points,
            type="point",
            raster=raster,
            column=column,
            quiet=True,
        )

    # Add category labels
    if not list(set(catlab.values()))[0] and len(set(catlab.values())) == 1:
        gscript.verbose(_("There are no category labels in the raster to add"))
    else:
        gscript.run_command("v.db.addcolumn",
                            map=points,
                            columns="label varchar(250)")
        table_name = escape_sql_column(strip_mapset(points).lower())
        for i in categories:
            sqlstat = ("UPDATE " + table_name + " SET label='" +
                       catlab[str(i)] + "' WHERE " + column_names[0] + " == " +
                       str(i))
            gscript.run_command("db.execute", sql=sqlstat)

    gscript.vector_history(points, replace=True)
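The temporary-map prefix suggests this is the r.sample.category addon. A minimal usage sketch under that assumption (hypothetical raster names, the addon installed via g.extension, and a running GRASS session):

import grass.script as gscript

# draw 100 random points in every category of a CELL raster and
# sample one additional raster at those points
gscript.run_command("r.sample.category", input="landclass",
                    output="landclass_points", npoints="100",
                    sampled="elevation", random_seed=42)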
Code example #21
def main(options, flags):

    # Variables
    raster_cat = options["raster"]
    raster_cat = raster_cat.split(",")
    raster_cat_names = [z.split("@")[0] for z in raster_cat]
    raster_cat_names = [x.lower() for x in raster_cat_names]
    raster_cont = options["raster2"]
    raster_cont = raster_cont.split(",")
    raster_cont_names = [z.split("@")[0] for z in raster_cont]
    raster_cont_names = [x.lower() for x in raster_cont_names]
    input_vector = options["vector"]
    output_vector = options["output"]

    if flags["o"]:
        tmp_layer = tmpname()
        Module("g.copy", vector=[input_vector, output_vector], quiet=True)
    else:
        tmp_layer = output_vector
    # Create vector with column names
    base_column_names = ["x double precision, y double precision, label integer"]
    for i, raster_name in enumerate(raster_cat):
        data_types = gs.parse_command("r.info", flags="g", map=raster_name, quiet=True)[
            "datatype"
        ]
        if data_types == "CELL":
            base_column_names.append(
                "{0}_ID integer, {0} varchar(255)".format(raster_cat_names[i])
            )
        else:
            base_column_names.append(
                "ID_{0} double precision, {0} varchar(255)".format(raster_cat_names[i])
            )
    column_names = ",".join(base_column_names)

    # Get raster points of raster layers with labels
    # Export point map to text file first and use that as input in r.what
    point_to_ascii = Module(
        "v.out.ascii",
        input=input_vector,
        format="point",
        separator="space",
        precision=12,
        stdout_=PIPE,
    ).outputs.stdout
    raster_cats = Module(
        "r.what", flags="f", map=raster_cat, stdin_=point_to_ascii, stdout_=PIPE
    ).outputs.stdout
    ascii_to_point = raster_cats.replace("|*|", "||").replace("\r", "")
    Module(
        "v.in.ascii",
        input="-",
        stdin_=ascii_to_point,
        output=tmp_layer,
        columns=column_names,
        separator="pipe",
        format="point",
        x=1,
        y=2,
        quiet=True,
    )

    # In- or exclude coordinates
    if not flags["c"]:
        Module("v.db.dropcolumn", map=tmp_layer, columns=["x", "y"])
    # Get raster points of raster layers without labels (optional)
    if options["raster2"]:
        for j, raster_cont_values in enumerate(raster_cont):
            Module(
                "v.what.rast",
                map=tmp_layer,
                raster=raster_cont_values,
                column=raster_cont_names[j],
                quiet=True,
            )
    # Join table of original layer (and drop label columns)

    if flags["o"]:

        cols = Module("db.columns", table=tmp_layer, stdout_=PIPE).outputs.stdout.split(
            "\n"
        )
        cols.pop()
        del cols[:1]

        sqlstat = "CREATE INDEX {0}_label ON {0} (label);".format(tmp_layer)
        Module("db.execute", sql=sqlstat)
        Module(
            "v.db.join",
            map=output_vector,
            column="cat",
            other_table=tmp_layer,
            other_column="label",
            subset_columns=cols,
        )
    # Remove label column
    Module("v.db.dropcolumn", map=output_vector, columns=["label"])

    # Write metadata
    gs.vector_history(output_vector, replace=True)
Code example #22
File: v.dissolve.py  Project: rashadkm/grass_cmake
            coltype = grass.vector_columns(input, layer)[column]
        except KeyError:
            grass.fatal(_('Column <%s> not found') % column)

        if coltype['type'] not in ('INTEGER', 'SMALLINT', 'CHARACTER', 'TEXT'):
            grass.fatal(_("Key column must be of type integer or string"))

        f = grass.vector_layer_db(input, layer)

        table = f['table']

        tmpfile = '%s_%s' % (output, tmp)

        try:
            grass.run_command('v.reclass', input=input, output=tmpfile,
                              layer=layer, column=column)
            grass.run_command('v.extract', flags='d', input=tmpfile,
                              output=output, type='area', layer=layer)
        except CalledModuleError as e:
            grass.fatal(_("Final extraction steps failed."
                          " Check above error messages and"
                          " see following details:\n%s") % e)

    # write cmd history:
    grass.vector_history(output)

if __name__ == "__main__":
    options, flags = grass.parser()
    atexit.register(cleanup)
    main()
Code example #23
def main():
    vector = options["map"]
    table = options["table"]
    layer = options["layer"]
    columns = options["columns"]
    key = options["key"]

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()["MAPSET"]
    if not grass.find_file(vector, element="vector", mapset=mapset)["file"]:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    map_name = vector.split("@")[0]

    if not table:
        if layer == "1":
            grass.verbose(
                _("Using vector map name as table name: <%s>") % map_name)
            table = map_name
        else:
            # to avoid tables with identical names on higher layers
            table = "%s_%s" % (map_name, layer)
            grass.verbose(
                _("Using vector map name extended by layer number as table name: <%s>"
                  ) % table)
    else:
        grass.verbose(_("Using user specified table name: %s") % table)

    # check if DB parameters are set, and if not set them.
    grass.run_command("db.connect", flags="c", quiet=True)
    grass.verbose(
        _("Creating new DB connection based on default mapset settings..."))
    kv = grass.db_connection()
    database = kv["database"]
    driver = kv["driver"]
    schema = kv["schema"]

    database2 = database.replace("$MAP/", map_name + "/")

    # maybe there is already a table linked to the selected layer?
    nuldev = open(os.devnull, "w")
    try:
        grass.vector_db(map_name, stderr=nuldev)[int(layer)]
        grass.fatal(_("There is already a table linked to layer <%s>") % layer)
    except KeyError:
        pass

    # maybe there is already a table with that name?
    tables = grass.read_command("db.tables",
                                flags="p",
                                database=database2,
                                driver=driver,
                                stderr=nuldev)
    tables = decode(tables)

    if table not in tables.splitlines():
        colnames = []
        column_def = []
        if columns:
            column_def = []
            for x in " ".join(columns.split()).split(","):
                colname = x.lower().split()[0]
                if colname in colnames:
                    grass.fatal(
                        _("Duplicate column name '%s' not allowed") % colname)
                colnames.append(colname)
                column_def.append(x)

        # if not existing, create it:
        if key not in colnames:
            column_def.insert(0, "%s integer" % key)
        column_def = ",".join(column_def)

        grass.verbose(_("Creating table with columns (%s)...") % column_def)

        sql = "CREATE TABLE %s (%s)" % (table, column_def)
        try:
            grass.run_command("db.execute",
                              database=database2,
                              driver=driver,
                              sql=sql)
        except CalledModuleError:
            grass.fatal(_("Unable to create table <%s>") % table)

    # connect the map to the DB:
    if schema:
        table = "{schema}.{table}".format(schema=schema, table=table)
    grass.verbose(_("Connecting new table to vector map <%s>...") % map_name)
    grass.run_command(
        "v.db.connect",
        quiet=True,
        map=map_name,
        database=database,
        driver=driver,
        layer=layer,
        table=table,
        key=key,
    )

    # finally we have to add cats into the attribute DB to make
    # modules such as v.what.rast happy: (creates new row for each
    # vector line):
    try:
        grass.run_command(
            "v.to.db",
            overwrite=True,
            map=map_name,
            layer=layer,
            option="cat",
            column=key,
            qlayer=layer,
        )
    except CalledModuleError:
        # remove link
        grass.run_command("v.db.connect",
                          quiet=True,
                          flags="d",
                          map=map_name,
                          layer=layer)
        return 1

    grass.verbose(_("Current attribute table links:"))
    if grass.verbosity() > 2:
        grass.run_command("v.db.connect", flags="p", map=map_name)

    # write cmd history:
    grass.vector_history(map_name)

    return 0
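A short usage sketch of v.db.addtable as implemented above (hypothetical map name, running GRASS session assumed); the key column "cat" is added automatically when it is not listed:

import grass.script as grass

grass.run_command("v.db.addtable", map="roads", layer="1",
                  columns="name varchar(50),length_km double precision")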
Code example #24
def main():
    # Get the options
    input = options["input"]
    timestamp_column = options["timestamp_column"]
    columns = options["column"]
    layer = options["layer"]
    where = options["where"]
    strds = options["strds"]
    tempwhere = options["t_where"]
    i_flag = flags["i"]

    if where == "" or where == " " or where == "\n":
        where = None

    # overwrite = grass.overwrite()

    # Set verbosity level
    # quiet = True
    # if grass.verbosity() > 2:
    #     quiet = False

    grass.warning(_('This addon is experimental!'))

    # Check DB connection for input vector map
    dbcon = grass.vector_layer_db(input, layer)
    # Check the number of sample strds and the number of columns
    strds_names = strds.split(",")
    column_names = columns.split(",")
    if not len(column_names) == len(strds_names):
        grass.fatal(_('Number of columns and number of STRDS does not match.'))

    # Check type of timestamp column
    cols = grass.vector_columns(input, layer=layer)
    if timestamp_column not in cols.keys():
        grass.fatal(
            _('Could not find column {} \
                    in table connected to vector map {} \
                    at layer {}'.format(timestamp_column, input, layer)))
    if cols[timestamp_column]['type'] != 'DATE':
        if dbcon['driver'] != 'sqlite':
            # Note that SQLite does not have a DATE datatype and
            # and an index does not significantly speedup the process
            # (at least not with a couple of 100 points)
            grass.warning(
                _('Timestamp column is of type {}. \
                            It is recommended to use DATE type with an index. \
                            '.format(cols[timestamp_column]['type'])))

    # Make sure the temporal database exists
    tgis.init()
    # We need a database interface
    dbif = tgis.SQLDatabaseInterfaceConnection()
    dbif.connect()

    # Limit temporal extent to extent of points if no tempwhere is given
    if not tempwhere:
        extent = []
        for stat in ('min', 'max'):
            tsql = "SELECT {}({}) FROM {}".format(stat, timestamp_column,
                                                  dbcon['table'])
            extent.append(grass.read_command('db.select', flags='c', sql=tsql))

        grass.verbose(
            _('Temporal extent of vector points map is \
                      {} to {}'.format(extent[0], extent[1])))
    else:
        tempwhere = '({}) AND '.format(tempwhere)

    # Loop over STRDS
    counter = 0
    for strds_name in strds_names:

        cur_strds = tgis.open_old_stds(strds_name, "strds", dbif)

        granu = cur_strds.get_granularity()
        start_time = tgis.datetime_math.check_datetime_string(extent[0])
        start_gran = tgis.datetime_math.adjust_datetime_to_granularity(
            start_time, granu).isoformat()
        tempwhere += "(end_time > '{}' and start_time <= '{}')".format(
            start_gran, extent[1])  # needs to be set properly

        # Get info on registered maps in STRDS
        rows = cur_strds.get_registered_maps("name,mapset,start_time,end_time",
                                             tempwhere, "start_time", dbif)

        # Check temporal type and
        # define sampling function to use
        # becomes relevant when temporal type relative gets implemented
        if cur_strds.is_time_relative():
            grass.fatal(
                _('Sorry, STRDS of relative temporal type is not (yet) supported'
                  ))
            sample = sample_relative
        else:
            sample = sample_absolute

        # Check if there are raster maps to sample from that fullfill
        # temporal conditions
        if not rows and not tempwhere:
            dbif.close()
            grass.fatal(
                _("Space time raster dataset <%s> is empty".format(
                    cur_strds.get_id())))
        elif not rows and tempwhere:
            dbif.close()
            grass.fatal(
                _("No maps selected from Space time raster dataset <%s>, \
                          or dataset is empty".format(cur_strds.get_id())))

        # Include temporal condition into where clause
        where_clause = '({}) AND '.format(where) if where else ''

        # Loop over registered maps in STRDS
        row_number = 0
        for row in rows:
            # If r.what had a where option, r.what could be used to
            # collect raster values (without interpolation)
            # in a ParallelModuleQueue to collect values using multiple
            # cores and then upload results in one operation

            sample(input, layer, timestamp_column, column_names[counter], row,
                   where_clause, i_flag)

            row_number += 1
            grass.percent(row_number, len(rows), 3)
        counter = counter + 1

    dbif.close()
    grass.vector_history(input)
Code example #25
File: v.db.addtable.py  Project: rkrug/grass-ci
def main():
    vector = options['map']
    table = options['table']
    layer = options['layer']
    columns = options['columns']
    key = options['key']

    # does map exist in CURRENT mapset?
    mapset = grass.gisenv()['MAPSET']
    if not grass.find_file(vector, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    map_name = vector.split('@')[0]

    if not table:
        if layer == '1':
            grass.verbose(_("Using vector map name as table name: <%s>") % map_name)
            table = map_name
        else:
            # to avoid tables with identical names on higher layers
            table = "%s_%s" % (map_name, layer)
            grass.verbose(
                _("Using vector map name extended by layer number as table name: <%s>") %
                table)
    else:
        grass.verbose(_("Using user specified table name: %s") % table)

    # check if DB parameters are set, and if not set them.
    grass.run_command('db.connect', flags='c')
    grass.verbose(_("Creating new DB connection based on default mapset settings..."))
    kv = grass.db_connection()
    database = kv['database']
    driver = kv['driver']
    schema = kv['schema']

    # maybe there is already a table linked to the selected layer?
    nuldev = open(os.devnull, 'w')
    try:
        grass.vector_db(map_name, stderr=nuldev)[int(layer)]
        grass.fatal(_("There is already a table linked to layer <%s>") % layer)
    except KeyError:
        pass

    # maybe there is already a table with that name?
    tables = grass.read_command('db.tables', flags='p', database=database, driver=driver,
                                stderr=nuldev)

    if not table in tables.splitlines():
        if columns:
            column_def = [x.strip().lower() for x in columns.strip().split(',')]
        else:
            column_def = []

        # if not existing, create it:
        column_def_key = "%s integer" % key
        if column_def_key not in column_def:
            column_def.insert(0, column_def_key)
        column_def = ','.join(column_def)

        grass.verbose(_("Creating table with columns (%s)...") % column_def)

        sql = "CREATE TABLE %s (%s)" % (table, column_def)
        try:
            grass.run_command('db.execute',
                              database=database, driver=driver, sql=sql)
        except CalledModuleError:
            grass.fatal(_("Unable to create table <%s>") % table)

    # connect the map to the DB:
    if schema:
        table = '{schema}.{table}'.format(schema=schema, table=table)
    grass.run_command('v.db.connect', quiet=True,
                      map=map_name, database=database, driver=driver,
                      layer=layer, table=table, key=key)

    # finally we have to add cats into the attribute DB to make modules such as v.what.rast happy:
    # (creates new row for each vector line):
    grass.run_command('v.to.db', map=map_name, layer=layer,
                      option='cat', column=key, qlayer=layer)

    grass.verbose(_("Current attribute table links:"))
    if grass.verbosity() > 2:
        grass.run_command('v.db.connect', flags='p', map=map_name)

    # write cmd history:
    grass.vector_history(map_name)

    return 0
Code example #26
File: v.db.update.py  Project: rkrug/grass-ci
def main():
    vector = options["map"]
    layer = options["layer"]
    column = options["column"]
    value = options["value"]
    qcolumn = options["query_column"]
    where = options["where"]
    sqlitefile = options["sqliteextra"]

    mapset = grass.gisenv()["MAPSET"]

    # does map exist in CURRENT mapset?
    if not grass.find_file(vector, element="vector", mapset=mapset)["file"]:
        grass.fatal(_("Vector map <%s> not found in current mapset") % vector)

    try:
        f = grass.vector_db(vector)[int(layer)]
    except KeyError:
        grass.fatal(_("There is no table connected to this map. Run v.db.connect or v.db.addtable first."))

    table = f["table"]
    database = f["database"]
    driver = f["driver"]

    # check for SQLite backend for extra functions
    if sqlitefile and driver != "sqlite":
        grass.fatal(_("Use of libsqlitefunctions only with SQLite backend"))
    if driver == "sqlite" and sqlitefile:
        if not os.access(sqlitefile, os.R_OK):
            grass.fatal(_("File <%s> not found") % sqlitefile)

    # checking column types
    try:
        coltype = grass.vector_columns(vector, layer)[column]["type"]
    except KeyError:
        grass.fatal(_("Column <%s> not found") % column)

    if qcolumn:
        if value:
            grass.fatal(_("<value> and <qcolumn> are mutually exclusive"))
        # special case: we copy from another column
        value = qcolumn
    else:
        if not value:
            grass.fatal(_("Either <value> or <qcolumn> must be given"))
        # we insert a value
        if coltype.upper() not in ["INTEGER", "DOUBLE PRECISION"]:
            value = "'%s'" % value

    cmd = "UPDATE %s SET %s=%s" % (table, column, value)
    if where:
        cmd += " WHERE " + where

    # SQLite: preload extra functions from extension lib if provided by user
    if sqlitefile:
        sqliteload = "SELECT load_extension('%s');\n" % sqlitefile
        cmd = sqliteload + cmd

    grass.verbose('SQL: "%s"' % cmd)
    grass.write_command("db.execute", input="-", database=database, driver=driver, stdin=cmd)

    # write cmd history:
    grass.vector_history(vector)

    return 0
Code example #27
def main():
    map = options['map']
    layer = options['layer']
    column = options['column']
    otable = options['otable']
    ocolumn = options['ocolumn']
    if options['scolumns']:
        scolumns = options['scolumns'].split(',')
    else:
        scolumns = None

    f = grass.vector_layer_db(map, layer)

    maptable = f['table']
    database = f['database']
    driver = f['driver']

    if driver == 'dbf':
        grass.fatal(_("JOIN is not supported for tables stored in DBF format"))

    if not maptable:
        grass.fatal(_("There is no table connected to this map. Unable to join any column."))

    # check if column is in map table
    if column not in grass.vector_columns(map, layer):
        grass.fatal(_("Column <%s> not found in table <%s>") % (column, maptable))

    # describe other table
    all_cols_ot = grass.db_describe(otable, driver=driver, database=database)['cols']

    # check if ocolumn is in other table
    if ocolumn not in [ocol[0] for ocol in all_cols_ot]:
        grass.fatal(_("Column <%s> not found in table <%s>") % (ocolumn, otable))

    # determine columns subset from other table
    if not scolumns:
        # select all columns from other table
        cols_to_add = all_cols_ot
    else:
        cols_to_add = []
        # check if scolumns exist in the other table
        for scol in scolumns:
            found = False
            for col_ot in all_cols_ot:
                if scol == col_ot[0]:
                    found = True
                    cols_to_add.append(col_ot)
                    break
            if not found:
                grass.warning(_("Column <%s> not found in table <%s>.") % (scol, otable))

    all_cols_tt = grass.vector_columns(map, int(layer)).keys()

    select = "SELECT $colname FROM $otable WHERE $otable.$ocolumn=$table.$column"
    template = string.Template("UPDATE $table SET $colname=(%s);" % select)

    for col in cols_to_add:
        # skip the vector column which is used for join
        colname = col[0]
        if colname == column:
            continue
        # Sqlite 3 does not support the precision number any more
        if len(col) > 2 and driver != "sqlite":
            coltype = "%s(%s)" % (col[1], col[2])
        else:
            coltype = "%s" % col[1]

        colspec = "%s %s" % (colname, coltype)

        # add only the new column to the table
        if colname not in all_cols_tt:
            if grass.run_command('v.db.addcolumn', map=map, columns=colspec, layer=layer) != 0:
                grass.fatal(_("Error creating column <%s>") % colname)

        stmt = template.substitute(table=maptable, column=column,
                                   otable=otable, ocolumn=ocolumn,
                                   colname=colname)
        grass.debug(stmt, 1)
        grass.verbose(_("Updating column <%s> of vector map <%s>...") % (colname, map))
        if grass.write_command('db.execute', stdin=stmt, input='-',
                               database=database, driver=driver) != 0:
            grass.fatal(_("Error filling column <%s>") % colname)

    # write cmd history
    grass.vector_history(map)

    return 0
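A minimal usage sketch matching this script's option names (otable/ocolumn/scolumns; newer releases call them other_table/other_column/subset_columns, as seen in example #21). The map and table names are hypothetical and a running GRASS session is assumed:

import grass.script as grass

# join two columns from an external lookup table into the map's table
grass.run_command("v.db.join", map="zipcodes", layer="1", column="cat",
                  otable="census_data", ocolumn="cat_id",
                  scolumns="population,income")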
Code example #28
def main():
    input = options["input"]
    output = options["output"]
    column = options["column"]
    ftype = options["type"]
    xtiles = int(options["x"])
    ytiles = int(options["y"])

    rtvflags = ""
    for key in "sbtvz":
        if flags[key]:
            rtvflags += key

    # check options
    if xtiles <= 0:
        grass.fatal(_("Number of tiles in x direction must be > 0"))
    if ytiles <= 0:
        grass.fatal(_("Number of tiles in y direction must be > 0"))
    if grass.find_file(name=input)["name"] == "":
        grass.fatal(_("Input raster %s not found") % input)

    grass.use_temp_region()
    curr = grass.region()
    width = int(curr["cols"] / xtiles)
    if width <= 1:
        grass.fatal("The requested number of tiles in x direction is too large")
    height = int(curr["rows"] / ytiles)
    if height <= 1:
        grass.fatal("The requested number of tiles in y direction is too large")

    do_clip = False
    overlap = 0
    if flags["s"] and ftype == "area":
        do_clip = True
        overlap = 2

    ewres = curr["ewres"]
    nsres = curr["nsres"]
    xoverlap = overlap * ewres
    yoverlap = overlap * nsres
    xoverlap2 = (overlap / 2) * ewres
    yoverlap2 = (overlap / 2) * nsres

    e = curr["e"]
    w = curr["w"] + xoverlap
    if w >= e:
        grass.fatal(_("Overlap is too large"))
    n = curr["n"] - yoverlap
    s = curr["s"]
    if s >= n:
        grass.fatal(_("Overlap is too large"))

    datatype = grass.raster_info(input)["datatype"]
    vtiles = None

    # north to south
    for ytile in range(ytiles):
        n = curr["n"] - ytile * height * nsres
        s = n - height * nsres - yoverlap
        if ytile == ytiles - 1:
            s = curr["s"]
        # west to east
        for xtile in range(xtiles):
            w = curr["w"] + xtile * width * ewres
            e = w + width * ewres + xoverlap

            if xtile == xtiles - 1:
                e = curr["e"]

            grass.run_command("g.region", n=n, s=s, e=e, w=w, nsres=nsres, ewres=ewres)

            if do_clip:
                tilename = output + "_stile_" + str(ytile) + str(xtile)
            else:
                tilename = output + "_tile_" + str(ytile) + str(xtile)

            outname = output + "_tile_" + str(ytile) + str(xtile)

            grass.run_command(
                "r.to.vect",
                input=input,
                output=tilename,
                type=ftype,
                column=column,
                flags=rtvflags,
            )

            if do_clip:
                n2 = curr["n"] - ytile * height * nsres - yoverlap2
                s2 = n2 - height * nsres
                if ytile == 0:
                    n2 = curr["n"]
                    s2 = n2 - height * nsres - yoverlap2
                if ytile == ytiles - 1:
                    s2 = curr["s"]

                w2 = curr["w"] + xtile * width * ewres + xoverlap2
                e2 = w2 + width * ewres
                if xtile == 0:
                    w2 = curr["w"]
                    e2 = w2 + width * ewres + xoverlap2
                if xtile == xtiles - 1:
                    e2 = curr["e"]

                tilename = output + "_stile_" + str(ytile) + str(xtile)
                if grass.vector_info_topo(tilename)["areas"] > 0:
                    grass.run_command(
                        "g.region", n=n2, s=s2, e=e2, w=w2, nsres=nsres, ewres=ewres
                    )

                    extname = "extent_tile_" + str(ytile) + str(xtile)
                    grass.run_command("v.in.region", output=extname, flags="d")
                    outname = output + "_tile_" + str(ytile) + str(xtile)
                    grass.run_command(
                        "v.overlay",
                        ainput=tilename,
                        binput=extname,
                        output=outname,
                        operator="and",
                        olayer="0,1,0",
                    )
                    grass.run_command(
                        "g.remove", flags="f", type="vector", name=extname, quiet=True
                    )

                    if vtiles is None:
                        vtiles = outname
                    else:
                        vtiles = vtiles + "," + outname

                grass.run_command(
                    "g.remove", flags="f", type="vector", name=tilename, quiet=True
                )

            else:
                # write cmd history:
                grass.vector_history(outname)
                if vtiles is None:
                    vtiles = outname
                else:
                    vtiles = vtiles + "," + outname

    if flags["p"]:
        grass.run_command("v.patch", input=vtiles, output=output, flags="e")

        grass.run_command("g.remove", flags="f", type="vector", name=vtiles, quiet=True)

        if grass.vector_info_topo(output)["boundaries"] > 0:
            outpatch = output + "_patch"
            grass.run_command("g.rename", vector=(output, outpatch))
            grass.run_command(
                "v.clean", input=outpatch, output=output, tool="break", flags="c"
            )
            grass.run_command("g.remove", flags="f", type="vector", name=outpatch)

    grass.message(_("%s complete") % "r.to.vect.tiled")

    return 0
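A hedged usage sketch of r.to.vect.tiled as defined above (hypothetical raster name, running GRASS session assumed); with flags="p" the tiles are patched back into a single output map:

import grass.script as grass

grass.run_command("r.to.vect.tiled", input="landuse", output="landuse_vect",
                  type="area", column="value", x=4, y=4, flags="p")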
Code example #29
File: r.to.vect.tiled.py  Project: zarch/grass-addons
def main():
    input = options['input']
    output = options['output']
    column = options['column']
    ftype = options['type']
    xtiles = int(options['x'])
    ytiles = int(options['y'])

    rtvflags = ""
    for key in 'sbtvz':
        if flags[key]:
            rtvflags += key

    # check options
    if xtiles <= 0:
        grass.fatal(_("Number of tiles in x direction must be > 0"))
    if ytiles <= 0:
        grass.fatal(_("Number of tiles in y direction must be > 0"))
    if grass.find_file(name=input)['name'] == '':
        grass.fatal(_("Input raster %s not found") % input)

    grass.use_temp_region()
    curr = grass.region()
    width = int(curr['cols'] / xtiles)
    if width <= 1:
        grass.fatal(
            "The requested number of tiles in x direction is too large")
    height = int(curr['rows'] / ytiles)
    if height <= 1:
        grass.fatal(
            "The requested number of tiles in y direction is too large")

    do_clip = False
    overlap = 0
    if flags['s'] and ftype == 'area':
        do_clip = True
        overlap = 2

    ewres = curr['ewres']
    nsres = curr['nsres']
    xoverlap = overlap * ewres
    yoverlap = overlap * nsres
    xoverlap2 = (overlap / 2) * ewres
    yoverlap2 = (overlap / 2) * nsres

    e = curr['e']
    w = curr['w'] + xoverlap
    if w >= e:
        grass.fatal(_("Overlap is too large"))
    n = curr['n'] - yoverlap
    s = curr['s']
    if s >= n:
        grass.fatal(_("Overlap is too large"))

    datatype = grass.raster_info(input)['datatype']
    vtiles = None

    # north to south
    for ytile in range(ytiles):
        n = curr['n'] - ytile * height * nsres
        s = n - height * nsres - yoverlap
        if ytile == ytiles - 1:
            s = curr['s']
        # west to east
        for xtile in range(xtiles):
            w = curr['w'] + xtile * width * ewres
            e = w + width * ewres + xoverlap

            if xtile == xtiles - 1:
                e = curr['e']

            grass.run_command('g.region',
                              n=n,
                              s=s,
                              e=e,
                              w=w,
                              nsres=nsres,
                              ewres=ewres)

            if do_clip:
                tilename = output + '_stile_' + str(ytile) + str(xtile)
            else:
                tilename = output + '_tile_' + str(ytile) + str(xtile)

            outname = output + '_tile_' + str(ytile) + str(xtile)

            grass.run_command('r.to.vect',
                              input=input,
                              output=tilename,
                              type=ftype,
                              column=column,
                              flags=rtvflags)

            if do_clip:
                n2 = curr['n'] - ytile * height * nsres - yoverlap2
                s2 = n2 - height * nsres
                if ytile == 0:
                    n2 = curr['n']
                    s2 = n2 - height * nsres - yoverlap2
                if ytile == ytiles - 1:
                    s2 = curr['s']

                w2 = curr['w'] + xtile * width * ewres + xoverlap2
                e2 = w2 + width * ewres
                if xtile == 0:
                    w2 = curr['w']
                    e2 = w2 + width * ewres + xoverlap2
                if xtile == xtiles - 1:
                    e2 = curr['e']

                tilename = output + '_stile_' + str(ytile) + str(xtile)
                if grass.vector_info_topo(tilename)['areas'] > 0:
                    grass.run_command('g.region',
                                      n=n2,
                                      s=s2,
                                      e=e2,
                                      w=w2,
                                      nsres=nsres,
                                      ewres=ewres)

                    extname = 'extent_tile_' + str(ytile) + str(xtile)
                    grass.run_command('v.in.region', output=extname, flags='d')
                    outname = output + '_tile_' + str(ytile) + str(xtile)
                    grass.run_command('v.overlay',
                                      ainput=tilename,
                                      binput=extname,
                                      output=outname,
                                      operator='and',
                                      olayer='0,1,0')
                    grass.run_command('g.remove',
                                      flags='f',
                                      type='vector',
                                      name=extname,
                                      quiet=True)

                    if vtiles is None:
                        vtiles = outname
                    else:
                        vtiles = vtiles + ',' + outname

                grass.run_command('g.remove',
                                  flags='f',
                                  type='vector',
                                  name=tilename,
                                  quiet=True)

            else:
                # write cmd history:
                grass.vector_history(outname)
                if vtiles is None:
                    vtiles = outname
                else:
                    vtiles = vtiles + ',' + outname

    if flags['p']:
        grass.run_command('v.patch', input=vtiles, output=output, flags='e')

        grass.run_command('g.remove',
                          flags='f',
                          type='vector',
                          name=vtiles,
                          quiet=True)

        if grass.vector_info_topo(output)['boundaries'] > 0:
            outpatch = output + '_patch'
            grass.run_command('g.rename', vector=(output, outpatch))
            grass.run_command('v.clean',
                              input=outpatch,
                              output=output,
                              tool='break',
                              flags='c')
            grass.run_command('g.remove',
                              flags='f',
                              type='vector',
                              name=outpatch)

    grass.message(_("%s complete") % 'r.to.vect.tiled')

    return 0
Code example #30
def main():
    map = options['map']
    layer = options['layer']
    columns = options['columns'].split(',')

    mapset = grass.gisenv()['MAPSET']

    # does map exist in CURRENT mapset?
    if not grass.find_file(map, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    f = grass.vector_layer_db(map, layer)

    table = f['table']
    keycol = f['key']
    database = f['database']
    driver = f['driver']

    if not table:
        grass.fatal(_("There is no table connected to the input vector map. "
                      "Unable to delete any column. Exiting."))

    if keycol in columns:
        grass.fatal(_("Unable to delete <%s> column as it is needed to keep table <%s> "
                      "connected to the input vector map <%s>") %
                    (keycol, table, map))

    for column in columns:
        if column not in grass.vector_columns(map, layer):
            grass.warning(_("Column <%s> not found in table <%s>. Skipped") % (column, table))
            continue

        if driver == "sqlite":
            # echo "Using special trick for SQLite"
            # http://www.sqlite.org/faq.html#q11
            colnames = []
            coltypes = []
            for f in grass.db_describe(table, database=database, driver=driver)['cols']:
                if f[0] == column:
                    continue
                colnames.append(f[0])
                coltypes.append("%s %s" % (f[0], f[1]))

            colnames = ", ".join(colnames)
            coltypes = ", ".join(coltypes)

            cmds = [
                "BEGIN TRANSACTION",
                "CREATE TEMPORARY TABLE ${table}_backup(${coldef})",
                "INSERT INTO ${table}_backup SELECT ${colnames} FROM ${table}",
                "DROP TABLE ${table}",
                "CREATE TABLE ${table}(${coldef})",
                "INSERT INTO ${table} SELECT ${colnames} FROM ${table}_backup",
                "CREATE UNIQUE INDEX ${table}_cat ON ${table} (${keycol} )",
                "DROP TABLE ${table}_backup",
                "COMMIT"
            ]
            tmpl = string.Template(';\n'.join(cmds))
            sql = tmpl.substitute(table=table, coldef=coltypes, colnames=colnames, keycol=keycol)
        else:
            sql = "ALTER TABLE %s DROP COLUMN %s" % (table, column)

        try:
            grass.write_command('db.execute', input='-', database=database, driver=driver,
                                stdin=sql)
        except CalledModuleError:
            grass.fatal(_("Deleting column failed"))

    # write cmd history:
    grass.vector_history(map)
Code example #31
def main():
    map = options['map']
    layer = options['layer']
    column = options['column']

    mapset = grass.gisenv()['MAPSET']

    if not grass.find_file(map, element='vector', mapset=mapset)['file']:
        grass.fatal(_("Vector map <%s> not found in current mapset") % map)

    f = grass.vector_layer_db(map, layer)

    table = f['table']
    keycol = f['key']
    database = f['database']
    driver = f['driver']

    if not table:
        grass.fatal(
            _("There is no table connected to the input vector map. Cannot rename any column"
              ))

    cols = column.split(',')
    oldcol = cols[0]
    newcol = cols[1]

    if driver == "dbf":
        if len(newcol) > 10:
            grass.fatal(
                _("Column name <%s> too long. The DBF driver supports column names not longer than 10 characters"
                  ) % newcol)

    if oldcol == keycol:
        grass.fatal(
            _("Cannot rename column <%s> as it is needed to keep table <%s> connected to the input vector map"
              ) % (oldcol, table))

    # describe old col
    oldcoltype = None
    for f in grass.db_describe(table)['cols']:
        if f[0] != oldcol:
            continue
        oldcoltype = f[1]
        oldcollength = f[2]

    # old col there?
    if not oldcoltype:
        grass.fatal(_("Column <%s> not found in table <%s>") % (oldcol, table))

    # some tricks
    if driver in ['sqlite', 'dbf']:
        if oldcoltype.upper() == "CHARACTER":
            colspec = "%s varchar(%s)" % (newcol, oldcollength)
        else:
            colspec = "%s %s" % (newcol, oldcoltype)

        grass.run_command('v.db.addcolumn',
                          map=map,
                          layer=layer,
                          column=colspec)
        sql = "UPDATE %s SET %s=%s" % (table, newcol, oldcol)
        grass.write_command('db.execute',
                            input='-',
                            database=database,
                            driver=driver,
                            stdin=sql)
        grass.run_command('v.db.dropcolumn',
                          map=map,
                          layer=layer,
                          column=oldcol)
    elif driver == 'mysql':
        if oldcoltype.upper() == "CHARACTER":
            newcoltype = "varchar(%s)" % (oldcollength)
        else:
            newcoltype = oldcoltype

        sql = "ALTER TABLE %s CHANGE %s %s %s" % (table, oldcol, newcol,
                                                  newcoltype)
        grass.write_command('db.execute',
                            input='-',
                            database=database,
                            driver=driver,
                            stdin=sql)
    else:
        sql = "ALTER TABLE %s RENAME %s TO %s" % (table, oldcol, newcol)
        grass.write_command('db.execute',
                            input='-',
                            database=database,
                            driver=driver,
                            stdin=sql)

    # write cmd history:
    grass.vector_history(map)
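A minimal usage sketch of v.db.renamecolumn as implemented above; the column option takes the old and new name separated by a comma (hypothetical names, running GRASS session assumed):

import grass.script as grass

grass.run_command("v.db.renamecolumn", map="roads", layer="1",
                  column="label,street_name")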
Code example #32
def main():
    from dateutil.parser import parse

    try:
        from pygbif import occurrences
        from pygbif import species
    except ImportError:
        grass.fatal(
            _("Cannot import pygbif (https://github.com/sckott/pygbif)"
              " library."
              " Please install it (pip install pygbif)"
              " or ensure that it is on path"
              " (use PYTHONPATH variable)."))

    # Parse input options
    output = options["output"]
    mask = options["mask"]
    species_maps = flags["i"]
    no_region_limit = flags["r"]
    no_topo = flags["b"]
    print_species = flags["p"]
    print_species_table = flags["t"]
    print_species_shell = flags["g"]
    print_occ_number = flags["o"]
    allow_no_geom = flags["n"]
    hasGeoIssue = flags["s"]
    taxa_list = options["taxa"].split(",")
    institutionCode = options["institutioncode"]
    basisofrecord = options["basisofrecord"]
    recordedby = options["recordedby"].split(",")
    date_from = options["date_from"]
    date_to = options["date_to"]
    country = options["country"]
    continent = options["continent"]
    rank = options["rank"]

    # Define static variable
    # Initialize cat
    cat = 0
    # Number of occurrences to fetch in one request
    chunk_size = 300
    # lat/lon proj string
    latlon_crs = [
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0.000,0.000,0.000",
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0,0,0,0,0,0,0",
        "+proj=longlat +no_defs +a=6378137 +rf=298.257223563 +towgs84=0.000,0.000,0.000 +type=crs",
    ]
    # List attributes available in Darwin Core
    # not all attributes are returned in each request
    # to avoid key errors when accessing the dictionary returned by pygbif
    # presence of DWC keys in the returned dictionary is checked using this list
    # The number of keys in this list has to be equal to the number of columns
    # in the attribute table and the attributes written for each occurrence
    dwc_keys = [
        "key",
        "taxonRank",
        "taxonKey",
        "taxonID",
        "scientificName",
        "species",
        "speciesKey",
        "genericName",
        "genus",
        "genusKey",
        "family",
        "familyKey",
        "order",
        "orderKey",
        "class",
        "classKey",
        "phylum",
        "phylumKey",
        "kingdom",
        "kingdomKey",
        "eventDate",
        "verbatimEventDate",
        "startDayOfYear",
        "endDayOfYear",
        "year",
        "month",
        "day",
        "occurrenceID",
        "occurrenceStatus",
        "occurrenceRemarks",
        "Habitat",
        "basisOfRecord",
        "preparations",
        "sex",
        "type",
        "locality",
        "verbatimLocality",
        "decimalLongitude",
        "decimalLatitude",
        "coordinateUncertaintyInMeters",
        "geodeticDatum",
        "higerGeography",
        "continent",
        "country",
        "countryCode",
        "stateProvince",
        "gbifID",
        "protocol",
        "identifier",
        "recordedBy",
        "identificationID",
        "identifiers",
        "dateIdentified",
        "modified",
        "institutionCode",
        "lastInterpreted",
        "lastParsed",
        "references",
        "relations",
        "catalogNumber",
        "occurrenceDetails",
        "datasetKey",
        "datasetName",
        "collectionCode",
        "rights",
        "rightsHolder",
        "license",
        "publishingOrgKey",
        "publishingCountry",
        "lastCrawled",
        "specificEpithet",
        "facts",
        "issues",
        "extensions",
        "language",
    ]
    # Define columns for attribute table
    cols = [
        ("cat", "INTEGER PRIMARY KEY"),
        ("g_search", "varchar(100)"),
        ("g_key", "integer"),
        ("g_taxonrank", "varchar(50)"),
        ("g_taxonkey", "integer"),
        ("g_taxonid", "varchar(50)"),
        ("g_scientificname", "varchar(255)"),
        ("g_species", "varchar(255)"),
        ("g_specieskey", "integer"),
        ("g_genericname", "varchar(255)"),
        ("g_genus", "varchar(50)"),
        ("g_genuskey", "integer"),
        ("g_family", "varchar(50)"),
        ("g_familykey", "integer"),
        ("g_order", "varchar(50)"),
        ("g_orderkey", "integer"),
        ("g_class", "varchar(50)"),
        ("g_classkey", "integer"),
        ("g_phylum", "varchar(50)"),
        ("g_phylumkey", "integer"),
        ("g_kingdom", "varchar(50)"),
        ("g_kingdomkey", "integer"),
        ("g_eventdate", "text"),
        ("g_verbatimeventdate", "varchar(50)"),
        ("g_startDayOfYear", "integer"),
        ("g_endDayOfYear", "integer"),
        ("g_year", "integer"),
        ("g_month", "integer"),
        ("g_day", "integer"),
        ("g_occurrenceid", "varchar(255)"),
        ("g_occurrenceStatus", "varchar(50)"),
        ("g_occurrenceRemarks", "varchar(50)"),
        ("g_Habitat", "varchar(50)"),
        ("g_basisofrecord", "varchar(50)"),
        ("g_preparations", "varchar(50)"),
        ("g_sex", "varchar(50)"),
        ("g_type", "varchar(50)"),
        ("g_locality", "varchar(255)"),
        ("g_verbatimlocality", "varchar(255)"),
        ("g_decimallongitude", "double precision"),
        ("g_decimallatitude", "double precision"),
        ("g_coordinateUncertaintyInMeters", "double precision"),
        ("g_geodeticdatum", "varchar(50)"),
        ("g_higerGeography", "varchar(255)"),
        ("g_continent", "varchar(50)"),
        ("g_country", "varchar(50)"),
        ("g_countryCode", "varchar(50)"),
        ("g_stateProvince", "varchar(50)"),
        ("g_gbifid", "varchar(255)"),
        ("g_protocol", "varchar(255)"),
        ("g_identifier", "varchar(50)"),
        ("g_recordedby", "varchar(255)"),
        ("g_identificationid", "varchar(255)"),
        ("g_identifiers", "text"),
        ("g_dateidentified", "text"),
        ("g_modified", "text"),
        ("g_institutioncode", "varchar(50)"),
        ("g_lastinterpreted", "text"),
        ("g_lastparsed", "text"),
        ("g_references", "varchar(255)"),
        ("g_relations", "text"),
        ("g_catalognumber", "varchar(50)"),
        ("g_occurrencedetails", "text"),
        ("g_datasetkey", "varchar(50)"),
        ("g_datasetname", "varchar(255)"),
        ("g_collectioncode", "varchar(50)"),
        ("g_rights", "varchar(255)"),
        ("g_rightsholder", "varchar(255)"),
        ("g_license", "varchar(50)"),
        ("g_publishingorgkey", "varchar(50)"),
        ("g_publishingcountry", "varchar(50)"),
        ("g_lastcrawled", "text"),
        ("g_specificepithet", "varchar(50)"),
        ("g_facts", "text"),
        ("g_issues", "text"),
        ("g_extensions", "text"),
        ("g_language", "varchar(50)"),
    ]

    # maybe no longer required in Python3
    set_output_encoding()
    # Set temporal filter if requested by user
    # Initialize eventDate filter
    eventDate = None
    # Check if date from is compatible (ISO compliant)
    if date_from:
        try:
            parse(date_from)
        except:
            grass.fatal("Invalid invalid start date provided")

        if date_from and not date_to:
            eventDate = "{}".format(date_from)
    # Check if date to is compatible (ISO compliant)
    if date_to:
        try:
            parse(date_to)
        except:
            grass.fatal("Invalid invalid end date provided")
        # Check if date to is after date_from
        if parse(date_from) < parse(date_to):
            eventDate = "{},{}".format(date_from, date_to)
        else:
            grass.fatal(
                "Invalid date range: End date has to be after start date!")
    # Set filter on basisOfRecord if requested by user
    if basisofrecord == "ALL":
        basisOfRecord = None
    else:
        basisOfRecord = basisofrecord
    # Allow also occurrences with spatial issues if requested by user
    hasGeospatialIssue = False
    if hasGeoIssue:
        hasGeospatialIssue = True
    # Allow also occurrences without coordinates if requested by user
    hasCoordinate = True
    if allow_no_geom:
        hasCoordinate = False

    # Set reprojection parameters
    # Set target projection of current LOCATION
    proj_info = grass.parse_command("g.proj", flags="g")
    target_crs = grass.read_command("g.proj", flags="fj").rstrip()
    target = osr.SpatialReference()

    # Prefer EPSG CRS definitions
    if proj_info["epsg"]:
        target.ImportFromEPSG(int(proj_info["epsg"]))
    else:
        target.ImportFromProj4(target_crs)

    # GDAL >= 3 swaps x and y axis, see: github.com/gdal/issues/1546
    if int(gdal_version[0]) >= 3:
        target.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    if target_crs == "XY location (unprojected)":
        grass.fatal("Sorry, XY locations are not supported!")

    # Set source projection from GBIF
    source = osr.SpatialReference()
    source.ImportFromEPSG(4326)
    # GDAL >= 3 swaps x and y axis, see: github.com/gdal/issues/1546
    if int(gdal_version[0]) >= 3:
        source.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    if target_crs not in latlon_crs:
        transform = osr.CoordinateTransformation(source, target)
        reverse_transform = osr.CoordinateTransformation(target, source)

    # Generate WKT polygon to use for spatial filtering if requested
    if mask:
        if len(mask.split("@")) == 2:
            m = VectorTopo(mask.split("@")[0], mapset=mask.split("@")[1])
        else:
            m = VectorTopo(mask)
        if not m.exist():
            grass.fatal("Could not find vector map <{}>".format(mask))
        m.open("r")
        if not m.is_open():
            grass.fatal("Could not open vector map <{}>".format(mask))

        # Use the area polygon itself if the map contains exactly one area,
        # otherwise fall back to the map's bounding box as spatial filter
        if m.number_of("areas") == 1:
            region_pol = [area.to_wkt() for area in m.viter("areas")][0]
        else:
            bbox = (str(m.bbox()).replace("Bbox(", "").replace(
                " ", "").rstrip(")").split(","))
            region_pol = "POLYGON (({0} {1}, {0} {3}, {2} {3}, {2} {1}, {0} {1}))".format(
                bbox[2], bbox[0], bbox[3], bbox[1])
        m.close()
    else:
        # Do not limit import spatially if LOCATION is able to take global data
        if no_region_limit:
            if target_crs not in latlon_crs:
                grass.fatal("Import of data from outside the current region is"
                            "only supported in a WGS84 location!")
            region_pol = None
        else:
            # Limit import spatially to current region
            # if LOCATION is !NOT! able to take global data
            # to avoid projection errors
            region = grass.parse_command("g.region", flags="g")
            region_pol = "POLYGON (({0} {1},{0} {3},{2} {3},{2} {1},{0} {1}))".format(
                region["e"], region["n"], region["w"], region["s"])

    # Do not reproject in latlon LOCATIONS
    if target_crs not in latlon_crs:
        pol = ogr.CreateGeometryFromWkt(region_pol)
        pol.Transform(reverse_transform)
        pol = pol.ExportToWkt()
    else:
        pol = region_pol

    # Create a single output map unless separate maps for each species are requested
    if (not species_maps and not print_species and not print_species_shell
            and not print_occ_number and not print_species_table):
        mapname = output
        new = Vector(mapname)
        new.open("w", tab_name=mapname, tab_cols=cols)
        cat = 1

    # Import data for each species
    for s in taxa_list:
        # Get the taxon key unless the taxon key itself was provided as input
        try:
            key = int(s)
        except:
            try:
                species_match = species.name_backbone(s,
                                                      rank=rank,
                                                      strict=False,
                                                      verbose=True)
                key = species_match["usageKey"]
            except:
                grass.error(
                    "Data request for taxon {} failed. Are you online?".format(
                        s))
                continue

        # Return matching taxon and alternatives and exit
        if print_species:
            print("Matching taxon for {} is:".format(s))
            print("{} {}".format(species_match["scientificName"],
                                 species_match["status"]))
            if "alternatives" in list(species_match.keys()):
                print("Alternative matches might be: {}".format(s))
                for m in species_match["alternatives"]:
                    print("{} {}".format(m["scientificName"], m["status"]))
            else:
                print("No alternatives found for the given taxon")
            continue
        if print_species_shell:
            print("match={}".format(species_match["scientificName"]))
            if "alternatives" in list(species_match.keys()):
                alternatives = []
                for m in species_match["alternatives"]:
                    alternatives.append(m["scientificName"])
                print("alternatives={}".format(",".join(alternatives)))
            continue
        if print_species_table:
            if "alternatives" in list(species_match.keys()):
                if len(species_match["alternatives"]) == 0:
                    print("{0}|{1}|{2}|".format(
                        s, key, species_match["scientificName"]))
                else:
                    alternatives = []
                    for m in species_match["alternatives"]:
                        alternatives.append(m["scientificName"])
                    print("{0}|{1}|{2}|{3}".format(
                        s,
                        key,
                        species_match["scientificName"],
                        ",".join(alternatives),
                    ))
            continue
        try:
            returns_n = occurrences.search(
                taxonKey=key,
                hasGeospatialIssue=hasGeospatialIssue,
                hasCoordinate=hasCoordinate,
                institutionCode=institutionCode,
                basisOfRecord=basisOfRecord,
                recordedBy=recordedby,
                eventDate=eventDate,
                continent=continent,
                country=country,
                geometry=pol,
                limit=1,
            )["count"]
        except:
            grass.error(
                "Data request for taxon {} faild. Are you online?".format(s))
            returns_n = 0

        # Print only the number of records found for the given search,
        # or skip the taxon if the search did not return anything
        if print_occ_number:
            print("Found {0} occurrences for taxon {1}...".format(
                returns_n, s))
            continue
        elif returns_n <= 0:
            grass.warning(
                "No occurrences for current search for taxon {0}...".format(s))
            continue
        elif returns_n >= 200000:
            grass.warning(
                "Your search for {1} returns {0} records.\n"
                "Unfortunately, the GBIF search API is limited to 200,000 records per request.\n"
                "The download will be incomplete. Please consider to split up your search."
                .format(returns_n, s))

        # Get the number of chunks to download
        chunks = int(math.ceil(returns_n / float(chunk_size)))
        grass.verbose("Downloading {0} occurrences for taxon {1}...".format(
            returns_n, s))

        # If a map per species is requested, create it using the species name
        # as prefix and the output name as suffix
        if species_maps:
            mapname = "{}_{}".format(s.replace(" ", "_"), output)

            new = Vector(mapname)
            new.open("w", tab_name=mapname, tab_cols=cols)
            cat = 0

        # Download the data from GBIF
        for c in range(chunks):
            # Define offset
            offset = c * chunk_size
            # Adjust chunk_size to the hard limit of 200,000 records in GBIF API
            # if necessary
            if offset + chunk_size >= 200000:
                chunk_size = 200000 - offset
            # Get the returns for the next chunk
            returns = occurrences.search(
                taxonKey=key,
                hasGeospatialIssue=hasGeospatialIssue,
                hasCoordinate=hasCoordinate,
                institutionCode=institutionCode,
                basisOfRecord=basisOfRecord,
                recordedBy=recordedby,
                eventDate=eventDate,
                continent=continent,
                country=country,
                geometry=pol,
                limit=chunk_size,
                offset=offset,
            )

            # Write the returned data to map and attribute table
            for res in returns["results"]:
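                # Reproject the GBIF coordinates from WGS84 (EPSG:4326) to the
                # target CRS unless the current LOCATION is already latlon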
                if target_crs not in latlon_crs:
                    point = ogr.CreateGeometryFromWkt("POINT ({} {})".format(
                        res["decimalLongitude"], res["decimalLatitude"]))
                    point.Transform(transform)
                    x = point.GetX()
                    y = point.GetY()
                else:
                    x = res["decimalLongitude"]
                    y = res["decimalLatitude"]

                point = Point(x, y)

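                # Fill Darwin Core keys missing from this record with None so
                # that the attribute tuple below can be built without KeyErrors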
                for k in dwc_keys:
                    if k not in list(res.keys()):
                        res.update({k: None})

                cat = cat + 1
                new.write(
                    point,
                    cat=cat,
                    attrs=(
                        "{}".format(s),
                        res["key"],
                        res["taxonRank"],
                        res["taxonKey"],
                        res["taxonID"],
                        res["scientificName"],
                        res["species"],
                        res["speciesKey"],
                        res["genericName"],
                        res["genus"],
                        res["genusKey"],
                        res["family"],
                        res["familyKey"],
                        res["order"],
                        res["orderKey"],
                        res["class"],
                        res["classKey"],
                        res["phylum"],
                        res["phylumKey"],
                        res["kingdom"],
                        res["kingdomKey"],
                        "{}".format(res["eventDate"])
                        if res["eventDate"] else None,
                        "{}".format(res["verbatimEventDate"])
                        if res["verbatimEventDate"] else None,
                        res["startDayOfYear"],
                        res["endDayOfYear"],
                        res["year"],
                        res["month"],
                        res["day"],
                        res["occurrenceID"],
                        res["occurrenceStatus"],
                        res["occurrenceRemarks"],
                        res["Habitat"],
                        res["basisOfRecord"],
                        res["preparations"],
                        res["sex"],
                        res["type"],
                        res["locality"],
                        res["verbatimLocality"],
                        res["decimalLongitude"],
                        res["decimalLatitude"],
                        res["coordinateUncertaintyInMeters"],
                        res["geodeticDatum"],
                        res["higerGeography"],
                        res["continent"],
                        res["country"],
                        res["countryCode"],
                        res["stateProvince"],
                        res["gbifID"],
                        res["protocol"],
                        res["identifier"],
                        res["recordedBy"],
                        res["identificationID"],
                        ",".join(res["identifiers"]),
                        "{}".format(res["dateIdentified"])
                        if res["dateIdentified"] else None,
                        "{}".format(res["modified"])
                        if res["modified"] else None,
                        res["institutionCode"],
                        "{}".format(res["lastInterpreted"])
                        if res["lastInterpreted"] else None,
                        "{}".format(res["lastParsed"])
                        if res["lastParsed"] else None,
                        res["references"],
                        ",".join(res["relations"]),
                        res["catalogNumber"],
                        "{}".format(res["occurrenceDetails"])
                        if res["occurrenceDetails"] else None,
                        res["datasetKey"],
                        res["datasetName"],
                        res["collectionCode"],
                        res["rights"],
                        res["rightsHolder"],
                        res["license"],
                        res["publishingOrgKey"],
                        res["publishingCountry"],
                        "{}".format(res["lastCrawled"])
                        if res["lastCrawled"] else None,
                        res["specificEpithet"],
                        ",".join(res["facts"]),
                        ",".join(res["issues"]),
                        ",".join(res["extensions"]),
                        res["language"],
                    ),
                )


        # Close the current map if a map for each species is requested
        if species_maps:
            new.table.conn.commit()
            new.close()
            if not no_topo:
                grass.run_command("v.build", map=mapname, option="build")

            # Write history to map
            grass.vector_history(mapname)

    # Close the output map if not a map for each species is requested
    if (not species_maps and not print_species and not print_species_shell
            and not print_occ_number and not print_species_table):
        new.table.conn.commit()
        new.close()
        if not no_topo:
            grass.run_command("v.build", map=mapname, option="build")

        # Write history to map
        grass.vector_history(mapname)
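
The chunked download above pages through the GBIF occurrence API with limit/offset because a single search is capped at 200,000 records and each request only returns one page of results. A minimal stand-alone sketch of that paging logic, assuming pygbif is installed; the taxonKey value and the page size are only illustrative placeholders:

import math
from pygbif import occurrences

taxon_key = 2435099     # hypothetical usageKey, e.g. returned by species.name_backbone()
chunk_size = 300        # per-request page size (GBIF caps this value)
hard_limit = 200000     # maximum number of records a single search can return

# One cheap request (limit=1) just to learn how many records the query matches
total = occurrences.search(taxonKey=taxon_key, hasCoordinate=True, limit=1)["count"]
total = min(total, hard_limit)

records = []
for chunk in range(int(math.ceil(total / float(chunk_size)))):
    offset = chunk * chunk_size
    # Shrink the last page so that offset + limit never exceeds the hard limit
    limit = min(chunk_size, hard_limit - offset)
    page = occurrences.search(taxonKey=taxon_key, hasCoordinate=True,
                              limit=limit, offset=offset)
    records.extend(page["results"])
    if page.get("endOfRecords"):
        break
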
コード例 #33
0
def main():
    global output, tmp

    input = options['input']
    output = options['output']
    layer = options['layer']
    column = options['column']

    # setup temporary file
    tmp = str(os.getpid())

    # does map exist?
    if not grass.find_file(input, element='vector')['file']:
        grass.fatal(_("Vector map <%s> not found") % input)

    if not column:
        grass.warning(
            _("No '%s' option specified. Dissolving based on category values from layer <%s>."
              ) % ("column", layer))
        grass.run_command('v.extract',
                          flags='d',
                          input=input,
                          output=output,
                          type='area',
                          layer=layer)
    else:
        if int(layer) == -1:
            grass.warning(
                _("Invalid layer number (%d). "
                  "Parameter '%s' specified, assuming layer '1'.") %
                (int(layer), 'column'))
            layer = '1'
        try:
            coltype = grass.vector_columns(input, layer)[column]
        except KeyError:
            grass.fatal(_('Column <%s> not found') % column)

        if coltype['type'] not in ('INTEGER', 'SMALLINT', 'CHARACTER', 'TEXT'):
            grass.fatal(_("Key column must be of type integer or string"))

        f = grass.vector_layer_db(input, layer)

        table = f['table']

        tmpfile = '%s_%s' % (output, tmp)

        try:
            grass.run_command('v.reclass',
                              input=input,
                              output=tmpfile,
                              layer=layer,
                              column=column)
            grass.run_command('v.extract',
                              flags='d',
                              input=tmpfile,
                              output=output,
                              type='area',
                              layer=layer)
        except CalledModuleError as e:
            grass.fatal(
                _("Final extraction steps failed."
                  " Check above error messages and"
                  " see following details:\n%s") % e)

    # write cmd history:
    grass.vector_history(output)
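
What this example boils down to is that dissolving areas by an attribute column is a v.reclass (one category per distinct column value) followed by v.extract with the -d flag (dissolve common boundaries between areas sharing a category). A minimal sketch of the same two calls from a GRASS Python session; the map and column names are placeholders:

import grass.script as grass

# Give all areas with the same value in the 'region' column a common category
grass.run_command('v.reclass', input='municipalities', output='municipalities_recl',
                  layer='1', column='region')
# Dissolve the boundaries between areas that now share a category
grass.run_command('v.extract', flags='d', input='municipalities_recl',
                  output='regions_dissolved', type='area', layer='1')
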
コード例 #34
0
def main():
    infile = options['input']
    outfile = options['output']

    # are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)
    if kv['+proj'] != 'longlat':
        grass.fatal(_("This module only operates in LatLong/WGS84 locations"))

    # input test
    if not os.access(infile, os.R_OK):
        grass.fatal(_("File <%s> not found") % infile)

    # DBF doesn't support lengthy text fields
    kv = grass.db_connection()
    dbfdriver = kv['driver'] == 'dbf'
    if dbfdriver:
        grass.warning(
            _("Since DBF driver is used, the content of the 'alternatenames' column might be cut with respect to the original Geonames.org column content"))

    with open(infile, encoding='utf-8') as f:
        num_places = sum(1 for each in f)
    grass.message(_("Converting %d place names...") % num_places)

    # pump data into GRASS:
    #  http://download.geonames.org/export/dump/readme.txt
    #  The main 'geoname' table has the following fields :
    #  ---------------------------------------------------
    #  geonameid         : integer id of record in geonames database
    #  name              : name of geographical point (utf8) varchar(200)
    #  asciiname         : name of geographical point in plain ascii characters, varchar(200)
    #  alternatenames    : alternatenames, comma separated varchar(4000)
    #  latitude          : latitude in decimal degrees (wgs84)
    #  longitude         : longitude in decimal degrees (wgs84)
    #  feature class     : see http://www.geonames.org/export/codes.html, char(1)
    #  feature code      : see http://www.geonames.org/export/codes.html, varchar(10)
    #  country code      : ISO-3166 2-letter country code, 2 characters
    #  cc2               : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters
    #  admin1 code       : fipscode (subject to change to iso code), isocode for the us and ch, see file admin1Codes.txt for display names of this code; varchar(20)
    #  admin2 code       : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80)
    #  admin3 code       : code for third level administrative division, varchar(20)
    #  admin4 code       : code for fourth level administrative division, varchar(20)
    #  population        : integer
    #  elevation         : in meters, integer
    #  gtopo30           : average elevation of 30'x30' (ca 900mx900m) area in meters, integer
    #  timezone          : the timezone id (see file http://download.geonames.org/export/dump/timeZones.txt)
    #  modification date : date of last modification in yyyy-MM-dd format

    # geonameid|name|asciiname|alternatenames|latitude|longitude|featureclass|featurecode|countrycode|cc2|admin1code|admin2code|admin3code|admin4code|population|elevation|gtopo30|timezone|modificationdate

    # debug:
    # head -n 3 ${TMPFILE}.csv

    # use different column names limited to 10 chars for dbf
    if dbfdriver:
        columns = ['geonameid integer',
                   'name varchar(200)',
                   'asciiname varchar(200)',
                   'altname varchar(4000)',
                   'latitude double precision',
                   'longitude double precision',
                   'featrclass varchar(1)',
                   'featrcode varchar(10)',
                   'cntrycode varchar(2)',
                   'cc2 varchar(60)',
                   'admin1code varchar(20)',
                   'admin2code varchar(20)',
                   'admin3code varchar(20)',
                   'admin4code varchar(20)',
                   'population integer',
                   'elevation integer',
                   'gtopo30 integer',
                   'timezone varchar(50)',
                   'mod_date date']
    else:
        columns = ['geonameid integer',
                   'name varchar(200)',
                   'asciiname varchar(200)',
                   'alternatename varchar(4000)',
                   'latitude double precision',
                   'longitude double precision',
                   'featureclass varchar(1)',
                   'featurecode varchar(10)',
                   'countrycode varchar(2)',
                   'cc2 varchar(60)',
                   'admin1code varchar(20)',
                   'admin2code varchar(20)',
                   'admin3code varchar(20)',
                   'admin4code varchar(20)',
                   'population integer',
                   'elevation integer',
                   'gtopo30 integer',
                   'timezone varchar(50)',
                   'modification date']

    grass.run_command('v.in.ascii', cat=0, x=6, y=5, sep='tab',
                      input=infile, output=outfile,
                      columns=columns)

    # write cmd history:
    grass.vector_history(outfile)
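
Since the v.in.ascii call relies on the fixed column layout documented in the comment block above, it can be worth verifying beforehand that every line of the downloaded dump really has the 19 tab-separated fields. A small sketch of such a check; the file name is a placeholder:

# Sanity check of a geonames dump before importing it with v.in.ascii:
# every line should split into exactly 19 tab-separated fields
infile = 'allCountries.txt'   # placeholder path to a downloaded geonames dump

with open(infile, encoding='utf-8') as f:
    for lineno, line in enumerate(f, start=1):
        fields = line.rstrip('\r\n').split('\t')
        if len(fields) != 19:
            print('line {}: expected 19 fields, got {}'.format(lineno, len(fields)))
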
コード例 #35
0
def main():
    infile  = options['input']
    outfile = options['output']
    

    #### setup temporary file
    tmpfile = grass.tempfile()

    #are we in LatLong location?
    s = grass.read_command("g.proj", flags='j')
    kv = grass.parse_key_val(s)
    if kv['+proj'] != 'longlat':
	grass.fatal(_("This module only operates in LatLong/WGS84 locations"))

    # input test
    if not os.access(infile, os.R_OK):
	grass.fatal(_("File <%s> not found") % infile)

    # DBF doesn't support lengthy text fields
    kv = grass.db_connection()
    dbfdriver = kv['driver'] == 'dbf'
    if dbfdriver:
	grass.warning(_("Since DBF driver is used, the content of the 'alternatenames' column might be cut with respect to the original Geonames.org column content"))

    #let's go
    #change TAB to vertical bar
    num_places = 0
    inf = file(infile)
    outf = file(tmpfile, 'wb')
    for line in inf:
	fields = line.rstrip('\r\n').split('\t')
	line2 = '|'.join(fields) + '\n'
	outf.write(line2)
	num_places += 1
    outf.close()
    inf.close()

    grass.message(_("Converted %d place names.") % num_places)

    # pump data into GRASS:
    #  http://download.geonames.org/export/dump/readme.txt
    #  The main 'geoname' table has the following fields :
    #  ---------------------------------------------------
    #  geonameid         : integer id of record in geonames database
    #  name              : name of geographical point (utf8) varchar(200)
    #  asciiname         : name of geographical point in plain ascii characters, varchar(200)
    #  alternatenames    : alternatenames, comma separated varchar(4000)
    #  latitude          : latitude in decimal degrees (wgs84)
    #  longitude         : longitude in decimal degrees (wgs84)
    #  feature class     : see http://www.geonames.org/export/codes.html, char(1)
    #  feature code      : see http://www.geonames.org/export/codes.html, varchar(10)
    #  country code      : ISO-3166 2-letter country code, 2 characters
    #  cc2               : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters
    #  admin1 code       : fipscode (subject to change to iso code), isocode for the us and ch, see file admin1Codes.txt for display names of this code; varchar(20)
    #  admin2 code       : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80)
    #  admin3 code       : code for third level administrative division, varchar(20)
    #  admin4 code       : code for fourth level administrative division, varchar(20)
    #  population        : integer
    #  elevation         : in meters, integer
    #  gtopo30           : average elevation of 30'x30' (ca 900mx900m) area in meters, integer
    #  timezone          : the timezone id (see file http://download.geonames.org/export/dump/timeZones.txt)
    #  modification date : date of last modification in yyyy-MM-dd format

    # geonameid|name|asciiname|alternatenames|latitude|longitude|featureclass|featurecode|countrycode|cc2|admin1code|admin2code|admin3code|admin4code|population|elevation|gtopo30|timezone|modificationdate

    # TODO: elevation seems to contain spurious char stuff :(

    # debug:
    # head -n 3 ${TMPFILE}.csv

    # use different column names limited to 10 chars for dbf
    if dbfdriver:
	columns = ['geonameid integer',
		   'name varchar(200)',
		   'asciiname varchar(200)',
		   'altname varchar(4000)',
		   'latitude double precision',
		   'longitude double precision',
		   'featrclass varchar(1)',
		   'featrcode varchar(10)',
		   'cntrycode varchar(2)',
		   'cc2 varchar(60)',
		   'admin1code varchar(20)',
		   'admin2code varchar(20)',
		   'admin3code varchar(20)',
		   'admin4code varchar(20)',
		   'population integer',
		   'elevation varchar(5)',
		   'gtopo30 integer',
		   'timezone varchar(50)',
		   'mod_date date']
    else:
	columns = ['geonameid integer',
		   'name varchar(200)',
		   'asciiname varchar(200)',
		   'alternatename varchar(4000)',
		   'latitude double precision',
		   'longitude double precision',
		   'featureclass varchar(1)',
		   'featurecode varchar(10)',
		   'countrycode varchar(2)',
		   'cc2 varchar(60)',
		   'admin1code varchar(20)',
		   'admin2code varchar(20)',
		   'admin3code varchar(20)',
		   'admin4code varchar(20)',
		   'population integer',
		   'elevation varchar(5)',
		   'gtopo30 integer',
		   'timezone varchar(50)',
		   'modification date']

    grass.run_command('v.in.ascii', cat = 0, x = 6, y = 5, fs = '|',
		      input = tmpfile, output = outfile,
		      columns = columns)

    grass.try_remove(tmpfile)

    # write cmd history:
    grass.vector_history(outfile)
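
This older variant of the geonames importer still uses Python 2 idioms (the file() builtin, writing text to a file opened in 'wb' mode). Purely for reference, the same TAB-to-pipe conversion in current Python might look roughly like this; the file names are placeholders:

# Python 3 equivalent of the TAB-to-pipe conversion above
num_places = 0
with open('allCountries.txt', encoding='utf-8') as inf, \
        open('allCountries.psv', 'w', encoding='utf-8') as outf:
    for line in inf:
        fields = line.rstrip('\r\n').split('\t')
        outf.write('|'.join(fields) + '\n')
        num_places += 1
print('Converted {} place names.'.format(num_places))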