Example No. 1
def main():
    infile = options['input']
    compression_off = flags['c']

    global basedir
    basedir = grass.tempdir()

    # check if vector map exists
    gfile = grass.find_file(infile, element='vector')
    if not gfile['name']:
        grass.fatal(_("Vector map <%s> not found") % infile)

    # check if input vector map is in the native format
    if vector.vector_info(gfile['fullname'])['format'] != 'native':
        grass.fatal(
            _("Unable to pack vector map <%s>. Only native format supported.")
            % gfile['fullname'])

    # strip the mapset name if present
    if '@' in infile:
        infile = infile.split('@')[0]

    # output name
    if options['output']:
        outfile = options['output']
    else:
        outfile = infile + '.pack'

    # check if the output file already exists
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(
                _("Pack file <%s> already exists and will be overwritten") %
                outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))

    # prepare for packing
    grass.verbose(_("Packing <%s>...") % (gfile['fullname']))

    # write tar file, optional compression
    if compression_off:
        tar = tarfile.open(name=outfile, mode='w:')
    else:
        tar = tarfile.open(name=outfile, mode='w:gz')
    tar.add(gfile['file'], infile)

    # check if a database connection exists for the vector map
    db_vect = vector.vector_db(gfile['fullname'])
    if not db_vect:
        grass.verbose(
            _('There is no database connected with vector map <%s>') %
            gfile['fullname'])
    else:
        # for each layer connection save a table in sqlite database
        sqlitedb = os.path.join(basedir, 'db.sqlite')
        for i, dbconn in db_vect.items():
            grass.run_command('db.copy',
                              from_driver=dbconn['driver'],
                              from_database=dbconn['database'],
                              from_table=dbconn['table'],
                              to_driver='sqlite',
                              to_database=sqlitedb,
                              to_table=dbconn['table'])
        tar.add(sqlitedb, 'db.sqlite')

    # add the PROJ files to the tar file so they can be checked when unpacking
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS', 'EPSG']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            tar.add(path, 'PROJ_' + support)
    tar.close()

    grass.message(
        _("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
Example No. 2
def main():
    infile = options["input"]
    compression_off = flags["c"]

    global basedir
    basedir = grass.tempdir()

    # check if vector map exists
    gfile = grass.find_file(infile, element="vector")
    if not gfile["name"]:
        grass.fatal(_("Vector map <%s> not found") % infile)

    # check if input vector map is in the native format
    if vector.vector_info(gfile["fullname"])["format"] != "native":
        grass.fatal(
            _("Unable to pack vector map <%s>. Only native format supported.")
            % gfile["fullname"]
        )

    # strip the mapset name if present
    if "@" in infile:
        infile = infile.split("@")[0]

    # output name
    if options["output"]:
        outfile = options["output"]
    else:
        outfile = infile + ".pack"

    # check if the output file already exists
    if os.path.exists(outfile):
        if os.getenv("GRASS_OVERWRITE"):
            grass.warning(
                _("Pack file <%s> already exists and will be overwritten") % outfile
            )
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))

    # prepare for packing
    grass.verbose(_("Packing <%s>...") % (gfile["fullname"]))

    # write tar file, optional compression
    if compression_off:
        tar = tarfile.open(name=outfile, mode="w:")
    else:
        tar = tarfile.open(name=outfile, mode="w:gz")
    tar.add(gfile["file"], infile)

    # check if a database connection exists for the vector map
    db_vect = vector.vector_db(gfile["fullname"])
    if not db_vect:
        grass.verbose(
            _("There is not database connected with vector map <%s>")
            % gfile["fullname"]
        )
    else:
        # for each layer connection save a table in sqlite database
        sqlitedb = os.path.join(basedir, "db.sqlite")
        for i, dbconn in db_vect.items():
            grass.run_command(
                "db.copy",
                from_driver=dbconn["driver"],
                from_database=dbconn["database"],
                from_table=dbconn["table"],
                to_driver="sqlite",
                to_database=sqlitedb,
                to_table=dbconn["table"],
            )
        tar.add(sqlitedb, "db.sqlite")

    # add the PROJ files to the tar file so they can be checked when unpacking
    gisenv = grass.gisenv()
    for support in ["INFO", "UNITS", "EPSG"]:
        path = os.path.join(
            gisenv["GISDBASE"], gisenv["LOCATION_NAME"], "PERMANENT", "PROJ_" + support
        )
        if os.path.exists(path):
            tar.add(path, "PROJ_" + support)
    tar.close()

    grass.message(_("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
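
The db.copy loop in the middle of this variant is what consolidates the attribute data: each table linked to the vector map, whatever backend it lives in, is mirrored into the single db.sqlite file that is then added to the archive. A standalone sketch of one such copy (the table name and target path are hypothetical; the source path shown is the GRASS default for the SQLite driver):

# hypothetical standalone db.copy call, mirroring one attribute table
# into a separate SQLite database
import grass.script as grass

grass.run_command('db.copy',
                  from_driver='sqlite',
                  from_database='$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db',
                  from_table='roads',            # hypothetical table name
                  to_driver='sqlite',
                  to_database='/tmp/db.sqlite',  # hypothetical target
                  to_table='roads')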
Example No. 3
def main():
    """Do the real work
    """
    #Parse remaining variables
    network_map = options['input']
    # network_mapset = network_map.split('@')[0]
    network = network_map.split('@')[1] if len(
        network_map.split('@')) > 1 else None
    suffix = options['suffix']
    layer = options['layer']
    corridor_tolerance = options['corridor_tolerance']
    cores = options['cores']
    where = None if options['where'] == '' else options['where']
    weights = options['weights'].split(',')
    s_flag = flags['s']
    d_flag = flags['d']
    r_flag = flags['r']

    # soft limit on the number of files this process may have open
    ulimit = resource.getrlimit(resource.RLIMIT_NOFILE)[0]

    # get the command that created the network map from its history
    # and extract the prefix used for the cost distance rasters
    net_hist_str = grass.read_command('v.info', map=network_map,
                                      flags='h').split('\n')[0].split(': ')[1]

    dist_cmd_dict = task.cmdstring_to_tuple(net_hist_str)

    dist_prefix = dist_cmd_dict[1]['prefix']

    # Check that a database connection is defined for the edge map
    con = vect.vector_db(network_map).get(int(layer))
    if not con:
        grass.fatal("Database connection for map {} is not "
                    "defined for layer {}.".format(network_map, layer))

    # Check if the required columns exist and are of the required type
    required_columns = ['con_id_u', 'from_p', 'to_p', 'cd_u']
    if weights:
        required_columns += weights

    in_columns = vect.vector_columns(network_map, layer=layer)

    missing_columns = np.setdiff1d(required_columns, list(in_columns.keys()))

    if missing_columns.size:
        grass.fatal("Cannot find the following required/requested "
                    "column(s) {} in vector map "
                    "{}.".format(', '.join(missing_columns), network_map))

    # Check that every required column is numeric and collect the
    # types of the requested weight columns
    weight_types = []
    for col in required_columns:
        if in_columns[col]['type'] not in [
                'INTEGER', 'DOUBLE PRECISION', 'REAL'
        ]:
            grass.fatal("Column {} is of type {}. "
                        "Only numeric types (integer, "
                        "real or double precision) "
                        "allowed!".format(col, in_columns[col]['type']))

        if col in weights:
            weight_types.append(in_columns[col]['type'])

    # Extract the necessary information on edges from the attribute
    # table of the edge map
    table_io = StringIO(grass.read_command('v.db.select',
                                           flags='c',
                                           map=network_map,
                                           columns=required_columns,
                                           separator=',',
                                           where=where))

    try:
        table_extract = np.genfromtxt(table_io,
                                      delimiter=',',
                                      dtype=None,
                                      names=required_columns)
    except Exception:
        grass.fatal('No edges selected to compute corridors for...')

    # Output the result of the where-clause and exit (if requested)
    if s_flag:
        print(table_extract)
        sys.exit(0)

    # Get unique identifiers for the selected undirected edges
    selected_patches = np.unique(
        np.append(table_extract['from_p'], table_extract['to_p']))

    selected_edges = np.unique(table_extract['con_id_u'])

    # activate the z-flag if more maps have to be aggregated than the
    # open-file limit allows
    z_flag = None if len(selected_edges) < ulimit else 'z'

    # Check that the cost distance raster maps exist
    pattern = "{}_patch_*_cost_dist".format(dist_prefix)
    patchmaps = grass.read_command('g.list', pattern=pattern,
                                   type='raster').rstrip('\n').split('\n')

    for patch in selected_patches:
        patchmap = "{}_patch_{}_cost_dist".format(dist_prefix, patch)
        if patchmap not in patchmaps:
            grass.fatal("Cannot find raster map {}.".format(patchmap))

    # Create mapcalculator expressions for cost distance corridors,
    # assigning distance values
    corridormaps = {}
    if d_flag:
        pattern = "{}_corridor_*_cost_dist".format(dist_prefix)
        corridor_base = 'dist'
    else:
        pattern = "{}_corridor_[0-9]+$".format(dist_prefix)
        corridor_base = 'id'

    corridormaps[corridor_base] = grass.read_command(
        'g.list', flags='e', pattern=pattern,
        type='raster').rstrip('\n').split('\n')
    for weight in weights:
        pattern = "{}_corridor_[0-9]+_{}".format(dist_prefix, weight)
        corridormaps[weight] = grass.read_command(
            'g.list', flags='e', pattern=pattern,
            type='raster').rstrip('\n').split('\n')

    # Set up GRASS modules for raster processing
    mapcalc = Module("r.mapcalc", quiet=True, run_=False)
    reclass = Module("r.reclass", rules='-', quiet=True, run_=False)
    recode = Module("r.recode", rules='-', quiet=True, run_=False)

    # Set up parallel module queues if parallel processing is requested
    if cores > 1:
        mapcalc_queue = ParallelModuleQueue(nprocs=cores)

        if 'INTEGER' in weight_types:
            reclass_queue = ParallelModuleQueue(nprocs=cores)

        if 'REAL' in weight_types or 'DOUBLE PRECISION' in weight_types:
            recode_queue = ParallelModuleQueue(nprocs=cores)

    corridor_list = []
    for edge_id in selected_edges:
        edge = table_extract[table_extract['con_id_u'] == edge_id][0]
        if d_flag:
            corridor = "{}_corridor_{}_cost_dist".format(dist_prefix, edge_id)
            mc_expression = "{prefix}_corridor_{CON_ID}_cost_dist=if( \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist) - \
            (({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist) * \
            {cor_tolerance}/100.0)<= \
            ({prefix}_patch_{FROM_P}_cost_dist + \
            {prefix}_patch_{TO_P}_cost_dist), \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist), \
            null())".format(prefix=dist_prefix,
                            CON_ID=edge['con_id_u'],
                            FROM_P=edge['from_p'],
                            TO_P=edge['to_p'],
                            cor_tolerance=corridor_tolerance)
        else:
            corridor = "{}_corridor_{}".format(dist_prefix, edge['con_id_u'])
            # Create mapcalculator expressions for cost distance
            # corridors, assigning connection IDs for reclassification
            mc_expression = "{prefix}_corridor_{CON_ID}=if( \
            ({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist)- \
            (({prefix}_patch_{FROM_P}_cost_dist+ \
            {prefix}_patch_{TO_P}_cost_dist)* \
            {cor_tolerance}/100.0)<={CD}, \
            {CON_ID}, null())".format(prefix=dist_prefix,
                                      CON_ID=edge['con_id_u'],
                                      FROM_P=edge['from_p'],
                                      TO_P=edge['to_p'],
                                      CD=edge['cd_u'],
                                      cor_tolerance=corridor_tolerance)

        corridor_list.append(corridor)

        if r_flag or corridor not in corridormaps[corridor_base]:
            new_mapcalc = copy.deepcopy(mapcalc)

            if cores > 1:
                calc = new_mapcalc(expression=mc_expression)
                mapcalc_queue.put(calc)
            else:
                calc = new_mapcalc(expression=mc_expression,
                                   region='intersect')
                calc.run()

        for weight in weights:
            if r_flag or corridor not in corridormaps[weight]:
                in_map = corridor
                out_map = '{}_{}'.format(in_map, weight)
                if in_columns[weight]['type'] == 'INTEGER':
                    new_reclass = copy.deepcopy(reclass)
                    reclass_rule = "{} = {}".format(edge['con_id_u'],
                                                    edge[weight])
                    rcl = new_reclass(input=in_map,
                                      output=out_map,
                                      stdin_=reclass_rule)

                    if cores > 1:
                        reclass_queue.put(rcl)
                    else:
                        rcl.run()

                if in_columns[weight]['type'] in ['REAL', 'DOUBLE PRECISION']:
                    new_recode = copy.deepcopy(recode)
                    recode_rule = "{0}:{0}:{1}:{1}".format(
                        edge['con_id_u'], edge[weight])
                    rco = new_recode(input=in_map,
                                     output=out_map,
                                     stdin_=recode_rule)
                    if cores > 1:
                        recode_queue.put(rco)
                    else:
                        rco.run()

    if cores > 1:
        mapcalc_queue.wait()
        if 'INTEGER' in weight_types:
            reclass_queue.wait()
        if 'REAL' in weight_types or 'DOUBLE PRECISION' in weight_types:
            recode_queue.wait()

    grass.verbose('Aggregating corridor maps...')

    if d_flag:
        grass.run_command('r.series',
                          flags=z_flag,
                          quiet=True,
                          input=','.join(corridor_list),
                          output='{}_corridors_min_cost_dist_{}'.format(
                              dist_prefix, suffix),
                          method='minimum')
    else:
        # Summarize corridors
        if not weights:
            output_map = '{}_corridors_count_{}'.format(dist_prefix, suffix)
            grass.run_command('r.series',
                              flags=z_flag,
                              quiet=True,
                              input=','.join(corridor_list),
                              output=output_map,
                              method='count')
            write_raster_history(output_map)

        else:
            # Weight corridors according to the user-requested weights
            for weight in weights:
                # Generate the corridor map list (a real list, so that
                # run_command can expand it into a comma-separated value)
                corridor_map_list = [cm + '_{}'.format(weight)
                                     for cm in corridor_list]
                output_map = '{}_corridors_{}_sum_{}'.format(
                    dist_prefix, weight, suffix)
                # Summarize corridors using r.series
                grass.run_command('r.series',
                                  flags=z_flag,
                                  quiet=True,
                                  input=corridor_map_list,
                                  output=output_map,
                                  method='sum')
                write_raster_history(output_map)
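
The parallelization pattern in this example is worth isolating: a pygrass Module is created once with run_=False as a template, deep-copied per task, and either run directly or handed to a ParallelModuleQueue that caps the number of concurrent processes. A minimal sketch of the same pattern, assuming a raster map named "elevation" exists in the current mapset:

# minimal sketch of the Module/ParallelModuleQueue pattern used above;
# "elevation" is an assumed input raster
import copy

from grass.pygrass.modules import Module, ParallelModuleQueue

mapcalc = Module('r.mapcalc', quiet=True, run_=False)  # template, not yet run
queue = ParallelModuleQueue(nprocs=4)

for i in range(8):
    calc = copy.deepcopy(mapcalc)
    calc(expression='scaled_{i} = elevation * {i}'.format(i=i))
    queue.put(calc)  # started asynchronously, at most nprocs at a time

queue.wait()  # block until all queued modules have finished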
Example No. 4
def main():
    infile = options['input']
    compression_off = flags['c']
    
    global basedir
    basedir = grass.tempdir()
    
    # check if vector map exists
    gfile = grass.find_file(infile, element='vector')
    if not gfile['name']:
        grass.fatal(_("Vector map <%s> not found") % infile)
    
    # check if input vector map is in the native format
    if vector.vector_info(gfile['fullname'])['format'] != 'native':
        grass.fatal(_("Unable to pack vector map <%s>. Only native format supported.") % \
                        gfile['fullname'])
    
    # strip the mapset name if present
    if '@' in infile:
        infile = infile.split('@')[0]
    
    # output name
    if options['output']:
        outfile = options['output']
    else:
        outfile = infile + '.pack'
    
    # check if the output file already exists
    if os.path.exists(outfile):
        if os.getenv('GRASS_OVERWRITE'):
            grass.warning(_("Pack file <%s> already exists and will be overwritten") % outfile)
            try_remove(outfile)
        else:
            grass.fatal(_("option <%s>: <%s> exists.") % ("output", outfile))
    
    # prepare for packing
    grass.verbose(_("Packing <%s>...") % (gfile['fullname']))
    
    # write tar file, optional compression
    if compression_off:
        tar = tarfile.open(name=outfile, mode='w:')
    else:
        tar = tarfile.open(name=outfile, mode='w:gz')
    tar.add(gfile['file'], infile)
    
    # check if a database connection exists for the vector map
    db_vect = vector.vector_db(gfile['fullname'])
    if not db_vect:
        grass.verbose(_('There is no database connected with vector map <%s>') % gfile['fullname'])
    else:
        # for each layer connection save a table in sqlite database
        sqlitedb = os.path.join(basedir, 'db.sqlite')
        for i, dbconn in db_vect.items():
            grass.run_command('db.copy', from_driver=dbconn['driver'],
                              from_database=dbconn['database'],
                              from_table=dbconn['table'],
                              to_driver='sqlite', to_database=sqlitedb,
                              to_table=dbconn['table'])
        tar.add(sqlitedb, 'db.sqlite')
    
    # add the PROJ files to the tar file so they can be checked when unpacking
    gisenv = grass.gisenv()
    for support in ['INFO', 'UNITS']:
        path = os.path.join(gisenv['GISDBASE'], gisenv['LOCATION_NAME'],
                            'PERMANENT', 'PROJ_' + support)
        if os.path.exists(path):
            tar.add(path, 'PROJ_' + support)
    tar.close()
    
    grass.message(_("Pack file <%s> created") % os.path.join(os.getcwd(), outfile))
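
Whichever variant is used, the result is an ordinary (optionally gzip-compressed) tar archive, so a pack file written by main() can be inspected with the standard library alone. A short hypothetical check ('roads.pack' is an assumed file name):

# mode 'r:*' lets tarfile auto-detect whether gzip compression was used
import tarfile

with tarfile.open('roads.pack', mode='r:*') as tar:
    print(tar.getnames())  # e.g. ['roads', 'db.sqlite', 'PROJ_INFO', 'PROJ_UNITS']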