Example #1
File: col.py Project: jasp382/glass
def fields_to_tbls(inFolder, fields, tbl_format='.shp'):
    """
    Add fields to several tables in a folder
    """

    from glass.pys.oss import lst_ff
    from glass.g.tbl.col import add_fields

    tables = lst_ff(inFolder, file_format=tbl_format)

    for table in tables:
        add_fields(table, fields, api='ogr')
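
A minimal usage sketch (not part of the project); the folder path and field names are hypothetical, and it assumes the glass package and its OGR bindings are installed:

# Hypothetical call: add two attribute columns to every shapefile in a folder.
fields_to_tbls(
    '/tmp/parcels',                                         # placeholder folder with .shp files
    {'area_ha': 'DOUBLE PRECISION', 'src': 'varchar(20)'},  # column name -> column type
    tbl_format='.shp'
)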
Example #2
File: col.py Project: jasp382/glass
def filename_to_col(tables, new_field, table_format='.dbf'):
    """
    Update a table with the filename in a new field
    """

    import os
    from glass.pys.oss import lst_ff
    from glass.g.tbl.col import add_fields, update_cols

    if os.path.isdir(tables):
        __tables = lst_ff(tables, file_format=table_format)

    else:
        __tables = [tables]

    for table in __tables:
        add_fields(table, {new_field: 'varchar(50)'})

        name_tbl = os.path.splitext(os.path.basename(table))[0]
        name_tbl = name_tbl.lower() if name_tbl.isupper() else name_tbl
        update_cols(table, {new_field: name_tbl})
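
A minimal usage sketch (hypothetical paths, assuming writable .dbf tables):

# Write each table's filename into a new 'source' column for every .dbf in a folder.
filename_to_col('/tmp/tables', 'source', table_format='.dbf')

# A single table path also works, since the function accepts a file as well as a folder.
filename_to_col('/tmp/tables/roads.dbf', 'source')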
Example #3
def grs_near(fromShp,
             toShp,
             nearCatCol='tocat',
             nearDistCol="todistance",
             maxDist=-1,
             as_cmd=None):
    """
    v.distance - Finds the nearest element in vector map 'to'
    for elements in vector map 'from'.
    """

    from glass.g.tbl.col import add_fields

    add_fields(fromShp, {
        nearCatCol: 'INTEGER',
        nearDistCol: 'DOUBLE PRECISION'
    },
               api="grass" if as_cmd else "pygrass")

    if not as_cmd:
        import grass.script as grass

        grass.run_command("v.distance",
                          _from=fromShp,
                          to=toShp,
                          upload='cat,dist',
                          column='{},{}'.format(nearCatCol, nearDistCol),
                          dmax=maxDist)

    else:
        from glass.pys import execmd

        rcmd = execmd(
            ("v.distance from={} to={} upload=cat,dist "
             "column={},{} dmax={}").format(fromShp, toShp, nearCatCol,
                                            nearDistCol, maxDist))
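
A minimal usage sketch, assuming an active GRASS GIS session in which the vector maps 'buildings' and 'hospitals' (hypothetical names) were already imported:

grs_near(
    'buildings', 'hospitals',
    nearCatCol='near_cat',    # receives the category of the nearest 'hospitals' feature
    nearDistCol='near_dist',  # receives the distance to that feature
    maxDist=5000,             # search radius in map units; -1 means no limit
    as_cmd=True               # run v.distance through the command line instead of pygrass
)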
Example #4
File: attr.py Project: jasp382/glass
def geomattr_to_db(shp,
                   attrCol,
                   attr,
                   geomType,
                   createCol=None,
                   unit=None,
                   lyrN=1,
                   ascmd=None):
    """
    v.to.db - Populates attribute values from vector features.
    
    v.to.db loads vector map features or metrics into a database table,
    or prints them (or the SQL queries used to obtain them) in a form
    of a human-readable report. For uploaded/printed category values '-1'
    is used for 'no category' and 'null'/'-' if category cannot be
    found or multiple categories were found. For line azimuths '-1' is used
    for closed lines (start equals end).
    
    attr options are:
    * cat: insert new row for each category if doesn't exist yet
    * area: area size
    * compact: compactness of an area, calculated as 
    compactness = perimeter / (2 * sqrt(PI * area))
    * fd: fractal dimension of boundary defining a polygon, calculated as
    fd = 2 * (log(perimeter) / log(area))
    * perimeter: perimeter length of an area
    * length: line length
    * count: number of features for each category
    * coor: point coordinates, X,Y or X,Y,Z
    * start: line/boundary starting point coordinates, X,Y or X,Y,Z
    * end: line/boundary end point coordinates, X,Y or X,Y,Z
    * sides: categories of areas on the left and right side of the boundary,
    'query_layer' is used for area category
    * query: result of a database query for all records of the geometry(or
    geometries) from table specified by 'query_layer' option
    * slope: slope steepness of vector line or boundary
    * sinuous: line sinuosity, calculated as line length / distance between
    end points
    * azimuth: line azimuth, calculated as angle between North direction and
    endnode direction at startnode
    * bbox: bounding box of area, N,S,E,W
    """

    from glass.pys import obj_to_lst

    attrCol = obj_to_lst(attrCol)

    if createCol:
        from glass.g.tbl.col import add_fields

        for c in attrCol:
            add_fields(shp, {c: "DOUBLE PRECISION"},
                       api="grass" if ascmd else "pygrass")

    if not ascmd:
        from grass.pygrass.modules import Module

        vtodb = Module(
            "v.to.db",
            map=shp,
            type=geomType,
            layer=lyrN,
            option=attr,
            columns=",".join(attrCol) if attr != 'length' else attrCol[0],
            units=unit,
            run_=False,
            quiet=True,
            overwrite=True)

        vtodb()

    else:
        from glass.pys import execmd

        rcmd = execmd(
            ("v.to.db map={} type={} layer={} option={} "
             "columns={} units={} --quiet --overwrite").format(
                 shp, geomType, lyrN, attr,
                 ",".join(attrCol) if attr != 'length' else attrCol[0], unit))
Example #5
def vector_based(osmdata, nomenclature, refRaster, lulcShp,
                 overwrite=None, dataStore=None, RoadsAPI='POSTGIS'):
    """
    Convert OSM Data into Land Use/Land Cover Information
    
    A vector-based approach.
    
    TODO: Add a detailed description.
    
    RoadsAPI Options:
    * GRASS
    * SQLITE
    * POSTGIS
    """
    
    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import copy
    import datetime
    import os
    # ************************************************************************ #
    # glass dependencies #
    # ************************************************************************ #
    from glass.pys.oss import fprop, mkdir
    from glass.g.wenv.grs import run_grass
    if RoadsAPI == 'POSTGIS':
        from glass.ng.sql.db import create_db, drop_db
        from glass.g.it.db import osm_to_psql
        from glass.ng.sql.bkup import dump_db
    else:
        from glass.g.it.osm import osm_to_sqdb
    from glass.ete.osm2lulc.utils import (
        osm_project, add_lulc_to_osmfeat, get_ref_raster)
    from glass.g.dp.mge import shps_to_shp, same_attr_to_shp
    from glass.ete.osm2lulc.mod1 import grs_vector
    if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS':
        from glass.ete.osm2lulc.mod2 import roads_sqdb
    else:
        from glass.ete.osm2lulc.mod2 import grs_vec_roads
    from glass.ete.osm2lulc.m3_4 import grs_vect_selbyarea
    from glass.ete.osm2lulc.mod5 import grs_vect_bbuffer
    from glass.ete.osm2lulc.mod6 import vector_assign_pntags_to_build
    from glass.g.prj import def_prj
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exists!
    if not os.path.exists(os.path.dirname(lulcShp)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcShp)))
    
    if not os.path.exists(osmdata):
        raise ValueError('File with OSM DATA ({}) does not exist!'.format(osmdata))
    
    if not os.path.exists(refRaster):
        raise ValueError('File with reference area ({}) does not exist!'.format(refRaster))
    
    # Check if Nomenclature is valid
    nomenclature = nomenclature if nomenclature in (
        "URBAN_ATLAS", "CORINE_LAND_COVER", "GLOBE_LAND_30") else "URBAN_ATLAS"
    
    time_a = datetime.datetime.now().replace(microsecond=0)
    
    # Create workspace for temporary files
    workspace = os.path.join(os.path.dirname(
        lulcShp), 'osmtolulc') if not dataStore else dataStore
    
    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace)
    
    # Get Reference Raster
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=10)
    
    from glass.ete.osm2lulc import osmTableData, PRIORITIES, LEGEND
    
    __priorities = PRIORITIES[nomenclature]
    __legend     = LEGEND[nomenclature]
    
    time_b = datetime.datetime.now().replace(microsecond=0)
    
    if RoadsAPI != 'POSTGIS':
        # ******************************************************************** #
        # Convert OSM file to SQLITE DB #
        # ******************************************************************** #
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    else:
        # Convert OSM file to POSTGRESQL DB #
        osm_db = create_db(fprop(
            osmdata, 'fn', forceLower=True), overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(
        osm_db, osmTableData, nomenclature,
        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI
    )
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db, epsg,
        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI,
        isGlobeLand=None if nomenclature != 'GLOBE_LAND_30' else True
    )
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(workspace, grassBIN='grass78', location='grloc', srs=epsg)
    #import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')
    
    # ************************************************************************ #
    # IMPORT SOME glass MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from glass.g.gp.ovl import erase
    from glass.g.wenv.grs import rst_to_region
    from glass.g.gp.gen import dissolve
    from glass.g.tbl.grs import add_and_update, reset_table, update_table
    from glass.g.tbl.col import add_fields
    from glass.g.it.shp import shp_to_grs, grs_to_shp
    from glass.g.it.rst import rst_to_grs
    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)
    
    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    osmShps = []
    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    ruleOneShp, timeCheck1 = grs_vector(
        osm_db, osmTableData['polygons'], apidb=RoadsAPI
    )
    osmShps.append(ruleOneShp)
    
    time_g = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    ruleRowShp, timeCheck2 = roads_sqdb(
        osm_db, osmTableData['lines'], osmTableData['polygons'], apidb=RoadsAPI
    ) if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS' else grs_vec_roads(
        osm_db, osmTableData['lines'], osmTableData['polygons'])
    
    osmShps.append(ruleRowShp)
    time_h = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleThreeShp, timeCheck3 = grs_vect_selbyarea(
            osm_db, osmTableData['polygons'], UPPER=True, apidb=RoadsAPI
        )
    
        osmShps.append(ruleThreeShp)
        time_l = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck3 = None
        time_l     = None
    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleFourShp, timeCheck4 = grs_vect_selbyarea(
            osm_db, osmTableData['polygons'], UPPER=False, apidb=RoadsAPI
        )
    
        osmShps.append(ruleFourShp)
        time_j = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck4 = None
        time_j     = None
    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    ruleFiveShp, timeCheck5 = grs_vect_bbuffer(
        osm_db, osmTableData["lines"], api_db=RoadsAPI
    )
    
    osmShps.append(ruleFiveShp)
    time_m = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleSeven11, ruleSeven12, timeCheck7 = vector_assign_pntags_to_build(
            osm_db, osmTableData['points'], osmTableData['polygons'],
            apidb=RoadsAPI
        )
        
        if ruleSeven11:
            osmShps.append(ruleSeven11)
        
        if ruleSeven12:
            osmShps.append(ruleSeven12)
        
        time_n = datetime.datetime.now().replace(microsecond=0)
    
    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)
    
    # ************************************************************************ #
    # Produce LULC Map  #
    # ************************************************************************ #
    """
    Get shapefiles with all geometries related to one class - one shapefile per class
    """
    
    _osmShps = []
    for i in range(len(osmShps)):
        if not osmShps[i]: continue
        
        _osmShps.append(grs_to_shp(
            osmShps[i], os.path.join(workspace, osmShps[i] + '.shp'),
            'auto', lyrN=1, asCMD=True, asMultiPart=None
        ))
    
    for shp in _osmShps:
        def_prj(os.path.splitext(shp)[0] + '.prj', epsg=epsg, api='epsgio')
    
    _osmShps = same_attr_to_shp(
        _osmShps, "cat", workspace, "osm_", resultDict=True
    )
    del osmShps
    
    time_o = datetime.datetime.now().replace(microsecond=0)
    
    """
    Merge all Classes into one feature class using a priority rule
    """
    
    osmShps = {}
    for cls in _osmShps:
        if cls == '1':
            osmShps[1221] = shp_to_grs(_osmShps[cls], "osm_1221", asCMD=True)
        
        else:
            osmShps[int(cls)] = shp_to_grs(_osmShps[cls], "osm_" + cls,
                asCMD=True)
    
    # Erase overlapping areas by priority
    osmNameRef = copy.deepcopy(osmShps)
    
    for e in range(len(__priorities)):
        if e + 1 == len(__priorities): break
        
        if __priorities[e] not in osmShps:
            continue
        else:
            for i in range(e+1, len(__priorities)):
                if __priorities[i] not in osmShps:
                    continue
                else:
                    osmShps[__priorities[i]] = erase(
                        osmShps[__priorities[i]], osmShps[__priorities[e]],
                        "{}_{}".format(osmNameRef[__priorities[i]], e),
                        notTbl=True, api='pygrass'
                    )
    
    time_p = datetime.datetime.now().replace(microsecond=0)
    
    # Export all classes
    lst_merge = []
    a = None
    for i in range(len(__priorities)):
        if __priorities[i] not in osmShps:
            continue
        
        if not a:
            reset_table(
                osmShps[__priorities[i]],
                {'cls' : 'varchar(5)', 'leg' : 'varchar(75)'},
                {'cls' : str(__priorities[i]), 'leg' : str(__legend[__priorities[i]])}
            )
            
            a = 1
        
        else:
            add_and_update(
                osmShps[__priorities[i]],
                {'cls' : 'varchar(5)'},
                {'cls' : str(__priorities[i])}
            )
        
        ds = dissolve(
            osmShps[__priorities[i]],
            'dl_{}'.format(str(__priorities[i])), 'cls', api="grass"
        )
        
        add_fields(ds, {'leg': 'varchar(75)'}, api="grass")
        update_table(ds, 'leg', str(__legend[__priorities[i]]), 'leg is null')
        
        lst_merge.append(grs_to_shp(
            ds, os.path.join(
                workspace, "lulc_{}.shp".format(str(__priorities[i]))
            ), 'auto', lyrN=1, asCMD=True, asMultiPart=None
        ))
    
    time_q = datetime.datetime.now().replace(microsecond=0)
    
    if fprop(lulcShp, 'ff') != '.shp':
        lulcShp = os.path.join(
            os.path.dirname(lulcShp), fprop(lulcShp, 'fn') + '.shp')
    
    shps_to_shp(lst_merge, lulcShp, api='pandas')

    # Check if prj of lulcShp exists and create it if necessary
    prj_ff = os.path.splitext(lulcShp)[0] + '.prj'
    if not os.path.exists(prj_ff):
        def_prj(prj_ff, epsg=epsg, api='epsgio')
    
    time_r = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if RoadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(
            workspace, osm_db + '.sql'
        ), api='psql')
        drop_db(osm_db)
    
    return lulcShp, {
        0  : ('set_settings', time_b - time_a),
        1  : ('osm_to_sqdb', time_c - time_b),
        2  : ('cls_in_sqdb', time_d - time_c),
        3  : ('proj_data', time_e - time_d),
        4  : ('set_grass', time_f - time_e),
        5  : ('rule_1', time_g - time_f, timeCheck1),
        6  : ('rule_2', time_h - time_g, timeCheck2),
        7  : None if not timeCheck3 else ('rule_3', time_l - time_h, timeCheck3),
        8  : None if not timeCheck4 else ('rule_4', time_j - time_l, timeCheck4),
        9  : ('rule_5',
            time_m - time_j if timeCheck4 else time_m - time_h, timeCheck5),
        10 : None if not timeCheck7 else ('rule_7', time_n - time_m, timeCheck7),
        11 : ('disj_cls', time_o - time_n),
        12 : ('priority_rule', time_p - time_o),
        13 : ('export_cls', time_q - time_p),
        14 : ('merge_cls', time_r - time_q)
    }
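
A minimal usage sketch (hypothetical paths); it assumes GRASS GIS 7.8 is installed (the function calls run_grass with grassBIN='grass78') and, for RoadsAPI='POSTGIS', a PostgreSQL/PostGIS server reachable with the credentials configured for the glass package:

lulc, times = vector_based(
    '/tmp/region.osm.xml',    # OSM extract
    'URBAN_ATLAS',            # nomenclature
    '/tmp/ref_raster.tif',    # reference raster defining extent and cell size
    '/tmp/lulc.shp',          # output LULC shapefile
    overwrite=True,
    RoadsAPI='POSTGIS'
)

The second return value is the dictionary of per-step timings assembled at the end of the function.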
Example #6
File: confmtx.py Project: jasp382/glass
def confmtx_fm_pntsample(pnt, idcol, refcol, rst, clscol, out_mtx):
    """
    Confusion matrix using classified raster and point feature class with
    reference values
    """

    import pandas as pd
    import numpy as np
    from glass.g.rd.shp import shp_to_obj
    from glass.ng.wt import obj_to_tbl
    from glass.ng.cls.eval import get_measures_for_mtx
    from glass.g.tbl.col import add_fields, update_cols
    from glass.g.smp import rst_val_to_points

    # Extract raster values to points
    rval = rst_val_to_points(pnt, rst)

    # Add field to shape
    add_fields(pnt, {clscol: 'INTEGER'}, api='ogrinfo')

    # Update point table
    edit = {}
    for k in rval:
        if int(rval[k]) not in edit:
            edit[int(rval[k])] = ["{}={}".format(idcol, str(k))]
        else:
            edit[int(rval[k])].append("{}={}".format(idcol, str(k)))

    update_cols(pnt, clscol, edit)

    # Produce confusion matrix
    df = shp_to_obj(pnt)

    df[clscol] = df[clscol].astype(int)
    df[refcol] = df[refcol].astype(int)

    # Remove nan values
    df = df[pd.notnull(df[refcol])]
    df = df[pd.notnull(df[clscol])]

    # Get rows and Cols
    rows = df[clscol].unique()
    cols = df[refcol].unique()
    refval = list(np.sort(np.unique(np.append(rows, cols))))

    # Produce matrix
    outdf = []
    for row in refval:
        newcols = [row]
        for col in refval:
            newdf = df[(df[clscol] == row) & (df[refcol] == col)]

            if not newdf.shape[0]:
                newcols.append(0)

            else:
                newcols.append(newdf.shape[0])

        outdf.append(newcols)

    outcols = ['class'] + refval

    outdf = pd.DataFrame(outdf, columns=outcols)

    out_df = get_measures_for_mtx(outdf, 'class')

    return obj_to_tbl(out_df, out_mtx)
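
A minimal usage sketch (hypothetical paths and column names); it assumes the point shapefile has an integer id column and a reference-class column whose codes match the classes of the classified raster:

confmtx_fm_pntsample(
    '/tmp/validation_points.shp',  # points with ground-truth values
    'pid',                         # id column used to build the update expressions
    'ref',                         # reference (ground-truth) class column
    '/tmp/classified.tif',         # classified raster
    'cls',                         # new column that will receive the raster class
    '/tmp/confusion_matrix.xlsx'   # output table with the confusion matrix
)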
Example #7
File: mtx.py Project: jasp382/glass
def prod_matrix(origins,
                destinations,
                networkGrs,
                speedLimitCol,
                onewayCol,
                thrdId="1",
                asCmd=None):
    """
    Compute a distance/travel time matrix between origins and destinations
    """

    import os

    from glass.g.tbl import category
    from glass.g.tbl.filter import sel_by_attr
    from glass.g.tbl.col import add_fields
    from glass.g.tbl.grs import add_table, update_table
    from glass.g.mob.grstbx.vnet import add_pnts_to_network
    from glass.g.mob.grstbx.vnet import run_allpairs
    from glass.g.cp import copy_insame_vector
    from glass.g.tbl.attr import geomattr_to_db
    from glass.g.dp.mge import shps_to_shp
    from glass.g.prop.feat import feat_count
    from glass.g.it.shp import shp_to_grs

    # Merge Origins and Destinations into the same Feature Class
    ORIGINS_NFEAT = feat_count(origins, gisApi='pandas')
    DESTINATIONS_NFEAT = feat_count(destinations, gisApi='pandas')

    ORIGINS_DESTINATIONS = shps_to_shp([origins, destinations],
                                       os.path.join(
                                           os.path.dirname(origins),
                                           "points_od_{}.shp".format(thrdId)),
                                       api='pandas')

    pointsGrs = shp_to_grs(ORIGINS_DESTINATIONS,
                           "points_od_{}".format(thrdId),
                           asCMD=asCmd)

    # Connect Points to Network
    newNetwork = add_pnts_to_network(networkGrs,
                                     pointsGrs,
                                     "rdv_points_{}".format(thrdId),
                                     asCMD=asCmd)

    # Sanitize Network Table and Cost Columns
    newNetwork = category(newNetwork,
                          "rdv_points_time_{}".format(thrdId),
                          "add",
                          LyrN="3",
                          geomType="line",
                          asCMD=asCmd)

    add_table(newNetwork,
              ("cat integer,kph double precision,length double precision,"
               "ft_minutes double precision,"
               "tf_minutes double precision,oneway text"),
              lyrN=3,
              asCMD=asCmd)

    copy_insame_vector(newNetwork,
                       "kph",
                       speedLimitCol,
                       3,
                       geomType="line",
                       asCMD=asCmd)
    copy_insame_vector(newNetwork,
                       "oneway",
                       onewayCol,
                       3,
                       geomType="line",
                       asCMD=asCmd)

    geomattr_to_db(newNetwork,
                   "length",
                   "length",
                   "line",
                   createCol=False,
                   unit="meters",
                   lyrN=3,
                   ascmd=asCmd)

    update_table(newNetwork, "kph", "3.6", "kph IS NULL", lyrN=3, ascmd=asCmd)
    update_table(newNetwork, "kph", "3.6", "oneway = 'N'", lyrN=3, ascmd=asCmd)
    update_table(newNetwork,
                 "ft_minutes",
                 "(length * 60) / (kph * 1000.0)",
                 "ft_minutes IS NULL",
                 lyrN=3,
                 ascmd=asCmd)
    update_table(newNetwork,
                 "tf_minutes",
                 "(length * 60) / (kph * 1000.0)",
                 "tf_minutes IS NULL",
                 lyrN=3,
                 ascmd=asCmd)

    # Exaggerate the cost of travelling against oneway restrictions
    update_table(newNetwork,
                 "ft_minutes",
                 "1000",
                 "oneway = 'TF'",
                 lyrN=3,
                 ascmd=asCmd)
    update_table(newNetwork,
                 "tf_minutes",
                 "1000",
                 "oneway = 'FT'",
                 lyrN=3,
                 ascmd=asCmd)

    # Produce matrix
    matrix = run_allpairs(newNetwork,
                          "ft_minutes",
                          "tf_minutes",
                          'result_{}'.format(thrdId),
                          arcLyr=3,
                          nodeLyr=2,
                          asCMD=asCmd)

    # Exclude unwanted OD Pairs
    q = "({}) AND ({})".format(
        " OR ".join(
            ["from_cat={}".format(str(i + 1)) for i in range(ORIGINS_NFEAT)]),
        " OR ".join([
            "to_cat={}".format(str(ORIGINS_NFEAT + i + 1))
            for i in range(DESTINATIONS_NFEAT)
        ]))

    matrix_sel = sel_by_attr(matrix,
                             q,
                             "sel_{}".format(matrix),
                             geomType="line",
                             lyrN=3,
                             asCMD=asCmd)

    add_fields(matrix_sel, "from_fid", "INTEGER", lyrN=3, asCMD=asCmd)
    add_fields(matrix_sel, "to_fid", "INTEGER", lyrN=3, asCMD=asCmd)

    update_table(matrix_sel,
                 "from_fid",
                 "from_cat - 1",
                 "from_fid IS NULL",
                 lyrN=3,
                 ascmd=asCmd)
    update_table(matrix_sel,
                 "to_fid",
                 "to_cat - {} - 1".format(str(ORIGINS_NFEAT)),
                 "to_fid IS NULL",
                 lyrN=3,
                 ascmd=asCmd)

    return matrix_sel
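
A minimal usage sketch, assuming an active GRASS GIS session in which the road network was already imported as the vector map 'rdv_network' (hypothetical name) with speed-limit and oneway columns; the origin/destination shapefile paths are placeholders:

od_matrix = prod_matrix(
    '/tmp/origins.shp',
    '/tmp/destinations.shp',
    'rdv_network',
    'speed_kph',   # speed limit column, in km/h
    'oneway',      # oneway column with 'FT', 'TF' or 'N' values
    thrdId='1',    # suffix used to keep temporary maps from parallel runs apart
    asCmd=True
)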