Example #1
    def __init__(self, name, mapset=None, mode='r', stack=None, position='last', **kwargs):
        super(MVector, self).__init__(name, mapset, **kwargs)

        assert self.exist(), "%r not found" % name
        if mapset:
            self._set_mapset(mapset)
        else:
            self._get_mapset()
            glist = g.list(type='vector', pattern=name, stderr_=PIPE, stdout_=PIPE)
            assert len(glist.outputs.stdout.split()) == 1,\
                   "%r found in multiple mapsets. Specify mapset argument." % name

        path = os.path.join('$GISDBASE','$LOCATION_NAME', self.mapset,'sqlite','sqlite.db')
        self._con = sql.connect(gtable.get_path(path))
        self.region = Region()
        self.open(mode)
        #TODO: add reset method to stack
        self.stack = dict()
        if stack is None:
            self.stack = ListDict({0: {'type': 'v', 'map': self.full_name}})
        elif isinstance(stack, dict):
            stack = ListDict(stack)
            if isinstance(position, int):
                if position in stack.keys():
                    stack.insert_before(position, (max(stack.keys()) + 1,
                                        {'type': 'v', 'map': self.full_name}))
            elif position == 'last':
                stack.insert_after(stack.keys()[-1], (max(stack.keys()) + 1,
                                   {'type': 'v', 'map': self.full_name}))
            elif position == 'first':
                stack.insert_before(stack.keys()[0], (max(stack.keys()) + 1,
                                    {'type': 'v', 'map': self.full_name}))
            self.stack = stack
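A minimal usage sketch (the map names are hypothetical and the ListDict stack semantics are assumed from the code above; requires an active GRASS session):

# Hypothetical maps; 'streams' must exist in PERMANENT, 'roads' in the current mapset.
streams = MVector('streams', mapset='PERMANENT', mode='r')
# Register a second vector on the same display stack, appended after the first.
roads = MVector('roads', stack=dict(streams.stack), position='last')
print(roads.stack)  # expected: 'streams' at key 0, 'roads' appended last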
Example #2
def db_path():
    '''
    :returns: path to the GRASS SQLite database
    '''

    # get this path from gproject
    path = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'

    return table.get_path(path)
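A small usage sketch (assumed, not part of the source): open a connection to the mapset's SQLite database returned by db_path().

import sqlite3

# Requires an active GRASS session so $GISDBASE/$LOCATION_NAME/$MAPSET expand.
conn = sqlite3.connect(db_path())
tables = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
print(tables)
conn.close()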
def extendLine(map, map_out, maxlen=200, scale=0.5, debug=False, verbose=1):
    """Extend dangling lines in a vector map.

    map      Input map name
    map_out  Output map with extensions
    maxlen   Maximum length in map units that a line can be extended (default 200)
    scale    Maximum length of an extension as a proportion of the original line,
             disabled if 0 (default 0.5)
    vlen     Number of vertices to look back when calculating the line end
             direction (default 1); kept hard-coded rather than exposed as a
             parameter for now.
    """
    allowOverwrite = os.getenv('GRASS_OVERWRITE', '0') == '1'
    grass.info("map={}, map_out={}, maxlen={}, scale={}, debug={}".format(
        map, map_out, maxlen, scale, debug))
    vlen = 1  # not sure if this is worth putting in as parameter
    cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'parent', 'INTEGER'),
            (u'dend', 'TEXT'), (u'orgx', 'DOUBLE PRECISION'),
            (u'orgy', 'DOUBLE PRECISION'), (u'search_len', 'DOUBLE PRECISION'),
            (u'search_az', 'DOUBLE PRECISION'), (u'best_xid', 'INTEGER'),
            (u'near_x', 'DOUBLE PRECISION'), (u'near_y', 'DOUBLE PRECISION'),
            (u'other_cat', 'INTEGER'), (u'xtype', 'TEXT'),
            (u'x_len', 'DOUBLE PRECISION')]
    extend = VectorTopo('extend')
    if extend.exist():
        extend.remove()
    extend.open('w', tab_name='extend', tab_cols=cols)
    #
    # Go through the input map, looking at each line and its two nodes to find
    # nodes with only a single line starting/ending there - i.e. a dangle.
    # For each one found, generate an extension line in the new map "extend"
    #
    inMap = VectorTopo(map)
    inMap.open('r')
    dangleCnt = 0
    tickLen = len(inMap)
    grass.info("Searching {} features for dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")
    for ln in inMap:
        ticker = (ticker + 1)
        grass.percent(ticker, tickLen, 5)
        if ln.gtype == 2:  # Only process lines
            for nd in ln.nodes():
                if nd.nlines == 1:  # We have a dangle
                    dangleCnt = dangleCnt + 1
                    vtx = min(len(ln) - 1, vlen)
                    if len([1 for _ in nd.lines(only_out=True)
                            ]) == 1:  # Dangle starting at node
                        dend = "head"
                        sx = ln[0].x
                        sy = ln[0].y
                        dx = sx - ln[vtx].x
                        dy = sy - ln[vtx].y
                    else:  # Dangle ending at node
                        dend = "tail"
                        sx = ln[-1].x
                        sy = ln[-1].y
                        dx = sx - ln[-(vtx + 1)].x
                        dy = sy - ln[-(vtx + 1)].y
                    endaz = math.atan2(dy, dx)
                    if scale > 0:
                        extLen = min(ln.length() * scale, maxlen)
                    else:
                        extLen = maxlen
                    ex = extLen * math.cos(endaz) + sx
                    ey = extLen * math.sin(endaz) + sy
                    extLine = geo.Line([(sx, sy), (ex, ey)])
                    quiet = extend.write(extLine,
                                         (ln.cat, dend, sx, sy, extLen, endaz,
                                          0, 0, 0, 0, 'null', extLen))

    grass.info(
        "{} dangle nodes found, committing table extend".format(dangleCnt))
    extend.table.conn.commit()
    extend.close(build=True, release=True)
    inMap.close()

    #
    # Create two tables where extensions intersect;
    # 1. intersect with original lines
    # 2. intersect with self - to extract intersects between extensions
    #
    # First the intersects with original lines
    grass.info(
        "Searching for intersects between potential extensions and original lines"
    )
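    # Note: 'path' below is assumed to be defined at module level as the GRASS
    # SQLite database path (the $GISDBASE/$LOCATION_NAME/$MAPSET template used
    # in the db.execute calls), e.g. via a helper like db_path() above.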
    table_isectIn = Table('isectIn',
                          connection=sqlite3.connect(get_path(path)))
    if table_isectIn.exist():
        table_isectIn.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to=map,
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectIn")
    # Each extension will have touched the dangle it comes from, so remove those touches
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn INNER JOIN extend ON from_cat=cat WHERE near_cat=parent)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectIn ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectIn SET ntype = 'orig' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Now second self intersect table
    #
    grass.info("Searching for intersects of potential extensions")
    table_isectX = Table('isectX', connection=sqlite3.connect(get_path(path)))
    if table_isectX.exist():
        table_isectX.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to="extend",
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectX")
    # Obviously all extensions will intersect with themselves, so remove those "intersects"
    run_command("db.execute",
                sql="DELETE FROM isectX WHERE from_cat = near_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectX ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectX SET ntype = 'ext' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Combine the two tables and add a few more attributes
    #
    run_command("db.execute",
                sql="INSERT INTO isectIn SELECT * FROM isectX",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    cols_isectIn = Columns('isectIn',
                           connection=sqlite3.connect(get_path(path)))
    cols_isectIn.add(['from_x'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['from_y'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['ext_len'], ['DOUBLE PRECISION'])
    # Get starting coordinate at the end of the dangle
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_x = (SELECT extend.orgx FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_y = (SELECT extend.orgy FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    # For each intersect point, calculate the distance along the extension line
    # from the end of the dangle. Would be nicer to do this in the database,
    # but SQLite doesn't support sqrt or exponents
    grass.info(
        "Calculating distances of intersects along potential extensions")
    cur = table_isectIn.execute(
        sql_code="SELECT rowid, from_x, from_y, nx, ny FROM isectIn")
    for row in cur.fetchall():
        rowid, fx, fy, nx, ny = row
        x_len = math.sqrt((fx - nx)**2 + (fy - ny)**2)
        sqlStr = "UPDATE isectIn SET ext_len={:.8f} WHERE rowid={:d}".format(
            x_len, rowid)
        table_isectIn.execute(sql_code=sqlStr)
    grass.verbose("Ready to commit isectIn changes")
    table_isectIn.conn.commit()
    # Remove any intersects at zero distance from the end of their dangle.
    # This happens when another extension intersects exactly at that point
    run_command("db.execute",
                sql="DELETE FROM isectIn WHERE ext_len = 0.0",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()

    # Go through the extensions and find the intersect closest to each origin.
    grass.info("Searching for closest intersect for each potential extension")

    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN bst INTEGER"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nrx DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nry DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN ocat TEXT"
    #    run_command("db.execute",
    #                sql = "INSERT OR REPLACE INTO extend_t1 (bst, nrx, nry, ocat) VALUES ((SELECT isectIn.rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=extend_t1.cat ORDER BY ext_len ASC LIMIT 1))",
    #               driver = "sqlite",
    #               database = "$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    grass.verbose("CREATE index")
    run_command("db.execute",
                sql="CREATE INDEX idx_from_cat ON isectIn (from_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE best_xid")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET best_xid = (SELECT isectIn.rowid FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE x_len")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET x_len = (SELECT ext_len FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_x")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_x = (SELECT nx FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_y")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_y = (SELECT ny FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE other_cat")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET other_cat = (SELECT near_cat FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE xtype")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET xtype = (SELECT ntype FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("DROP index")
    run_command("db.execute",
                sql="DROP INDEX idx_from_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("CREATE index on near_cat")
    run_command("db.execute",
                sql="CREATE INDEX idx_near_cat ON isectIn (near_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    quiet = table_isectIn.filters.select('rowid', 'ext_len', 'nx', 'ny',
                                         'near_cat', 'ntype')
    #    quiet=table_isectIn.filters.order_by(['ext_len ASC'])
    quiet = table_isectIn.filters.order_by('ext_len ASC')
    quiet = table_isectIn.filters.limit(1)
    table_extend = Table('extend', connection=sqlite3.connect(get_path(path)))

    # Code below was replaced by the commands above until the memory problem can be sorted
    #    table_extend.filters.select('cat')
    #    cur=table_extend.execute()
    #    updateCnt = 0
    #    for row in cur.fetchall():
    #        cat, = row
    #        quiet=table_isectIn.filters.where('from_cat={:d}'.format(cat))

    ##SELECT rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=32734 ORDER BY ext_len ASC LIMIT 1

    #        x_sect=table_isectIn.execute().fetchone()
    #        if x_sect is not None:
    #            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
    #            sqlStr="UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
    #            table_extend.execute(sql_code=sqlStr)
    ## Try periodic commit to avoid a crash!
    #            updateCnt = (updateCnt + 1) % 10000
    #            if updateCnt == 0:
    #              table_extend.conn.commit()
    grass.verbose("Ready to commit extend changes")
    table_extend.conn.commit()
    #
    # There may be extensions that crossed, with the intersection chosen by one
    # but not reciprocated by the other.
    # Need to remove those possibilities and allow the jilted extension to re-search.
    #
    grass.verbose("Deleting intersects already resolved")
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn JOIN extend ON near_cat=cat WHERE ntype='ext' AND xtype!='null')",  #"AND from_cat!=other_cat" no second chance!
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    grass.verbose("Deleting complete")

    # To find the jilted extensions we need a copy of the extensions that have
    # found an intersection (db.copy won't overwrite, so drop the copy first)
    grass.verbose(
        "Re-searching for mis-matched intersects between potential extensions")
    table_imatch = Table('imatch', connection=sqlite3.connect(get_path(path)))
    if table_imatch.exist():
        table_imatch.drop(force=True)
    wvar = "xtype!='null'"
    run_command(
        "db.copy",
        overwrite=True,
        quiet=True,
        from_driver="sqlite",
        from_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        from_table="extend",
        to_driver="sqlite",
        to_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        to_table="imatch",
        where=wvar)
    # Memory problems?
    if gc.isenabled():
        grass.verbose("Garbage collection enabled - forcing gc cycle")
        gc.collect()
    else:
        grass.verbose("Garbage collection not enabled")
    # Ensure tables are committed
    table_extend.conn.commit()
    table_imatch.conn.commit()
    table_isectIn.conn.commit()
    # Identify the jilted
    sqlStr = "SELECT extend.cat FROM extend JOIN imatch ON extend.other_cat=imatch.cat WHERE extend.xtype='ext' and extend.cat!=imatch.other_cat"
    cur = table_extend.execute(sql_code=sqlStr)
    updateCnt = 0
    for row in cur.fetchall():
        cat, = row
        grass.verbose("Reworking extend.cat={}".format(cat))
        quiet = table_isectIn.filters.where('from_cat={:d}'.format(cat))
        #print("SQL: {}".format(table_isectIn.filters.get_sql()))
        x_sect = table_isectIn.execute().fetchone()  # Problem here under modules
        if x_sect is None:
            sqlStr = "UPDATE extend SET best_xid=0, x_len=search_len, near_x=0, near_y=0, other_cat=0, xtype='null' WHERE cat={:d}".format(
                cat)
        else:
            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
            sqlStr = "UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(
                x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
        table_extend.execute(sql_code=sqlStr)
        # Try periodic commit to avoid a crash!
        updateCnt = (updateCnt + 1) % 100
        if (updateCnt == 0):  # or (cat == 750483):
            grass.verbose(
                "XXXXXXXXXXX Committing table_extend XXXXXXXXXXXXXXXXXXXXXX")
            table_extend.conn.commit()

    grass.verbose("Committing adjustments to table extend")
    table_extend.conn.commit()
    #
    # For debugging, create a map with the chosen intersect points
    #
    if debug:
        wvar = "xtype!='null' AND x_len!=0"
        #        print(wvar)
        run_command(
            "v.in.db",
            overwrite=True,
            quiet=True,
            table="extend",
            driver="sqlite",
            database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
            x="near_x",
            y="near_y",
            key="cat",
            where=wvar,
            output="chosen")
    #
    # Finally adjust the dangle lines in the input map - use a copy (map_out) if requested
    #
    if map_out:
        run_command("g.copy",
                    overwrite=allowOverwrite,
                    quiet=True,
                    vector=map + "," + map_out)
    else:  # Otherwise just modify the original dataset (map)
        if allowOverwrite:
            grass.warning("Modifying vector map ({})".format(map))
            map_out = map
        else:
            grass.error(
                "Use switch --o to modify the input vector map ({})".format(
                    map))
            return 1
    #
    # Get info for lines that need extending
    #
    table_extend.filters.select(
        'parent, dend, near_x, near_y, search_az, xtype')
    table_extend.filters.where("xtype!='null'")
    extLines = table_extend.execute().fetchall()
    cat_mods = [ext[0] for ext in extLines]
    tickLen = len(cat_mods)
    grass.info("Extending {} dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")

    # Open up the map_out copy (or the original) and work through looking for lines that need modifying
    inMap = VectorTopo(map_out)
    inMap.open('rw', tab_name=map_out)

    for ln_idx in range(len(inMap)):
        ln = inMap.read(ln_idx + 1)
        if ln.gtype == 2:  # Only process lines
            while ln.cat in cat_mods:  # Note: could be 'head' and 'tail'
                ticker = (ticker + 1)
                grass.percent(ticker, tickLen, 5)
                cat_idx = cat_mods.index(ln.cat)
                cat, dend, nx, ny, endaz, xtype = extLines.pop(cat_idx)
                dump = cat_mods.pop(cat_idx)
                if xtype == 'orig':  # Overshoot by 0.1 because breaking lines is unreliable
                    nx = nx + 0.1 * math.cos(endaz)
                    ny = ny + 0.1 * math.sin(endaz)
                newEnd = geo.Point(x=nx, y=ny, z=None)
                if dend == 'head':
                    ln.insert(0, newEnd)
                else:  # 'tail'
                    ln.append(newEnd)
                quiet = inMap.rewrite(ln_idx + 1, ln)
        else:
            quiet = inMap.delete(ln_idx + 1)


        # Try periodic commit and garbage collection to avoid a crash!
        if (ln_idx % 1000) == 0:
            #           inMap.table.conn.commit()  - no such thing - Why??
            if gc.isenabled():
                quiet = gc.collect()

    inMap.close(build=True, release=True)
    grass.message("v.extendlines completing")
    #
    # Clean up temporary tables and maps
    #
    if not debug:
        table_isectIn.drop(force=True)
        table_isectX.drop(force=True)
        table_imatch.drop(force=True)
        extend.remove()
        chosen = VectorTopo('chosen')
        if chosen.exist():
            chosen.remove()
    return 0
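A hedged example of calling extendLine() from within a GRASS session (the map names are hypothetical; grass.script, VectorTopo, Table, Columns, get_path and the module-level path are assumed to be imported/defined as in the listing above):

# Extend dangles of 'streams' by at most 150 map units, writing the result
# to a copy named 'streams_ext' rather than modifying the original.
rc = extendLine('streams', 'streams_ext', maxlen=150, scale=0.5, debug=False)
if rc != 0:
    grass.fatal("extendLine did not complete successfully")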
Example #4
class DBconnection(object):
    """Define a class to share common methods between TestCase."""
    path = os.path.join(tmp.gettempdir(), randstr(prefix='temp', suffix='.db'))
    connection = sqlite3.connect(get_path(path))
    for t in (np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16,
              np.uint32, np.uint64):
        sqlite3.register_adapter(t, int)
    columns = [('cat', 'INTEGER PRIMARY KEY'), ('cint', 'INT'),
               ('creal', 'REAL'), ('ctxt', 'TEXT')]

    def create_table_instance(self, **kw):
        """Return a Table class instance

        :param **kw: keyword arguments of Table class
                     without name and connection.
        :type **kw: key-word arguments

        :returns: Table instance
        """
        self.tname = randstr(prefix='temp')
        return Table(name=self.tname, connection=self.connection, **kw)

    def create_empty_table(self, columns=None, **kw):
        """Create an empty table in the database and return Table class
        instance.

        :param columns: list of tuple containing the column names and types.
        :type columns: list of tuple

        :param **kw: keyword arguments of Table class
                     without name and connection.
        :type **kw: key-word arguments

        :returns: Table instance
        """
        columns = self.columns if columns is None else columns
        table = self.create_table_instance(**kw)
        table.create(columns)
        return table

    def create_not_empty_table(self,
                               nrows=None,
                               values=None,
                               columns=None,
                               **kw):
        """Create a not empty table in the database and return Table class
        instance.

        :param nrows: number of rows to generate.
        :type nrows: int

        :param values: list of tuple containing the values for each row.
        :type values: list of tuple

        :param columns: list of tuple containing the column names and types.
        :type columns: list of tuple

        :param **kw: keyword arguments of Table class
                     without name and connection.
        :type **kw: key-word arguments

        :returns: Table instance
        """
        if nrows is None and values is None:
            msg = "Both parameters ``nrows`` ``values`` are empty"
            raise RuntimeError(msg)
        columns = self.columns if columns is None else columns
        values = (get_table_random_values(nrows, columns)
                  if values is None else values)
        table = self.create_empty_table(columns=columns, **kw)
        table.insert(values, many=True)
        return table

    def setUp(self):
        """Create a not empty table instance"""
        self.table = self.create_not_empty_table(10)
        self.cols = self.table.columns

    def tearDown(self):
        """Remove the generated vector map, if exist"""
        self.table.drop(force=True)
        self.table = None
        self.cols = None
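A minimal sketch (assumed usage, not from the source) of how the DBconnection mixin is meant to be combined with unittest.TestCase, reusing the shared setUp()/tearDown() defined above:

import unittest

class TableRowsTestCase(DBconnection, unittest.TestCase):
    """Hypothetical test case reusing the shared fixtures."""

    def test_number_of_rows(self):
        # setUp() created a table filled with 10 random rows
        self.assertEqual(self.table.n_rows(), 10)


if __name__ == '__main__':
    unittest.main()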
Example #5
    def add_color_table(self, cbrew=False, color=None, column=None, attr=None, layer='1',
                        nk=5, s=None, cramp=('Diverging','RdYlBu'), method='Equal_Interval',
                        invert_ramp=False, *args, **kwargs):
        """
        Add/replace color table

        Keyword     Description
        ---------- ----------------------------------------------------------------------
        nk          number of classes
        column      layer column to color-classify. If s is not provided, column is needed.
                    Default=None
        s           pandas series or numpy array. If column is not provided, s is needed.
                    Default=None
        cbrew       Boolean. If True, the color scheme and color ramp use Color Brewer
                    (http://colorbrewer2.org/). Default=False
        color       Named color rule passed to v.colors; required when cbrew=False.
                    Default=None
        cramp       Tuple of (scheme, ramp name) used for the Color Brewer ramp.
                    Default=('Diverging', 'RdYlBu')
        method      pysal.mapclassify method. Options: Map_Classifier, Box_Plot,
                    Equal_Interval, Fisher_Jenks, Jenks_Caspall, Jenks_Caspall_Forced,
                    Jenks_Caspall_Sampled, Max_P_Classifier, Maximum_Breaks,
                    Natural_Breaks, Quantiles, Percentiles, Std_Mean, User_Defined.
                    Default=Equal_Interval. See the PySAL documentation for the valid
                    parameters in each case
                    (http://pysal.org/1.2/library/esda/mapclassify.html).
        invert_ramp Boolean. Default False.
        layer       GRASS layer where the rgb column will be added
        *args       Additional arguments for v.colors
        **kwargs    Additional arguments for rgb_classify. For example, if
                    method='User_Defined' then the argument ``bins`` is required.
        ---------- ----------------------------------------------------------------------
        """
        #todo: allow editing if current mapset and mapset are PERMANENT
        #assert self.mapset is not 'PERMANENT',\
        #       '%r is in PERMANENT and is not writable by user' % self.name

        assert int(layer) <= self.dblinks.num_dblinks(), 'layer %r does not exist' % layer

        if cbrew:
            table_name = self.dblinks.by_layer(int(layer)).name
            sqlfilter = gtable.Table(name=table_name, connection=self._con)
            assert sqlfilter.exist(), 'table %r not found in mapset %r. Verify dblinks.'\
                                       % (sqlfilter.name, self.mapset)

            if hasattr(s, '__len__'):
                dtype = s.dtype.type
                assert (np.issubdtype(dtype, np.integer) or np.issubdtype(dtype, np.floating)),\
                       'Numpy array dtype must be integer or float'

                sqlfilter.filters.select(sqlfilter.key)
                cur = sqlfilter.execute()
                cat = cur.fetchall()
                cat = np.array(cat)
                cat = cat.flatten()
                assert s.shape == cat.shape, 'Series does not match map key shape'

            if column:
                assert column in sqlfilter.columns.names(),\
                       '%r has not column %r' % (table_name, column)

                sqlfilter.filters.select(sqlfilter.key, column)
                cur = sqlfilter.execute()
                col = cur.fetchall()
                col = np.array(col).T
                s   = col[1]
                cat = col[0]
                dtype = s.dtype.type
                if not (np.issubdtype(dtype, np.integer) or np.issubdtype(dtype, np.floating)):
                    nulls = np.where(s == '')[0]
                    if nulls.any():
                        s[s == ''] = np.nan
                    s = s.astype(float)

            s = pd.Series(s)
            s_rgb = rgb_classify(s, nk, cramp=cramp, method=method, invert_ramp=invert_ramp,
                                 **kwargs)

            #TODO convert this into a save_colr_file function
            # Write color table
            mapset = Mapset()
            mapset.current()
            if self.mapset == mapset.name:
                filename = os.path.join('$GISDBASE','$LOCATION_NAME', self.mapset,
                                        'vector', self.name, 'colr')
            else:
                filename = os.path.join('$GISDBASE','$LOCATION_NAME', mapset.name,
                                        'vcolr2', self.mapset, self.name)
            filename = gtable.get_path(filename)
            directory = os.path.dirname(filename)
            if not os.path.exists(directory):
                os.makedirs(directory)
            with open(filename,'w') as f:
                f.write('% ' +  '%r %r' % (int(min(cat)), self.table.n_rows()) + '\n')
                for i,item in enumerate(s_rgb):
                    f.write(str(cat[i]) + ':' + item + '\n')

        else:
            assert color, 'color attribute is required'
            if column:
                v.colors(map=self.full_name, layer=layer, column=column, color=color, *args)
            else:
                v.colors(map=self.full_name, color=color, *args)
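A hedged usage sketch (the map name, column name and ramp choice are hypothetical, and it assumes this method belongs to the MVector class from Example #1):

# Hypothetical map; requires an active GRASS session.
basins = MVector('basins', mode='r')
# Color Brewer classification of an attribute column into 7 quantile classes:
basins.add_color_table(cbrew=True, column='area_km2', nk=7,
                       cramp=('Sequential', 'Blues'), method='Quantiles')
# Or fall back to a plain v.colors rule:
basins.add_color_table(color='bgyr', column='area_km2')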