Example #1
def extendLine(map, map_out, maxlen=200, scale=0.5, debug=False, verbose=1):
    #
    # map=Input map name
    # map_out=Output map with extensions
    # maxlen=Max length in map units that line can be extended (def=200)
    # scale=Maximum length of extension as proportion of original line, disabled if 0 (def=0.5)
    # vlen=Number of vertices to look back in calculating line end direction (def=1)
    # Not sure if it is worth putting this in as a parameter.
    #
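    # Example call (hypothetical map names):
    #   extendLine("streams", "streams_ext", maxlen=200, scale=0.5)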
    allowOverwrite = os.getenv('GRASS_OVERWRITE', '0') == '1'
    grass.info("map={}, map_out={}, maxlen={}, scale={}, debug={}".format(
        map, map_out, maxlen, scale, debug))
    vlen = 1  # not sure if this is worth exposing as a parameter
    cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'parent', 'INTEGER'),
            (u'dend', 'TEXT'), (u'orgx', 'DOUBLE PRECISION'),
            (u'orgy', 'DOUBLE PRECISION'), (u'search_len', 'DOUBLE PRECISION'),
            (u'search_az', 'DOUBLE PRECISION'), (u'best_xid', 'INTEGER'),
            (u'near_x', 'DOUBLE PRECISION'), (u'near_y', 'DOUBLE PRECISION'),
            (u'other_cat', 'INTEGER'), (u'xtype', 'TEXT'),
            (u'x_len', 'DOUBLE PRECISION')]
    extend = VectorTopo('extend')
    if extend.exist():
        extend.remove()
    extend.open('w', tab_name='extend', tab_cols=cols)
    #
    # Go through the input map, looking at each line and its two nodes to find nodes
    # with only a single line starting/ending there - i.e. a dangle.
    # For each found, generate an extension line in the new map "extend"
    #
    inMap = VectorTopo(map)
    inMap.open('r')
    dangleCnt = 0
    tickLen = len(inMap)
    grass.info("Searching {} features for dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")
    for ln in inMap:
        ticker = (ticker + 1)
        grass.percent(ticker, tickLen, 5)
        if ln.gtype == 2:  # Only process lines
            for nd in ln.nodes():
                if nd.nlines == 1:  # We have a dangle
                    dangleCnt = dangleCnt + 1
                    vtx = min(len(ln) - 1, vlen)
                    if len([1 for _ in nd.lines(only_out=True)
                            ]) == 1:  # Dangle starting at node
                        dend = "head"
                        sx = ln[0].x
                        sy = ln[0].y
                        dx = sx - ln[vtx].x
                        dy = sy - ln[vtx].y
                    else:  # Dangle ending at node
                        dend = "tail"
                        sx = ln[-1].x
                        sy = ln[-1].y
                        dx = sx - ln[-(vtx + 1)].x
                        dy = sy - ln[-(vtx + 1)].y
                    endaz = math.atan2(dy, dx)
                    if scale > 0:
                        extLen = min(ln.length() * scale, maxlen)
                    else:
                        extLen = maxlen
                    ex = extLen * math.cos(endaz) + sx
                    ey = extLen * math.sin(endaz) + sy
                    extLine = geo.Line([(sx, sy), (ex, ey)])
                    quiet = extend.write(extLine,
                                         (ln.cat, dend, sx, sy, extLen, endaz,
                                          0, 0, 0, 0, 'null', extLen))

    grass.info(
        "{} dangle nodes found, committing table extend".format(dangleCnt))
    extend.table.conn.commit()
    extend.close(build=True, release=True)
    inMap.close()

    #
    # Create two tables where extensions intersect;
    # 1. intersect with original lines
    # 2. intersect with self - to extract intersects between extensions
    #
    # First the intersects with original lines
    grass.info(
        "Searching for intersects between potential extensions and original lines"
    )
    table_isectIn = Table('isectIn',
                          connection=sqlite3.connect(get_path(path)))
    if table_isectIn.exist():
        table_isectIn.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to=map,
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectIn")
    # Each extension will have touched the dangle it comes from, so remove those touches
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn INNER JOIN extend ON from_cat=cat WHERE near_cat=parent)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectIn ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectIn SET ntype = 'orig' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Now second self intersect table
    #
    grass.info("Searching for intersects of potential extensions")
    table_isectX = Table('isectX', connection=sqlite3.connect(get_path(path)))
    if table_isectX.exist():
        table_isectX.drop(force=True)
    run_command("v.distance",
                flags='a',
                overwrite=True,
                quiet=True,
                from_="extend",
                from_type="line",
                to="extend",
                to_type="line",
                dmax="0",
                upload="cat,dist,to_x,to_y",
                column="near_cat,dist,nx,ny",
                table="isectX")
    # Obviously every extension will intersect with itself, so remove those "intersects"
    run_command("db.execute",
                sql="DELETE FROM isectX WHERE from_cat = near_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="ALTER TABLE isectX ADD ntype VARCHAR",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command("db.execute",
                sql="UPDATE isectX SET ntype = 'ext' ",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    #
    # Combine the two tables and add a few more attributes
    #
    run_command("db.execute",
                sql="INSERT INTO isectIn SELECT * FROM isectX",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    cols_isectIn = Columns('isectIn',
                           connection=sqlite3.connect(get_path(path)))
    cols_isectIn.add(['from_x'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['from_y'], ['DOUBLE PRECISION'])
    cols_isectIn.add(['ext_len'], ['DOUBLE PRECISION'])
    # Get starting coordinate at the end of the dangle
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_x = (SELECT extend.orgx FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    run_command(
        "db.execute",
        sql=
        "UPDATE isectIn SET from_y = (SELECT extend.orgy FROM extend WHERE from_cat=extend.cat)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    # For each intersect point, calculate the distance along the extension line from the end of the dangle.
    # Would be nicer to do this in the database, but SQLite doesn't support sqrt or exponents
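    # (Assumption: newer SQLite builds - 3.35+ compiled with the math functions
    # enabled - do provide sqrt() and pow(), so this could be pushed into SQL;
    # the Python loop below is kept for portability.)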
    grass.info(
        "Calculating distances of intersects along potential extensions")
    cur = table_isectIn.execute(
        sql_code="SELECT rowid, from_x, from_y, nx, ny FROM isectIn")
    for row in cur.fetchall():
        rowid, fx, fy, nx, ny = row
        x_len = math.sqrt((fx - nx)**2 + (fy - ny)**2)
        sqlStr = "UPDATE isectIn SET ext_len={:.8f} WHERE rowid={:d}".format(
            x_len, rowid)
        table_isectIn.execute(sql_code=sqlStr)
    grass.verbose("Ready to commit isectIn changes")
    table_isectIn.conn.commit()
    # Remove any intersects at zero distance from the end of their dangle.
    # This happens when another extension intersects exactly at that point
    run_command("db.execute",
                sql="DELETE FROM isectIn WHERE ext_len = 0.0",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()

    # Go through the extensions and find the intersect closest to each origin.
    grass.info("Searching for closest intersect for each potential extension")

    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN bst INTEGER"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nrx DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN nry DOUBLE PRECISION"
    # db.execute sql="ALTER TABLE extend_t1 ADD COLUMN ocat TEXT"
    #    run_command("db.execute",
    #                sql = "INSERT OR REPLACE INTO extend_t1 (bst, nrx, nry, ocat) VALUES ((SELECT isectIn.rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=extend_t1.cat ORDER BY ext_len ASC LIMIT 1))",
    #               driver = "sqlite",
    #               database = "$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    grass.verbose("CREATE index")
    run_command("db.execute",
                sql="CREATE INDEX idx_from_cat ON isectIn (from_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE best_xid")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET best_xid = (SELECT isectIn.rowid FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE x_len")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET x_len = (SELECT ext_len FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_x")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_x = (SELECT nx FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE near_y")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET near_y = (SELECT ny FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE other_cat")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET other_cat = (SELECT near_cat FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("UPDATE xtype")
    run_command(
        "db.execute",
        sql=
        "UPDATE extend SET xtype = (SELECT ntype FROM isectIn WHERE from_cat=extend.cat ORDER BY ext_len ASC LIMIT 1)",
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("DROP index")
    run_command("db.execute",
                sql="DROP INDEX idx_from_cat",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    grass.verbose("CREATE index on near_cat")
    run_command("db.execute",
                sql="CREATE INDEX idx_near_cat ON isectIn (near_cat)",
                driver="sqlite",
                database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")

    quiet = table_isectIn.filters.select('rowid', 'ext_len', 'nx', 'ny',
                                         'near_cat', 'ntype')
    #    quiet=table_isectIn.filters.order_by(['ext_len ASC'])
    quiet = table_isectIn.filters.order_by('ext_len ASC')
    quiet = table_isectIn.filters.limit(1)
    table_extend = Table('extend', connection=sqlite3.connect(get_path(path)))

    # The code below was replaced by the commands above until the memory problem can be sorted out
    #    table_extend.filters.select('cat')
    #    cur=table_extend.execute()
    #    updateCnt = 0
    #    for row in cur.fetchall():
    #        cat, = row
    #        quiet=table_isectIn.filters.where('from_cat={:d}'.format(cat))

    ##SELECT rowid, ext_len, nx, ny, near_cat, ntype FROM isectIn WHERE from_cat=32734 ORDER BY ext_len ASC LIMIT 1

    #        x_sect=table_isectIn.execute().fetchone()
    #        if x_sect is not None:
    #            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
    #            sqlStr="UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
    #            table_extend.execute(sql_code=sqlStr)
    ## Try periodic commit to avoid a crash!
    #            updateCnt = (updateCnt + 1) % 10000
    #            if updateCnt == 0:
    #              table_extend.conn.commit()
    grass.verbose("Ready to commit extend changes")
    table_extend.conn.commit()
    #
    # There may be extensions that crossed, and that intersection chosen by one but
    # not "reciprocated" by the other.
    # Need to remove those possibilities and allow the jilted extension to re-search.
    #
    grass.verbose("Deleting intersects already resolved")
    run_command(
        "db.execute",
        sql=
        "DELETE FROM isectIn WHERE rowid IN (SELECT isectIn.rowid FROM isectIn JOIN extend ON near_cat=cat WHERE ntype='ext' AND xtype!='null')",  #"AND from_cat!=other_cat" no second chance!
        driver="sqlite",
        database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db")
    table_isectIn.conn.commit()
    grass.verbose("Deleting complete")

    # To find the jilted - need a copy of extensions that have found an
    # intersection (won't overwrite so drop first)
    grass.verbose(
        "Re-searching for mis-matched intersects between potential extensions")
    table_imatch = Table('imatch', connection=sqlite3.connect(get_path(path)))
    if table_imatch.exist():
        table_imatch.drop(force=True)
    wvar = "xtype!='null'"
    run_command(
        "db.copy",
        overwrite=True,
        quiet=True,
        from_driver="sqlite",
        from_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        from_table="extend",
        to_driver="sqlite",
        to_database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
        to_table="imatch",
        where=wvar)
    # Memory problems?
    if gc.isenabled():
        grass.verbose("Garbage collection enabled - forcing gc cycle")
        gc.collect()
    else:
        grass.verbose("Garbage collection not enabled")
    # Ensure tables are committed
    table_extend.conn.commit()
    table_imatch.conn.commit()
    table_isectIn.conn.commit()
    # Identify the jilted
    sqlStr = "SELECT extend.cat FROM extend JOIN imatch ON extend.other_cat=imatch.cat WHERE extend.xtype='ext' and extend.cat!=imatch.other_cat"
    cur = table_extend.execute(sql_code=sqlStr)
    updateCnt = 0
    for row in cur.fetchall():
        cat, = row
        grass.verbose("Reworking extend.cat={}".format(cat))
        quiet = table_isectIn.filters.where('from_cat={:d}'.format(cat))
        #print("SQL: {}".format(table_isectIn.filters.get_sql()))
        x_sect = table_isectIn.execute().fetchone()  ## Problem here under modules
        if x_sect is None:
            sqlStr = "UPDATE extend SET best_xid=0, x_len=search_len, near_x=0, near_y=0, other_cat=0, xtype='null' WHERE cat={:d}".format(
                cat)
        else:
            x_rowid, ext_len, nx, ny, other_cat, ntype = x_sect
            sqlStr = "UPDATE extend SET best_xid={:d}, x_len={:.8f}, near_x={:.8f}, near_y={:.8f}, other_cat={:d}, xtype='{}' WHERE cat={:d}".format(
                x_rowid, ext_len, nx, ny, other_cat, ntype, cat)
        table_extend.execute(sql_code=sqlStr)
        ## Try periodic commit to avoid a crash!
        updateCnt = (updateCnt + 1) % 100
        if (updateCnt == 0):  # or (cat == 750483):
            grass.verbose(
                "XXXXXXXXXXX Committing table_extend XXXXXXXXXXXXXXXXXXXXXX")
            table_extend.conn.commit()

    grass.verbose("Committing adjustments to table extend")
    table_extend.conn.commit()
    #
    # For debugging, create a map with the chosen intersect points
    #
    if debug:
        wvar = "xtype!='null' AND x_len!=0"
        #        print(wvar)
        run_command(
            "v.in.db",
            overwrite=True,
            quiet=True,
            table="extend",
            driver="sqlite",
            database="$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db",
            x="near_x",
            y="near_y",
            key="cat",
            where=wvar,
            output="chosen")
    #
    # Finally adjust the dangle lines in the input map - use a copy (map_out) if requested
    #
    if map_out:
        run_command("g.copy",
                    overwrite=allowOverwrite,
                    quiet=True,
                    vector=map + "," + map_out)
    else:  # Otherwise just modify the original dataset (map)
        if allowOverwrite:
            grass.warning("Modifying vector map ({})".format(map))
            map_out = map
        else:
            grass.error(
                "Use the --o switch to modify the input vector map ({})".format(
                    map))
            return 1
    #
    # Get info for lines that need extending
    table_extend.filters.select(
        'parent, dend, near_x, near_y, search_az, xtype')
    table_extend.filters.where("xtype!='null'")
    extLines = table_extend.execute().fetchall()
    cat_mods = [ext[0] for ext in extLines]
    tickLen = len(cat_mods)
    grass.info("Extending {} dangles".format(tickLen))
    ticker = 0
    grass.message("Percent complete...")

    # Open up the map_out copy (or the original) and work through looking for lines that need modifying
    inMap = VectorTopo(map_out)
    inMap.open('rw', tab_name=map_out)

    for ln_idx in range(len(inMap)):
        ln = inMap.read(ln_idx + 1)
        if ln.gtype == 2:  # Only process lines
            while ln.cat in cat_mods:  # Note: could be 'head' and 'tail'
                ticker = (ticker + 1)
                grass.percent(ticker, tickLen, 5)
                cat_idx = cat_mods.index(ln.cat)
                cat, dend, nx, ny, endaz, xtype = extLines.pop(cat_idx)
                dump = cat_mods.pop(cat_idx)
                if xtype == 'orig':  # Overshoot by 0.1 as breaking lines is unreliable
                    nx = nx + 0.1 * math.cos(endaz)
                    ny = ny + 0.1 * math.sin(endaz)
                newEnd = geo.Point(x=nx, y=ny, z=None)
                if dend == 'head':
                    ln.insert(0, newEnd)
                else:  # 'tail'
                    ln.append(newEnd)
                quiet = inMap.rewrite(ln_idx + 1, ln)
        else:
            quiet = inMap.delete(ln_idx + 1)


        ## Try periodic commit and garbage collection to avoid a crash!
        if (ln_idx % 1000) == 0:
            #           inMap.table.conn.commit()  - no such thing - Why??
            if gc.isenabled():
                quiet = gc.collect()

    inMap.close(build=True, release=True)
    grass.message("v.extendlines completing")
    #
    # Clean up temporary tables and maps
    #
    if not debug:
        table_isectIn.drop(force=True)
        table_isectX.drop(force=True)
        table_imatch.drop(force=True)
        extend.remove()
        chosen = VectorTopo('chosen')
        if chosen.exist():
            chosen.remove()
    return 0
Example #2
def create_maps(parsed_obs, offering, seconds_granularity, event_time):
    """Create vector map representing offerings and observed properties.

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group them up
    :param seconds_granularity: Granularity in seconds
    :param event_time: Timestamp of the first/last requested observation
    """
    timestamp_pattern = '%Y-%m-%dT%H:%M:%S'  # TODO: Timezone
    start_time = event_time.split('+')[0]
    epoch_s = int(time.mktime(time.strptime(start_time, timestamp_pattern)))
    end_time = event_time.split('+')[1].split('/')[1]
    epoch_e = int(time.mktime(time.strptime(end_time, timestamp_pattern)))

    for key, observation in parsed_obs.items():

        run_command('g.message',
                    message='Creating vector maps for {}...'.format(key))

        map_name = '{}_{}_{}'.format(options['output'], offering, key)
        if ':' in map_name:
            map_name = '_'.join(map_name.split(':'))
        if '-' in map_name:
            map_name = '_'.join(map_name.split('-'))
        if '.' in map_name:
            map_name = '_'.join(map_name.split('.'))

        run_command('t.create',
                    output=map_name,
                    type='stvds',
                    title='Dataset for offering {} and observed '
                    'property {}'.format(offering, key),
                    description='Vector space time dataset')

        free_cat = 1
        points = dict()
        new = VectorTopo(map_name)
        if overwrite() is True:
            try:
                new.remove()
            except:
                pass

        data = json.loads(observation)

        cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'name', 'VARCHAR'),
                (u'value', 'DOUBLE')]

        intervals = {}
        for secondsStamp in range(epoch_s, epoch_e + 1, seconds_granularity):
            intervals.update({secondsStamp: dict()})

        timestamp_pattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone

        for a in data['features']:
            name = a['properties']['name']
            if a['properties']['name'] not in points.keys():
                if new.is_open() is False:
                    new.open('w')
                points.update({a['properties']['name']: free_cat})
                new.write(Point(*a['geometry']['coordinates']))
                free_cat += 1

            for timestamp, value in a['properties'].items():
                if timestamp != 'name':
                    observationstart_time = timestamp[:-4]
                    seconds_timestamp = int(
                        time.mktime(
                            time.strptime(observationstart_time,
                                          timestamp_pattern)))
                    for interval in intervals.keys():
                        if interval <= seconds_timestamp < (
                                interval + seconds_granularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(float(value))
                            else:
                                intervals[interval].update(
                                    {name: [float(value)]})
                            break

        if new.is_open():
            new.close(build=False)
            run_command('v.build', map=map_name, quiet=True)

        i = 1
        layers_timestamps = list()
        for interval in intervals.keys():
            if len(intervals[interval]) != 0:
                timestamp = datetime.datetime.fromtimestamp(interval).strftime(
                    't%Y%m%dT%H%M%S')
                table_name = '{}_{}_{}_{}'.format(options['output'], offering,
                                                  key, timestamp)
                if ':' in table_name:
                    table_name = '_'.join(table_name.split(':'))
                if '-' in table_name:
                    table_name = '_'.join(table_name.split('-'))
                if '.' in table_name:
                    table_name = '_'.join(table_name.split('.'))

                new.open('rw')
                db = '$GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db'
                link = Link(layer=i,
                            name=table_name,
                            table=table_name,
                            key='cat',
                            database=db,
                            driver='sqlite')
                new.dblinks.add(link)
                new.table = new.dblinks[i - 1].table()
                new.table.create(cols)

                i += 1
                layers_timestamps.append(timestamp)

                for name, values in intervals[interval].items():
                    if options['method'] == 'average':
                        aggregated_value = sum(values) / len(values)
                    elif options['method'] == 'sum':
                        aggregated_value = sum(values)

                    new.table.insert(
                        tuple([points[name], name, aggregated_value]))
                    new.table.conn.commit()

                new.close(build=False)
                run_command('v.build', map=map_name, quiet=True)

        create_temporal(map_name, i, layers_timestamps)
Example #3
    def structures(self, elev, stream=None, ndigits=0, resolution=None, contour=None):
        """Return a tuple with lines structres options of a hypotetical plant.

        ::

              river
               \ \
                \i\-------_______
                |\ \              \
                | \ \              \
                )  \ \              )  cond0
               /    \ \           /
              /      \ \         /
             ( cond1  \ \       /
              \        \ \      |
               \        \ \     |
                \        \ \    |
                 o--------\r\---o
               pstk1       \ \   pstk0
                            \ \

        Parameters
        ----------

        elev: raster
            Raster instance already opened with the elevation.
        intake_pnt: point
            It is the point of the intake.
        restitution_pnt: point
            It is the point of the restitution.

        Returns
        -------

        a list of tuples: [(HydroStruct(intake, conduct=cond0, penstock=pstk0),
                            HydroStruct(intake, conduct=cond1, penstock=pstk1))]
           Return a list of tuples, each containing two HydroStruct: the first
           with the shortest penstock and the second with the other option.
        """

        def get_struct(contur, respoint):
            """Return the lines of the conduct and the penstock.

            Parameters
            ----------

            contur: line segment
                It is a line segment of the contour line split with the splitline
                function, where the first point is the intake.
            respoint: point
                It is the point of the plant restitution.

            Returns
            -------

            tuple: (conduct, penstock)
               Return two lines, the first with the conduct and the second with
               the penstock. Note: both conduct and penstock lines are coherent
               with water flow direction.
            """
            dist = contur.distance(respoint)
            conduct = contur.segment(0, dist.sldist)
            penstock = Line([dist.point, respoint])
            return conduct, penstock

        def get_all_structs(contur, itk, res):
            l0, l1 = splitline(contur, itk.point, 3 * itk.point.distance(res.point))
            # get structs
            c0, p0 = get_struct(l0, res.point)
            c1, p1 = get_struct(l1, res.point)
            s0, s1 = "option0", "option1"
            # TODO: uncomment this to have the left and right distinction...
            # but so far it is not working properly, therefore it is commented out.
            # if stream is not None:
            #    sitk = stream.find['by_point'].geo(itk.point, maxdist=100000)
            #    s0, s1 = (('right', 'left') if isinverted(sitk, elev, reg)
            #              else ('left', 'right'))
            return (HydroStruct(itk, c0, p0, s0), HydroStruct(itk, c1, p1, s1))

        result = []
        if contour is None:
            levels = sorted(
                set(
                    [
                        closest(itk.elevation, ndigits=ndigits, resolution=resolution)
                        for itk in self.intakes
                    ]
                )
            )

            # generate the contour lines that pass through the intake points
            contour_tmp = "tmpvect%04d" % random.randint(1000, 9999)
            r.contour(
                input="%s@%s" % (elev.name, elev.mapset),
                output=contour_tmp,
                step=0,
                levels=levels,
                overwrite=True,
            )

            cnt = VectorTopo(contour_tmp)
            cnt.open()
        else:
            cnt = contour

        for itk in self.intakes:
            # find the contour line closest to the restitution point
            contur_res = cnt.find["by_point"].geo(
                self.restitution.point, maxdist=100000.0
            )

            # TODO: finding the contour line for both the intake and the
            # restitution is probably not necessary, and we could also remove
            # the check below: contur_itk.id != contur_res.id
            contur_itk = cnt.find["by_point"].geo(itk.point, maxdist=100000.0)
            if contur_itk is None or contur_res is None:
                msg = (
                    "Not able to find the contour line closest to the "
                    "intake point %r, of the plant %r "
                    "from the contour line map: %s"
                )
                raise TypeError(msg % (itk, self, cnt.name))
            if contur_itk.id != contur_res.id:
                print("=" * 30)
                print(itk)
                msg = (
                    "Contour lines are different! %d != %d, in %s. "
                    "Therefore %d will be used."
                )
                print(msg % (contur_itk.id, contur_res.id, cnt.name, contur_itk.id))

            # check contour
            contur = not_overlaped(contur_itk)
            contur = sort_by_west2east(contur)
            result.append(get_all_structs(contur, itk, self.restitution))

        # remove temporary vector map
        if contour is None:
            cnt.close()
            cnt.remove()
        return result
Example #4
def create_maps(parsed_obs, offering, secondsGranularity, resolution):
    """
    Create raster maps representing offerings, observed properties and procedures.

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group them up
    :param secondsGranularity: Granularity in seconds
    :param resolution: 2D grid resolution for rasterization
    """

    timestampPattern = '%Y-%m-%dT%H:%M:%S'  # TODO: Timezone
    startTime = options['event_time'].split('+')[0]
    epochS = int(time.mktime(time.strptime(startTime, timestampPattern)))
    endTime = options['event_time'].split('+')[1].split('/')[1]
    epochE = int(time.mktime(time.strptime(endTime, timestampPattern)))

    for key, observation in parsed_obs.iteritems():
        print('Creating raster maps for offering '
              '{}, observed property {}'.format(offering, key))

        data = json.loads(observation)

        cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'name', 'VARCHAR'),
                (u'value', 'DOUBLE')]

        geometries = dict()
        intervals = {}
        for secondsStamp in range(epochS, epochE + 1, secondsGranularity):
            intervals.update({secondsStamp: dict()})

        timestampPattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone

        for a in data['features']:
            name = a['properties']['name']
            geometries.update({name: a['geometry']['coordinates']})

            for timestamp, value in a['properties'].iteritems():
                if timestamp != 'name':
                    observationStartTime = timestamp[:-4]
                    secondsTimestamp = int(
                        time.mktime(
                            time.strptime(observationStartTime,
                                          timestampPattern)))
                    for interval in intervals.keys():
                        if interval <= secondsTimestamp < (
                                interval + secondsGranularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(float(value))
                            else:
                                intervals[interval].update(
                                    {name: [float(value)]})
                            break

        for interval in intervals.keys():
            if len(intervals[interval]) != 0:
                timestamp = datetime.datetime.fromtimestamp(interval).strftime(
                    't%Y%m%dT%H%M%S')

                tableName = '{}_{}_{}_{}'.format(options['output'], offering,
                                                 key, timestamp)
                if ':' in tableName:
                    tableName = '_'.join(tableName.split(':'))
                if '-' in tableName:
                    tableName = '_'.join(tableName.split('-'))
                if '.' in tableName:
                    tableName = '_'.join(tableName.split('.'))

                new = VectorTopo(tableName)
                if overwrite() is True:
                    try:
                        new.remove()
                    except:
                        pass

                new.open(mode='w',
                         layer=1,
                         tab_name=tableName,
                         link_name=tableName,
                         tab_cols=cols,
                         overwrite=True)
                i = 0
                for procedure, values in intervals[interval].iteritems():
                    if new.exist() is False:
                        i = 1
                    else:
                        i += 1

                    if options['method'] == 'average':
                        value = sum(values) / len(values)
                    elif options['method'] == 'sum':
                        value = sum(values)
                    # TODO: Other aggregations methods

                    new.write(Point(*geometries[procedure]),
                              cat=i,
                              attrs=(
                                  procedure,
                                  value,
                              ))

                new.table.conn.commit()

                new.close(build=False)
                run_command('v.build', quiet=True, map=tableName)

                if options['bbox'] == '':
                    run_command('g.region', vect=tableName, res=resolution)

                run_command('v.to.rast',
                            input=tableName,
                            output=tableName,
                            use='attr',
                            attribute_column='value',
                            layer=1,
                            label_column='name',
                            type='point',
                            quiet=True)

                if flags['k'] is False:
                    run_command('g.remove',
                                'f',
                                type='vector',
                                name=tableName,
                                quiet=True)
Example #5
def full_maps(parsed_obs, offering, seconds_granularity, resolution,
              event_time, target):
    """Create raster maps.

    Maps represent offerings, observed properties and procedures.

    :param parsed_obs: Observations for a given offering in geoJSON format
    :param offering: A collection of sensors used to conveniently group them up
    :param seconds_granularity: Granularity in seconds
    :param resolution: 2D grid resolution for rasterization
    :param event_time: Timestamp of the first/last requested observation
    :param target:
    """
    timestamp_pattern = '%Y-%m-%dT%H:%M:%S'  # TODO: Timezone
    start_time = event_time.split('+')[0]
    epoch_s = int(time.mktime(time.strptime(start_time, timestamp_pattern)))
    end_time = event_time.split('+')[1].split('/')[1]
    epoch_e = int(time.mktime(time.strptime(end_time, timestamp_pattern)))

    for key, observation in parsed_obs.items():
        print('Creating raster maps for offering '
              '{}, observed property {}'.format(offering, key))

        data = json.loads(observation)
        crs = data['crs']
        crs = int(crs['properties']['name'].split(':')[-1])
        transform = soslib.get_transformation(crs, target)

        cols = [(u'cat', 'INTEGER PRIMARY KEY'), (u'name', 'VARCHAR'),
                (u'value', 'DOUBLE')]

        geometries = dict()
        intervals = {}
        for secondsStamp in range(epoch_s, epoch_e + 1, seconds_granularity):
            intervals.update({secondsStamp: dict()})

        timestamp_pattern = 't%Y%m%dT%H%M%S'  # TODO: Timezone

        for a in data['features']:
            name = a['properties']['name']

            sx, sy, sz = a['geometry']['coordinates']
            point = ogr.CreateGeometryFromWkt('POINT ({} {} {})'.format(
                sx, sy, sz))
            point.Transform(transform)
            coords = (point.GetX(), point.GetY(), point.GetZ())
            geometries.update({name: coords})

            for timestamp, value in a['properties'].items():
                if timestamp != 'name':
                    observation_start_time = timestamp[:-4]
                    seconds_timestamp = int(
                        time.mktime(
                            time.strptime(observation_start_time,
                                          timestamp_pattern)))
                    for interval in intervals.keys():
                        if interval <= seconds_timestamp < (
                                interval + seconds_granularity):
                            if name in intervals[interval].keys():
                                intervals[interval][name].append(float(value))
                            else:
                                intervals[interval].update(
                                    {name: [float(value)]})
                            break

        for interval in intervals.keys():
            if len(intervals[interval]) != 0:
                timestamp = datetime.datetime.fromtimestamp(interval).strftime(
                    't%Y%m%dT%H%M%S')

                table_name = '{}_{}_{}_{}'.format(options['output'], offering,
                                                  key, timestamp)
                if ':' in table_name:
                    table_name = '_'.join(table_name.split(':'))
                if '-' in table_name:
                    table_name = '_'.join(table_name.split('-'))
                if '.' in table_name:
                    table_name = '_'.join(table_name.split('.'))

                new = VectorTopo(table_name)
                if overwrite() is True:
                    try:
                        new.remove()
                    except:
                        pass

                new.open(mode='w',
                         layer=1,
                         tab_name=table_name,
                         link_name=table_name,
                         tab_cols=cols,
                         overwrite=True)
                i = 0
                n = None
                s = None
                e = None
                w = None

                for procedure, values in intervals[interval].items():
                    if new.exist() is False:
                        i = 1
                    else:
                        i += 1

                    if options['method'] == 'average':
                        value = sum(values) / len(values)
                    elif options['method'] == 'sum':
                        value = sum(values)
                    # TODO: Other aggregations methods

                    new.write(Point(*geometries[procedure]),
                              cat=i,
                              attrs=(
                                  procedure,
                                  value,
                              ))

                    if options['bbox'] == '':
                        x, y, z = geometries[procedure]
                        if not n:
                            n = y + resolution / 2
                            s = y - resolution / 2
                            e = x + resolution / 2
                            w = x - resolution / 2
                        else:
                            if y >= n:
                                n = y + resolution / 2
                            if y <= s:
                                s = y - resolution / 2
                            if x >= e:
                                e = x + resolution / 2
                            if x <= w:
                                w = x - resolution / 2

                new.table.conn.commit()

                new.close(build=False)
                run_command('v.build', quiet=True, map=table_name)

                if options['bbox'] == '':
                    run_command('g.region', n=n, s=s, w=w, e=e, res=resolution)

                run_command('v.to.rast',
                            input=table_name,
                            output=table_name,
                            use='attr',
                            attribute_column='value',
                            layer=1,
                            type='point',
                            quiet=True)

                if flags['k'] is False:
                    run_command('g.remove',
                                'f',
                                type='vector',
                                name=table_name,
                                quiet=True)
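# Hypothetical usage sketch (placeholder values; assumes a GRASS session, soslib,
# and the module's options/flags dictionaries; event_time in the SOS begin/end form):
#   full_maps(parsed_obs, offering='my_offering', seconds_granularity=3600,
#             resolution=10, event_time=event_time, target=target)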