    def field_magic(self, country):
        dbfile = os.path.join(DBDIR, f"{country}-{self.dataset}.sqlite")
        print(dbfile)
        with spatialite.connect(dbfile) as db:
            for geom in self.geometry_types:

                add_missing_text_fields(db, table=geom, fields=self.fields)
                add_missing_calc_fields(db,
                                        table=geom,
                                        fields=self.calculated_fields)

                records = db.execute(f'select ogc_fid, other_tags from {geom}')

                for record in records:
                    fid, other_tags = record

                    if not other_tags:
                        continue

                    values = parse_other_tags(other_tags)
                    update_fields(db,
                                  table=geom,
                                  ogc_fid=fid,
                                  fields=self.fields,
                                  values=values)
                    update_calc_fields(db,
                                       table=geom,
                                       ogc_fid=fid,
                                       calc_fields=self.calculated_fields,
                                       values=values)

                db.commit()

    def modifyRoadDB(self, country):
        dbfile = os.path.join(DBDIR, f"{country}-highway.sqlite")
        print(dbfile)
        with spatialite.connect(dbfile) as db:
            # add a category column and initialise it from the highway tag
            db.execute('ALTER TABLE lines ADD category varchar(30);')
            db.execute('UPDATE lines SET category = highway;')

            db.execute(
                'UPDATE lines SET category = "track_1" WHERE (highway = "track") AND (other_tags like "%grade1%");'
            )
            db.execute(
                'UPDATE lines SET category = "track_2" WHERE (highway = "track") AND (other_tags like "%grade2%");'
            )
            db.execute(
                'UPDATE lines SET category = "track_3" WHERE (highway = "track") AND (other_tags like "%grade3%");'
            )
            db.execute(
                'UPDATE lines SET category = "track_4" WHERE (highway = "track") AND (other_tags like "%grade4%");'
            )
            db.execute(
                'UPDATE lines SET category = "track_5" WHERE (highway = "track") AND (other_tags like "%grade5%");'
            )
            db.execute(
                'UPDATE lines SET category = "track_na" WHERE (highway = "track") AND (category = "track")'
            )

            db.commit()
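The six track UPDATE statements above differ only in the grade number; a minimal condensed sketch of the same work, assuming db is the open connection from modifyRoadDB and that spatialite.connect returns a standard sqlite3-style connection with ? parameter binding:

# Hypothetical condensed equivalent of the per-grade UPDATEs above.
for grade in range(1, 6):
    db.execute(
        "UPDATE lines SET category = ? "
        "WHERE highway = 'track' AND other_tags LIKE ?",
        (f"track_{grade}", f"%grade{grade}%"))
db.execute(
    "UPDATE lines SET category = 'track_na' "
    "WHERE highway = 'track' AND category = 'track'")
db.commit()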
Example #3
def open_create(self):
    if os.path.isfile(self.file_name):
        os.remove(self.file_name)
    self.db = spatialite.connect(self.file_name)
    self.cursor = self.db.cursor()
    self.initialize_database()
    self.first_free_points_id = 0
Example #4
def open_append(self):
    self.db = spatialite.connect(self.file_name)
    self.cursor = self.db.cursor()
    last_ROWID = self.cursor.execute("SELECT max(ROWID) from Points").fetchone()[0]
    if last_ROWID is None:
        self.first_free_points_id = 0
    else:
        self.first_free_points_id = last_ROWID + 1
Example #5
def get_points(db_path, track_name):
    with spatialite.connect(db_path) as db:
        sql = f"""
        SELECT AsGeoJSON(geometry)
        FROM tracks
        WHERE name='{track_name}';
        """
        s = db.execute(sql).fetchone()[0]
    points = json.loads(s)['coordinates']
    return points
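get_points interpolates track_name straight into the SQL string; a minimal parameterized sketch (not the project's code, assuming sqlite3-style ? placeholders) binds the value instead:

def get_points_safe(db_path, track_name):
    # Hypothetical variant of get_points above with the track name bound as a parameter.
    with spatialite.connect(db_path) as db:
        row = db.execute(
            "SELECT AsGeoJSON(geometry) FROM tracks WHERE name = ?",
            (track_name,)).fetchone()
    return json.loads(row[0])['coordinates']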
Example #6
File: db.py Project: joefg/WiFiMap
def connect(location):
    conn = spatialite.connect(location)

    def dict_factory(cursor, row):
        d = {}
        for index, column in enumerate(cursor.description):
            d[column[0]] = row[index]
        return d

    conn.row_factory = dict_factory

    return conn.cursor()
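Because dict_factory is installed on the connection before the returned cursor is created, fetched rows come back as dicts keyed by column name; a minimal usage sketch (the database file and the wifi table/columns are hypothetical):

cursor = connect('wifimap.sqlite')  # hypothetical SpatiaLite database file
cursor.execute('SELECT ssid, signal FROM wifi LIMIT 1')
row = cursor.fetchone()
print(row)  # e.g. {'ssid': '...', 'signal': -60} rather than a plain tuple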
Example #7
def main(cvatxml_path, db_path):
    conn = spatialite.connect(db_path)
    cursor = conn.cursor()

    damagedict = {'zero': 0, 'light': 1, 'medium': 2, 'high': 3, 'non_recoverable': 4}

    # Get the video_id from the name field of the videos table
    video = os.path.basename(cvatxml_path).replace('.xml', '.mp4')
    sql = (f'SELECT id FROM videos WHERE name = "{video}"')
    cursor.execute(sql)
    video_id = cursor.fetchone()[0]

    # Iterate through the XML tree, entering records into the trees and vcuts database tables
    root = ET.parse(cvatxml_path).getroot()
    for image in root.iter('image'):
        frame_number = int(image.attrib['id'])
        sql = f'SELECT id FROM frames WHERE video_id={video_id} AND frame_number={frame_number};'
        cursor.execute(sql)

        #frame_id = cursor.fetchone()[0]
        data = cursor.fetchone()
        if data is None:
            print(f'ERROR: No data returned by "{sql}"')
        else:
            frame_id = data[0]
            for box in image.iter('box'):
                damage_index_text = box.attrib['label']
                damage_index = damagedict[damage_index_text]
                xtl = int(float(box.attrib['xtl']))
                ytl = int(float(box.attrib['ytl']))
                xbr = int(float(box.attrib['xbr']))
                ybr = int(float(box.attrib['ybr']))
                geometry = f'GeomFromText("MULTIPOINT({xtl} {ytl},{xbr} {ybr})", -1)'
                sql = f'INSERT OR IGNORE INTO trees(frame_id,damage_index,geometry) VALUES ({frame_id},{damage_index},{geometry});'
                cursor.execute(sql)
                conn.commit()
            for polygon in image.iter('polygon'):
                points = polygon.attrib['points']
                points = points.replace(',', ' ').replace(';', ',') # Reformat points to match WKT for a polygon
                geometry = f'PolyFromText("POLYGON(({points}))",-1)'
                sql = f'INSERT OR IGNORE INTO vcuts(frame_id,geometry) VALUES({frame_id},{geometry});'
                cursor.execute(sql)
                conn.commit()
    cursor.close()
    conn.close()
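The INSERT statements above assemble the whole SQL string, WKT included, by hand; a minimal sketch of the trees insert with the WKT bound as a parameter (same schema assumed, not the project's code):

# Hypothetical parameterized variant of the trees INSERT in the loop above.
wkt = f'MULTIPOINT({xtl} {ytl},{xbr} {ybr})'
cursor.execute(
    'INSERT OR IGNORE INTO trees(frame_id, damage_index, geometry) '
    'VALUES (?, ?, GeomFromText(?, -1))',
    (frame_id, damage_index, wkt))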
Example #8
def main(osmfile, stops_file, gtfs_file, stopsprovider):
    logging.basicConfig(filename='matching.log',
                        filemode='w',
                        level=logging.INFO,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')

    logger = logging.getLogger('compare_stops')
    db = spatialite.connect('stops.db')
    db.execute("PRAGMA case_sensitive_like=ON")
    db.row_factory = sqlite3.Row

    if stops_file:
        if stopsprovider == 'NVBW':
            NvbwStopsImporter(db).import_stops(stops_file)
        elif stopsprovider == 'DELFI':
            DelfiStopsImporter(db).import_stops(stops_file)
        else:
            logger.error("No importer for stopsprovider %s", stopsprovider)
            return 1
        logger.info("Imported %s stops", stopsprovider)

    if osmfile:
        OsmStopsImporter(db, osm_file=osmfile)
        print("Imported osm file")

    if gtfs_file:
        importer = GtfsStopsImporter(db)
        importer.import_gtfs(gtfs_file)
        importer.update_name_steig()
        importer.update_mode()

    StopMatcher(db).match_stops()
    print("Matched and exported candidates")
    MatchPicker(db).pick_matches()
    MatchResultValidator(db).check_assertions()
    StatisticsUpdater(db).update_match_statistics()

    db.close()

    return 0
Example #9
def main(gtfs_file, sqlitedb):
    db = spatialite.connect(sqlitedb)
    db.execute("PRAGMA case_sensitive_like=ON")
    importer = GtfsStopsImporter(db)
    importer.import_gtfs(gtfs_file)
    importer.update_name_steig()
Example #10
def main(video_path, db_path):
    """
    Populates the frames field in the survey database.
    Columns are: frame_number, timestamp (UTC), lat, lon
    """
    print('STARTING')
    FRAME_INTERVAL = 1000  # Sets frequency for logging

    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s [%(levelname)s] %(funcName)s %(message)s",
        datefmt="%Y-%m-%dT%H:%M:%S%z",
        handlers=[logging.StreamHandler()])
    logging.info('Starting georef.py')

    # Connect database
    conn = spatialite.connect(db_path)
    cursor = conn.cursor()

    # Find the GPS log file
    gpslogpath = re.sub(r'_\d{6}.mp4', '.csv', video_path, 0, re.MULTILINE)
    logging.info(f'gpslogpath = {gpslogpath}')

    # Load the gps log file csv into a dataframe
    logging.info(f'Reading GPS log from {gpslogpath}')
    dfgps = pd.read_csv(gpslogpath, parse_dates=['time'])
    dfgps['time'] = dfgps['time'].dt.tz_localize(None)

    # Slice dfgps so that is contains data between start_time-10s and stop_time+10s
    start_time, stop_time = get_video_start_stop(video_path, cursor)
    pd.set_option('display.max_columns', None)
    logging.info(
        f'dfgps prior to slicing: {dfgps.time.iloc[0]}   {dfgps.time.iloc[-1]} UTC'
    )
    dfgps = dfgps[dfgps.time.between(start_time - timedelta(0, 10),
                                     stop_time + timedelta(0, 10))]
    logging.info(
        f'dfgps after slicing:    {dfgps.time.iloc[0]}   {dfgps.time.iloc[-1]} UTC'
    )

    dfgps.to_pickle(f'map/{os.path.basename(video_path)}.pikl')
    logging.info('dfgps pickled')

    gpslog_start_time = dfgps.time.min().to_pydatetime()
    gpslog_stop_time = dfgps.time.max().to_pydatetime()
    logging.info(
        f'dfgps after slicing:    {gpslog_start_time}   {gpslog_stop_time} UTC'
    )

    # Get the video_id from the videos table
    video = os.path.basename(video_path)
    sql = (f'SELECT id FROM videos WHERE name = "{video}"')
    cursor.execute(sql)
    video_id = cursor.fetchone()[0]

    # Delete all records for video_id in the frames table
    sql = (f'DELETE FROM frames WHERE video_id={video_id};')
    cursor.execute(sql)
    conn.commit()

    # populate frames table with records for the current video
    if (start_time > gpslog_start_time) and (stop_time < gpslog_stop_time):
        logging.info(f'Started processing {video_path}')
        logging.info('Building timestamp-location table.')

        cap = cv2.VideoCapture(video_path)
        while True:
            pos_msec = cap.get(cv2.CAP_PROP_POS_MSEC)
            frame_number = cap.get(cv2.CAP_PROP_POS_FRAMES)
            timestamp = start_time + timedelta(milliseconds=pos_msec)
            lat, lon = get_lat_lon(timestamp, dfgps)
            geometry = f'TRANSFORM(GeomFromText("POINT({lon} {lat})", 4326), 3857)'
            sql = f'INSERT INTO frames(video_id,frame_number,time,geometry) VALUES({video_id},{frame_number},"{timestamp}",{geometry});'
            logging.debug(sql)
            cursor.execute(sql)
            conn.commit()
            if (frame_number % FRAME_INTERVAL == 0):
                logging.info(
                    f'{frame_number} {timestamp} {lat:7.4f} {lon:8.4f}')
            frame_exists, _ = cap.read()
            if not frame_exists:
                break
        cap.release()
    else:
        logging.error(
            f'Cannot process {video_path} - gps log data not available')

    # Disconnect database
    cursor.close()
    conn.close()

    logging.info(f'FINISHED {video_path}')
Example #11
# Pre-run checklist

if equal_samples:
    assert SAMPLESIZE % 5 == 0, 'SAMPLESIZE must be a multiple of 5 when the equal_samples flag is set'

assert os.path.exists(VIDEODIR), f'VIDEODIR does not exist at {VIDEODIR}'

assert not os.path.exists(IMAGEDIR), \
    f'IMAGEDIR already exists at {IMAGEDIR}. Either delete IMAGEDIR or specify a new one.'

assert os.path.exists(DATABASE), f'DATABASE does not exist at {DATABASE}'

run_randy(equal_samples)
os.mkdir(IMAGEDIR)
conn = spatialite.connect(DATABASE)
df = pd.read_sql('SELECT * FROM randy ORDER BY RANDOM();', conn)
df = df.apply(parse_bounding_box_column, axis=1)
df.drop('bounding_box', axis=1, inplace=True)
df['image_file_name'] = df.apply(format_image_file_name, axis=1)
df['image_url'] = df.apply(format_image_url, axis=1)
df.to_csv(f'{IMAGEDIR}/index.csv', index=False)
for i, r in df.iterrows():
    video = f'{VIDEODIR}/{r.video}'
    try:
        extract_image(IMAGEDIR, r.image_file_name, video, r.frame_number,
                      r.tree_id, r.xtl, r.ytl, r.xbr, r.ybr)
    except Exception:
        print(
            f'ERROR: Could not extract {video} frame {r.frame_number} tree image {r.tree_id}'
        )
Example #12
def open_append(self):
    self.db = spatialite.connect(self.file_name)
    self.cursor = self.db.cursor()
Example #13
File: shp-q.py Project: jokoon/eio
gis_osm_pois_a_free_1
gis_osm_pois_free_1
gis_osm_railways_free_1
gis_osm_roads_free_1
gis_osm_traffic_a_free_1
gis_osm_traffic_free_1
gis_osm_transport_a_free_1
gis_osm_transport_free_1
gis_osm_water_a_free_1
gis_osm_waterways_free_1'''.split('\n')

zonewhere = "where MBRContains(BuildCircleMBR(2.192429,42.658764,0.05),Geometry)"
zonewhere_larger = "where MBRContains(BuildCircleMBR(2.192429,42.658764,0.5),Geometry)"

f = open('repr-blob.txt','w')
with spatialite.connect('db.sqlite') as db:
    # print(db.execute('SELECT spatialite_version()').fetchone()[0])
    # query_wrap(db,"SELECT fclass,name from gis_osm_natural_free_1 "+zonewhere)

    # show polygons from buildings
    # query_wrap(db,"SELECT fclass,name,AsText(GEOMETRY) from gis_osm_buildings_a_free_1 "+zonewhere)
    
    # show polygons from gis_osm_pofw_a_free_1 (POFW = places of worship)
    # query_wrap(db,"SELECT fclass,name from gis_osm_pofw_a_free_1 "+zonewhere_larger)

    # show content from all tables
    for a in tables:
        # query_wrap(db,"PRAGMA table_info(%s);"%a)
        # query_wrap(db,"SELECT fclass,name from %s "%a+zonewhere)
        query_wrap_write(db,"SELECT fclass,name,AsText(GEOMETRY) from %s "%a+zonewhere,f)
Example #14
             horizontalalignment='center',
             verticalalignment='center',
             fontsize=28)
    ax.set(aspect="equal")
    #plt.savefig('pie.svg')
    plt.savefig(indexpath.replace('index.html', 'pie.svg'))


args = docopt(__doc__)
print(args)
indexpath = args['<indexpath>']
databasepath = args['<databasepath>']

# Query DB for stats

conn = spatialite.connect(databasepath)
df = pd.read_sql('SELECT * FROM stats;', conn)
stats = dict(zip(df.name, df.val))
print('a')
print(stats)
print('b')

bake_pie(stats)

# Add project abstract
abstract = '<b>Coconut Rhinoceros Beetle Damage</b><br>'
abstract += '<b>Guam Roadside Video Survey 1</b><br><br>'
abstract += '<table>'
abstract += f'<tr><td>Start date:</td><td  style="text-align:right">{stats["frames_min_time"][:10]}</td></tr>'
abstract += f'<tr><td>End date:</td><td  style="text-align:right">{stats["frames_max_time"][:10]}</td></tr>'
abstract += f'<tr><td>Video frames examined:</td><td style="text-align:right">{int(stats["frames_count"]):,}</td></tr>'
Example #15
def open_database(db_path):
	""" Open connection to spatialite database """
	return spatialite.connect(db_path)
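A minimal usage sketch (the survey.sqlite path is hypothetical; spatialite_version() is a standard SpatiaLite SQL function also used in other examples on this page):

db = open_database('survey.sqlite')  # hypothetical database file
print(db.execute('SELECT spatialite_version()').fetchone()[0])
db.close()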
Example #16
    async def execute(self, filter={}, user=None):

        # Creating a temporary file where the
        # SpatialLite DB will be populated
        spatia_lite_file = tempfile.NamedTemporaryFile(suffix='.db',
                                                       prefix='bdms_',
                                                       delete=False)

        print(spatia_lite_file.name)
        try:
            with spatialite.connect(spatia_lite_file.name) as db:
                print(db.execute('SELECT spatialite_version()').fetchone()[0])

                cur = db.cursor()
                # Create the Borehole table

                print("creating spatialite db:")
                print(" - InitSpatialMetaData")

                cur.execute("SELECT InitSpatialMetaData()")

                print(" - Creating schema")

                cur.execute("""
                    CREATE TABLE boreholes (
                        id integer NOT NULL PRIMARY KEY,
                        original_name text,
                        project_name text,
                        public_name text,
                        kind integer,

                        restriction integer,
                        restriction_until text
                    )
                """)

                cur.execute("""
                    SELECT AddGeometryColumn(
                        'boreholes', 'the_geom',
                        2056, 'POINT', 'XY');
                """)

                print(".. done.")

                # Load all the boreholes
                boreholes = await self.conn.fetch(
                    ListBorehole.get_sql_geolcode())

                print(f"Loaded: {len(boreholes)} boreholes")

                for borehole in boreholes:

                    print(f'id: {borehole["id"]}')

                    sql = """
                        INSERT INTO boreholes VALUES (
                            ?,
                            ?, ?, ?,
                            ?, ?, ?,
                            GeomFromText(?, 2056)
                        )
                    """
                    params = (borehole['id'], borehole['original_name'],
                              borehole['project_name'], borehole['public_name'],
                              borehole['kind'],
                              # None binds as SQL NULL (the old code passed the string 'NULL')
                              borehole['restriction'],
                              borehole['restriction_until'],
                              "POINT({} {})".format(borehole['location_x'],
                                                    borehole['location_y']))

                    print(sql)

                    cur.execute(sql, params)

                db.commit()  # commit via the connection; cursors have no commit()

            spatia_lite_file.close()

            return spatia_lite_file

        except Exception:
            traceback.print_exc()
            spatia_lite_file.close()

        return None