Example 1
    def _init_spatialite(self, instance_id, target):
        # initialize the spatialite database file
        if target is None:
            dst_data_dir = os.path.join(instance_id, instance_id, "data")
        else:
            dst_data_dir = os.path.join(os.path.abspath(target),
                                        instance_id, instance_id, "data")

        os.chdir(dst_data_dir)
        db_name = "config.sqlite"
        print("Setting up initial database.")
        try:
            from pyspatialite import dbapi2 as db
            conn = db.connect(db_name)
            print("Using pyspatialite.")
        except ImportError:
            try:
                from pysqlite2 import dbapi2 as db
                print("Using pysqlite.")
            except ImportError:
                from sqlite3 import dbapi2 as db
                print("Using sqlite3.")

            conn = db.connect(db_name)
            # find_library comes from ctypes.util
            spatialite_lib = find_library('spatialite')
            try:
                conn.execute("SELECT load_extension(?)", (spatialite_lib,))
            except Exception as msg:
                raise Exception(
                    'Unable to load the SpatiaLite library extension '
                    '"%s" because: %s' % (spatialite_lib, msg)
                )
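
Many later snippets call db.connect(...) without showing where db comes from; they most likely rely on a module-level import fallback like this sketch (an assumption, mirroring the try/except chain above):

# Assumed module-level fallback: prefer pyspatialite, fall back to sqlite3.
try:
    from pyspatialite import dbapi2 as db
except ImportError:
    from sqlite3 import dbapi2 as db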
Example 2
    def setUp(self):
        try:
            os.remove(get_db_path())
        except OSError:
            pass

        self.con1 = sqlite.connect(get_db_path(), timeout=0.1)
        self.cur1 = self.con1.cursor()

        self.con2 = sqlite.connect(get_db_path(), timeout=0.1)
        self.cur2 = self.con2.cursor()
Example 3
    def importusers(self):
        print "importuser"
        delta_days = self.days
        indb = db.connect(self.indb)
        dbout = db.connect(self.outdb)
        incur = indb.cursor()
        ago = ""
        if (delta_days == 0):
            ago = datetime.today() - timedelta(delta_days)
        else:
            sql = '''CREATE VIEW users_lastdays as SELECT user,
            MAX(timestamp) as tempo FROM osm_nodes GROUP BY user;'''
            incur.execute(sql)
        s = 0
        for i in self.tables:
            
            if (delta_days > 0):
                sql = '''select distinct(user) from 
                        users_lastdays where tempo > "%s"''' % str(ago)
            else:
                sql = "SELECT distinct(user) from osm_nodes";
                
            rs = incur.execute(sql)
            r = rs.fetchall()
            if s == 0:
                outcur = dbout.cursor()
                for u in r:
                    user = u[0]
                    if user is not None:
                        sql = "INSERT INTO users (user) VALUES (?)"
                        outcur.execute(sql, [user])
                s = s + 1
                outcur.close()
                dbout.commit()
            if (delta_days >0):
                sql = "DROP VIEW users_lastdays;"
                incur.execute(sql)

            else:
                outcur = dbout.cursor()
                for u in r:
                    user = u[0]
                    sql = "Select user from users where user = ?" # user
                    rsu = list(outcur.execute(sql,user))
                    if len(rsu) == 0:
                        sql = "INSERT INTO users (user) VALUES (?)"
                        outcur.execute(sql,[user])
                outcur.close()
                dbout.commit()
        incur.close()
        indb.close()
        dbout.close()
        print "Users imported"
Example 4
    def importusers(self,dbname,delta_days=180):
        indb = db.connect(dbname)
        dbout = db.connect(self.outdb)
        incur = indb.cursor()
        sixmonthago = ""
        if (delta_days == 0):
            sixmonthago = datetime.today() - timedelta(delta_days)
        else:
            sql = "CREATE VIEW users_lastdays as SELECT user,MAX(timestamp) as tempo FROM osm_nodes GROUP BY user;"
            incur.execute(sql)
        
        s = 0
        for i in self.tables:
            
            if (delta_days > 0):
                sql = 'select distinct(user) from users_lastdays where tempo > "%s"' % str(sixmonthago)
            else:
                sql = "SELECT distinct(user) from osm_nodes";
                
            rs = incur.execute(sql)
            r = rs.fetchall()
            if s == 0:
                outcur = dbout.cursor()
                for u in r:
                    user = u[0]
                    sql = "INSERT INTO users (user) VALUES ('%s')" % (user)
                    outcur.execute(sql)
                s = s+1
                outcur.close()
                dbout.commit()
            if (delta_days >0):
                sql = "DROP VIEW users_lastdays;"
                incur.execute(sql)

            else:
                outcur = dbout.cursor()
                for u in r:
                    user = u[0]
                    sql = "Select user from users where user = '******';" % user
                    rsu = list(outcur.execute(sql))
                    if len(rsu) == 0:
                        sql = "INSERT INTO users (user) VALUES ('%s')" % (user)
                        outcur.execute(sql)
                outcur.close()
                dbout.commit()
        incur.close()
        indb.close()
        dbout.close()
        print "Users imported"
Example 5
def saveR(rectangles, A, cc, n):
    zona = takeZona(n)
    polis =[]
    for r in rectangles:
        polis.append(r[0])

    union = affinity.rotate(cascaded_union(polis), -A, origin=cc)
    dx = union.centroid.x-cc.x
    dy = union.centroid.y-cc.y
    print('translate : ', dx, dy)
    data2save = ()
    for r in rectangles:
        rotated=affinity.rotate(r[0], -A, origin=cc)
        translated = affinity.translate(rotated, -dx, -dy)
        # check whether it intersects the zone
        print(zona.intersects(translated))
        if zona.intersects(translated):
            data = (n, A, r[1], r[2], "SRID=25831;"+str(translated))
            data2save += (data,)
        #print data
    conn = db.connect(rootData)
    c = conn.cursor()
    c.executemany('''insert into elementsdiv ( name, ang, x, y, geometry ) values ( ?, ?, ?,?, GeomFromEWKT( ? ) )''', data2save )
    conn.commit()
    conn.close()

    return
Example 6
File: utils.py Project: volaya/QGIS
def spatialite_connect(*args, **kwargs):
    """returns a dbapi2.Connection to a spatialite db
either using pyspatialite if it is present
or using the "mod_spatialite" extension (python3)"""
    try:
        from pyspatialite import dbapi2
    except ImportError:
        import sqlite3
        con = sqlite3.dbapi2.connect(*args, **kwargs)
        con.enable_load_extension(True)
        cur = con.cursor()
        libs = [
            # Spatialite >= 4.2 and Sqlite >= 3.7.17, should work on all platforms
            ("mod_spatialite", "sqlite3_modspatialite_init"),
            # Spatialite >= 4.2 and Sqlite < 3.7.17 (Travis)
            ("mod_spatialite.so", "sqlite3_modspatialite_init"),
            # Spatialite < 4.2 (linux)
            ("libspatialite.so", "sqlite3_extension_init")
        ]
        found = False
        for lib, entry_point in libs:
            try:
                cur.execute("select load_extension('{}', '{}')".format(lib, entry_point))
            except sqlite3.OperationalError:
                continue
            else:
                found = True
                break
        if not found:
            raise RuntimeError("Cannot find any suitable spatialite module")
        cur.close()
        con.enable_load_extension(False)
        return con
    return dbapi2.connect(*args, **kwargs)
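
A minimal usage sketch for spatialite_connect (the in-memory path and the query are illustrative assumptions, not part of the QGIS source):

# Hypothetical usage of spatialite_connect(); ":memory:" and the query are assumptions.
con = spatialite_connect(":memory:")
cur = con.cursor()
cur.execute("SELECT spatialite_version()")  # confirms the extension really loaded
print(cur.fetchone()[0])
con.close()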
Example 7
 def CheckConnectionExecutemany(self):
     con = sqlite.connect(":memory:")
     con.execute("create table test(foo)")
     con.executemany("insert into test(foo) values (?)", [(3,), (4,)])
     result = con.execute("select foo from test order by foo").fetchall()
     self.assertEqual(result[0][0], 3, "Basic test of Connection.executemany")
     self.assertEqual(result[1][0], 4, "Basic test of Connection.executemany")
Example 8
    def __init__(self, dbpath):
        self.dbpath = dbpath
        # creating/connecting the test_db
        self.conn = sqlite.connect(self.dbpath) 
        # creating a Cursor
        self.cur = self.conn.cursor()
        self.cur.execute("PRAGMA foreign_keys = ON")    #Foreign key constraints are disabled by default (for backwards compatibility), so must be enabled separately for each database connection separately.
        
        #add layer styles
        self.add_layer_styles_2_db()

        #load style from file and set it as value into the layer styles table
        """
        self.style_from_file_into_db('obs_lines', 'obs_lines_tablayout.qml','obs_lines_tablayout.sld')
        self.style_from_file_into_db('obs_p_w_strat', 'obs_p_w_strat.qml','obs_p_w_strat.sld')
        self.style_from_file_into_db('obs_p_w_lvl', 'obs_p_w_lvl.qml','obs_p_w_lvl.sld')
        #osv
        """
        self.style_from_file_into_db('obs_points', 'obs_points_tablayout.qml','obs_points_tablayout.sld')
        self.style_from_file_into_db('stratigraphy', 'stratigraphy_tablayout.qml','stratigraphy_tablayout.sld')

        self.cur.execute("PRAGMA foreign_keys = OFF")
        # finished working with the database, close connections
        self.rs.close()
        self.conn.close()
Example 9
    def create(self, force=True):
        if os.path.exists(self.file) and not force:
            return
        elif os.path.exists(self.file):
            os.remove(self.file)
        try:
            connection = db.connect(self.file, check_same_thread = False)
            cursor = connection.cursor()
    
            cursor.execute('SELECT InitSpatialMetadata()')
    
            cursor.execute(self.sql_point)
            cursor.execute('''SELECT AddGeometryColumn('fs_point', 'geometry', %i, '%s', 2);''' % (int(self.srs), "POINT"))
    
            cursor.execute(self.sql_line)
            cursor.execute('''SELECT AddGeometryColumn('fs_line', 'geometry', %i, '%s', 2);''' % (int(self.srs), "LINESTRING"))
    
            cursor.execute(self.sql_polygon)
            cursor.execute('''SELECT AddGeometryColumn('fs_polygon', 'geometry', %i, '%s', 2);''' % (int(self.srs), "POLYGON"))
    
    
            sql_clean = "CREATE TABLE fs_clean (id INTEGER PRIMARY KEY AUTOINCREMENT, clean_date TEXT);"
            cursor.execute(sql_clean)
    
            now = datetime.now().strftime(self.fmt)
    
            cursor.execute("INSERT INTO fs_clean(\"clean_date\") VALUES(date('"+now+"'));")

            connection.commit()
            connection.close()
        except Exception:
            raise
Example 10
    def createDB(self):
        if self.stopThread:
            return
        
        if os.path.exists(self.DATABASE_OUTNAME):
            os.unlink(self.DATABASE_OUTNAME)
        # read the schema file
        geosisma_geo_schema = ""
        with open(self.DATABASE_OUTNAME_SCHEMAFILE, 'r') as fs:
            geosisma_geo_schema += fs.read()
        # connect spatialite db
        conn = db.connect(self.DATABASE_OUTNAME)
        
        # create spatial metadata... the argument depends on SpatiaLite 3 vs 4
        try:
            conn.cursor().execute("SELECT InitSpatialMetadata(1);")
        except db.Error:
            conn.cursor().execute("SELECT InitSpatialMetadata();")

        # create DB
        try:
            self.procMessage.emit("Inizializza il DB Spatialite temporaneo", QgsMessageLog.INFO)
            conn.cursor().executescript(geosisma_geo_schema)
        except db.Error as e:
            self.procMessage.emit(str(e), QgsMessageLog.CRITICAL)
            raise e
Example 11
    def populateDB(self, selectedComuni):
        if self.stopThread:
            return

        # connect spatialite db
        conn = db.connect(self.DATABASE_OUTNAME)
        try:
            # copy tables
            tables = ["istat_regioni", "istat_province", "codici_belfiore", "istat_loc_tipi", "istat_comuni"]
            for table in tables:
                self.copyTable(conn, table)
             
            # copy table with geom
            tables = ["istat_loc"]
            for table in tables:
                self.copyGeomTable(conn, table)
             
            # get fab_catasto polygons only related to selectedComuni
            for comune in selectedComuni:
                self.copyCatastoPolygons(conn, comune)
            
            # get fab_10k polygons only related to selectedComuni
            for comune in selectedComuni:
                self.copyFab10kPolygons(conn, comune)
            
            #commit population
            conn.commit()
        except db.Error as e:
            self.procMessage.emit(str(e), QgsMessageLog.CRITICAL)
            raise e
Example 12
 def add_price(self, bar, price, drink_type, orig_date, lat, lon, remote_host, user_agent, userid):
     conn = sqlite3.connect(config['dbfile'])
     conn.execute("""insert into pricings
         (barid, drink_type, price, date, geometry, host, user_agent, userid)
         values (?, ?, ?, ?, geomFromText('point(%f %f)', 4326), ?, ?, ?)""" % (lon, lat),
         (bar.pkuid, drink_type, price, orig_date, remote_host, user_agent, userid))
     conn.commit()
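
A safer variant of the insert above binds the coordinates as parameters via SpatiaLite's MakePoint() instead of %-formatting them into the SQL (a sketch under that assumption; table and columns as in the original):

     # Sketch: the same insert with every value bound, using MakePoint(x, y, srid).
     conn.execute("""insert into pricings
         (barid, drink_type, price, date, geometry, host, user_agent, userid)
         values (?, ?, ?, ?, MakePoint(?, ?, 4326), ?, ?, ?)""",
         (bar.pkuid, drink_type, price, orig_date, lon, lat, remote_host, user_agent, userid))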
Example 13
    def add_or_update_nodes(self, nodeset, source_file="unknown"):
        ignored_bars = 0
        updated_bars = 0
        new_bars = 0
        conn = sqlite3.connect(config['dbfile'])
        c = conn.cursor()
        update_tstamp = time.time()
        for barn in nodeset.values():
            if 'name' not in barn.tags or barn.tags["name"] == "":
                ignored_bars += 1
            else:
                # Always updated, but use a key to make sure that we never make a duplicate.
                bar = Bar(barn.tags["name"], float(barn.lat), float(barn.lon), type=barn.tags["amenity"], osmid=barn.id)
                cnt = c.execute("select count(1) from bars where osmid = ?", (bar.osmid,)).fetchone()[0]
                if cnt >= 1:
                    c.execute("update bars set name = ?, type =?, updated = ?, geometry = geomFromText('POINT(%f %f)', 4326) where osmid = ?" % (bar.lon, bar.lat),
                        (bar.name, bar.type, update_tstamp, bar.osmid))
                    updated_bars += 1
                else:
                    # oh f**k you very much spatialite
                    c.execute("insert into bars (name, type, osmid, created, geometry) values (?, ?, ?, ?, geomFromText('POINT(%f %f)', 4326))" % (bar.lon, bar.lat),
                        (bar.name, bar.type, bar.osmid, update_tstamp))
                    new_bars += 1

        username = "******" # FIXME
        # FIXME - make this log a failure too please!
        c.execute("""insert into data_updates (date, username, bars_created, bars_modified, source_file, status)
                    values (?, ?, ?, ?, ?, ?)""",
                    (update_tstamp, username, new_bars, updated_bars, source_file, "OK"))
        conn.commit()
        conn.close()
        log.info("loaded %d bars, ignored %d nameless, created %d, updated %d", len(nodeset), ignored_bars, new_bars, updated_bars)
Example 14
def searchObject(point_lat, point_lng, db_file):
    conn = db.connect(DB_DIR + db_file)
    cur = conn.cursor()
    sql = "SELECT id, geometry, name, sub_type, country, min_lat, min_lng, max_lat, max_lng, scale, eng_name FROM object WHERE min_lng <= " + str(point_lng) + " AND min_lat <= " + str(point_lat) + " AND max_lng  >= " + str(point_lng) + " AND max_lat >= " + str(point_lat)
    id = -1
    res = cur.execute(sql)
    for rec in res:
        id = rec[0]
        geometry = rec[1].strip().encode('utf-8')
        name = rec[2].encode('utf-8')
        sub_type = rec[3].encode('utf-8')
        country = rec[4].encode('utf-8')
        min_lat = rec[5]
        min_lng = rec[6]
        max_lat = rec[7]
        max_lng = rec[8]
        scale = rec[9]
        eng_name = rec[10].encode('utf-8')
        point_geometry = '{"type":"Point","coordinates":[' + str(point_lng) + ',' + str(point_lat) + ']}'
        if id != -1:
            sql = "SELECT Intersects(GeomFromGeoJSON('" + geometry + "'),GeomFromGeoJSON('" + point_geometry + "'))"
            res2 = cur.execute(sql)
            in_obj = 0
            for rec2 in res2:
                print('rec2=' + str(rec2))
                in_obj = rec2[0]
                if in_obj == 1:
                    cur.close()
                    conn.close()
                    return (name, sub_type, geometry, country, id, (min_lat + max_lat) / 2, (min_lng + max_lng) / 2, scale, eng_name)

    cur.close()
    conn.close()
    return None
Example 15
 def avg_prices_for_bar(self, bar_pkuid):
     prices = []
     conn = sqlite3.connect(config['dbfile'])
     rows = conn.execute("select drink_type, avg(price), count(price) from pricings where barid = ? group by drink_type", (bar_pkuid,)).fetchall()
     for row in rows:
         prices.append({'drink_type': row[0], 'average': row[1], 'samples': row[2]})
     return prices
Example 16
    def init_spatialite(self):
        # Get spatialite version
        c = self.con.cursor()
        try:
            self._exec_sql(c, u'SELECT spatialite_version()')
            rep = c.fetchall()
            v = [int(a) for a in rep[0][0].split('.')]
            vv = v[0] * 100000 + v[1] * 1000 + v[2] * 10

            # Add spatialite support
            if vv >= 401000:
                # 4.1 and above
                sql = "SELECT initspatialmetadata(1)"
            else:
                # Under 4.1
                sql = "SELECT initspatialmetadata()"
            self._exec_sql_and_commit(sql)
        except Exception:
            return False
        finally:
            self.con.close()

        try:
            self.con = sqlite.connect(self.con_info())

        except (sqlite.InterfaceError, sqlite.OperationalError) as e:
            raise DbError(str(e))

        return self.check_spatialite()
Example 17
    def init_spatialite(self):
        # Get spatialite version
        c = self.con.cursor()
        try:
            self._exec_sql(c, u'SELECT spatialite_version()')
            rep = c.fetchall()
            # tokenize e.g. "4.3.0a" into [4, 3, 0, 'a'] so non-numeric suffixes survive
            v = [int(x) if x.isdigit() else x for x in re.findall(r"\d+|[a-zA-Z]+", rep[0][0])]

            # Add spatialite support
            if v >= [4, 1, 0]:
                # 4.1 and above
                sql = "SELECT initspatialmetadata(1)"
            else:
                # Under 4.1
                sql = "SELECT initspatialmetadata()"
            self._exec_sql_and_commit(sql)
        except Exception:
            return False
        finally:
            self.con.close()

        try:
            self.con = sqlite.connect(self.con_info())

        except (sqlite.InterfaceError, sqlite.OperationalError) as e:
            raise DbError(str(e))

        return self.check_spatialite()
Example 18
 def getNativeFields(self, type):
     '''
     Gets the native fields with type from database
     
     :param type: XSD schema type
     :type type: PAGType
     '''
     
     conn = db.connect(self.database)
     
     cursor = conn.cursor() 
     rs = cursor.execute("PRAGMA table_info('{}')".format(type.name))
     
     for i in range(len(rs.description)):
         if rs.description[i][0] == 'name':
             name_index = i
         if rs.description[i][0] == 'type':
             type_index = i
     
     fields =[]
     
     for row in rs:
         fields.append((row[name_index],row[type_index]))
     
     cursor.close()
     del cursor
     
     conn.close()
     del conn
     
     return fields
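
A usage sketch for getNativeFields; the stub type and printed output are assumptions (only the .name attribute of the type argument is read):

 # Hypothetical usage; PAGTypeStub stands in for the real PAGType.
 class PAGTypeStub:
     name = 'obs_points'

 fields = project.getNativeFields(PAGTypeStub())  # 'project' is the owning object (assumed)
 print(fields)  # e.g. [('id', 'INTEGER'), ('name', 'TEXT')]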
Example 19
 def _updateDatabase(self):
     '''
     Updates the project database
     '''
     
     xsd_schema = main.xsd_schema
     createdb = not os.path.isfile(self.database)
     
     conn = db.connect(self.database)
     
     # Create database if not exist
     if createdb:
         cursor=conn.cursor() 
         cursor.execute("SELECT InitSpatialMetadata()")
         del cursor
     
     # Check and update tables
     for type in xsd_schema.types:
         uri = self.getTypeUri(type)
         layer = QgsVectorLayer(uri, type.friendlyName(), 'spatialite')
         
         # Create layer if not valid
         if not layer.isValid():
             self._createTable(conn, type)
             layer = QgsVectorLayer(uri, type.friendlyName(), 'spatialite')
         
         self._updateTable(type, layer, True)
     
     # Check and update the import log table
     self._updateImportLogTable(conn)
     
     conn.close()
     del conn
Example 20
    def setUpClass(cls):
        """Run before all tests"""
        # create test db
        cls.dbname = os.path.join(tempfile.gettempdir(), "test.sqlite")
        if os.path.exists(cls.dbname):
            os.remove(cls.dbname)
        con = sqlite3.connect(cls.dbname)
        cur = con.cursor()
        sql = "SELECT InitSpatialMetadata()"
        cur.execute(sql)

        # simple table with primary key
        sql = "CREATE TABLE test_pg (id INTEGER NOT NULL PRIMARY KEY, name TEXT NOT NULL)"
        cur.execute(sql)
        sql = "SELECT AddGeometryColumn('test_pg', 'geometry', 4326, 'POLYGON', 'XY')"
        cur.execute(sql)
        sql = "INSERT INTO test_pg (id, name, geometry) "
        sql += "VALUES (1, 'toto', GeomFromText('POLYGON((0 0,1 0,1 1,0 1,0 0))', 4326))"
        cur.execute(sql)

        # table with multiple column primary key
        sql = "CREATE TABLE test_pg_mk (id INTEGER NOT NULL, name TEXT NOT NULL, PRIMARY KEY(id,name))"
        cur.execute(sql)
        sql = "SELECT AddGeometryColumn('test_pg_mk', 'geometry', 4326, 'POLYGON', 'XY')"
        cur.execute(sql)
        sql = "INSERT INTO test_pg_mk (id, name, geometry) "
        sql += "VALUES (1, 'toto', GeomFromText('POLYGON((0 0,1 0,1 1,0 1,0 0))', 4326))"
        cur.execute(sql)

        con.commit()
        con.close()
Example 21
def addObject(name, sub_type, country, geometry, scale, eng_name, db_file):
    conn = db.connect(DB_DIR + db_file)
    cur = conn.cursor()
    sql = "SELECT MbrMinX(GeomFromGeoJSON('"+ geometry +"')) as min_lng, MbrMinY(GeomFromGeoJSON('"+ geometry +"')) as min_lat, MbrMaxX(GeomFromGeoJSON('"+ geometry +"')) as max_lng, MbrMaxY(GeomFromGeoJSON('"+ geometry +"')) as max_lat"
    print(sql)
    res = cur.execute(sql)
    for rec in res:
        print(rec)
        min_lng = rec[0]
        min_lat = rec[1]
        max_lng = rec[2]
        max_lat = rec[3]
    name = filterString(name)
    if len(name) == 0:
        return None
    cur.execute("INSERT INTO object (name, sub_type, geometry, min_lng, min_lat, max_lng, max_lat, country, scale, eng_name) VALUES(?,?,?,?,?,?,?,?,?,?)", (name, sub_type, geometry,min_lng,min_lat,max_lng,max_lat,country,scale,eng_name))
    conn.commit()
    cur.execute("SELECT id, geometry, name, sub_type, country, min_lat, min_lng, max_lat, max_lng, scale, eng_name FROM object WHERE name=?",(name,))
    id = -1
    for rec in res:
        id = rec[0]
        geometry = rec[1].strip().encode('utf-8')
        name = rec[2].encode('utf-8')
        sub_type = rec[3].encode('utf-8')
        country = rec[4].encode('utf-8')
        min_lat = rec[5]
        min_lng = rec[6]
        max_lat = rec[7]
        max_lng = rec[8]
        scale = rec[9]
        eng_name = rec[10].encode('utf-8')
    if id == -1:
        return None
    else:
        return (name, sub_type, geometry, country, id, (min_lat+max_lat)/2, (min_lng+max_lng)/2, scale, eng_name)
Example 22
def store_attempt(task_id):
    """Stores information about the task"""
    conn = db.connect('noaddr.sqlite')
    cur = conn.cursor()
    res = cur.execute("SELECT id from anomaly where id IS %d" % int(task_id))
    recs = res.fetchall()
    if not len(recs) == 1:
        abort(404)
    #dct = geojson.loads(request.json)
    dct = request.form
    # We can now handle this object as we like, but for now, let's
    # just handle the action
    action = dct['action']
    if action == 'fixed':
        pass
    elif action == 'notfixed':
        pass
    elif action == 'someonebeatme':
        pass
    elif action == 'falsepositive':
        pass
    elif action == 'skip':
        pass
    elif action == 'noerrorafterall':
        pass
    # We need to return something, so let's return an empty
    # string. Maybe in the future we'll think of something useful to
    # return and return that instead
    return ""
Example 23
def searchCity(point_lat, point_lng, db_file):
    conn = db.connect(DB_DIR + db_file)
    cur = conn.cursor()
    sql = "SELECT id, geometry, city_name, city_lastname, country, min_lat, min_lng, max_lat, max_lng, scale FROM city WHERE min_lng <= " + str(point_lng) + " AND min_lat <= " + str(point_lat) + " AND max_lng  >= " + str(point_lng) + " AND max_lat >= " + str(point_lat)
    id = -1
    res = cur.execute(sql)
    for rec in res:
        id = rec[0]
        print('id=%i' % id)
        city_geometry = rec[1].strip().encode('utf-8')
        city_name = rec[2].encode('utf-8')
        city_lastname = rec[3].encode('utf-8')
        city_country = rec[4].encode('utf-8')
        min_lat = rec[5]
        min_lng = rec[6]
        max_lat = rec[7]
        max_lng = rec[8]
        scale = rec[9]
        #print 'city_name=%s' % city_name
        #print 'min_lat=%f max_lat=%f min_lng=%f max_lng=%f' % (min_lat, max_lat, min_lng, max_lng)
        point_geometry = '{"type":"Point","coordinates":[' + str(point_lng) + ',' + str(point_lat) + ']}'
        if id != -1:
            sql = "SELECT Intersects(GeomFromGeoJSON('" + city_geometry + "'),GeomFromGeoJSON('" + point_geometry + "'))"
            res2 = cur.execute(sql)
            in_city = 0
            for rec2 in res2:
                print('rec=' + str(rec2))
                in_city = rec2[0]
                if in_city == 1:
                    cur.close()
                    conn.close()
                    return (city_name, city_lastname, city_geometry, city_country, id, (min_lat + max_lat) / 2, (min_lng + max_lng) / 2, scale)
    cur.close()
    conn.close()
    return None
Example 24
def getListObject(db_file):
    conn = db.connect(DB_DIR + db_file)
    cur = conn.cursor()
    sql = "SELECT id, geometry, name, sub_type, country, min_lat, min_lng, max_lat, max_lng, scale, eng_name FROM object ORDER BY name"
    res = cur.execute(sql)
    objlist = []
    for rec in res:
        id = rec[0]
        geometry = rec[1].strip().encode('utf-8')
        name = rec[2].encode('utf-8')
        sub_type = rec[3].encode('utf-8')
        country = rec[4].encode('utf-8')
        min_lat = rec[5]
        min_lng = rec[6]
        max_lat = rec[7]
        max_lng = rec[8]
        scale = rec[9]
        eng_name = rec[10].encode('utf-8')
        item = '{"res":true, "name":"' + name + '", "sub_type":"' + sub_type + '","geometry":' + geometry + ', "country":"' + country + '", "id":' + str(id)+ ', "avg_lat":'+str((min_lat+max_lat)/2)+', "avg_lng":'+str((min_lng+max_lng)/2)+', "scale":'+str(scale)+', "eng_name":"'+eng_name+'"}'
        objlist.append(item)
    cur.close()
    conn.close()
    if len(objlist) == 0:
        return None
    else:
        print(len(objlist))
        return objlist
Example 25
    def delete_from_sqlite(self, source, name=None):
        """Delete the oat object from sqlite

        Args:
            source (str): the sqlite file (including path)
            name (list): the sensor name to be used
        """

        try:
            from pyspatialite import dbapi2 as db
        except ImportError:
            raise ImportError('<pyspatialite> package not installed')

        #connect / create the DB
        con = db.connect(source)
        cur = con.cursor()

        if name is None:
            name = self.name

        #check if sensor exists
        sql = "SELECT name FROM freewat_sensors WHERE name=?;"
        res = cur.execute(sql, (name,)).fetchall()

        if res:
            #delete the sensor metadata
            sql = "DELETE FROM freewat_sensors WHERE name=?;"
            res = cur.execute(sql, (name,)).fetchall()
            #delete the sensor data (a table name cannot be a bound parameter)
            sql = "DROP TABLE %s ;" % (name)
            res = cur.execute(sql).fetchall()
            con.commit()
        else:
            raise ValueError("%s not found in db %s" % (name, source))
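
A hypothetical call of the method above (the file path and sensor name are assumptions):

# Hypothetical usage; the path and the sensor name are assumptions.
sensor.delete_from_sqlite('/path/to/project.sqlite', name='sensor_01')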
Example 26
def processCsv(infile, schemadict, outdb):
    """function that takes a file, schemadict and outdb
    loops through row by row, matches with the schema
    and throws each row out to the outdb"""
    # let the user know that the script is running
    print(infile)
    # take the name of the file
    tablename = infile.split('.')[0]
    # grab pertinent part of schemadict
    schema = schemadict[tablename]
    f = open(infile)
    # start of the dictreader, which is a great little option for csvs
    reader = csv.DictReader(f)
    # open a connection and create cursor to access the database
    conn = sqlite3.connect(outdb)
    cur = conn.cursor()
    # find the intersection of csv fieldnames and the schema
    headers = [x for x in reader.fieldnames if x in schema.keys()]
    # build a proper comma-separated column list and matching "?" placeholders
    columns = ", ".join(headers)
    questionmarks = ", ".join("?" * len(headers))
    # create a base string that has everything but the values
    string = "insert or replace into {0} ({1}) values ({2})".format(tablename, columns, questionmarks)
    # loop through each row of the infile
    for r in reader:
        # process each element of the row through the schema
        # so strs stay as strings, and ints get converted to integers
        vals = [schema[k](r[k]) for k in reader.fieldnames if k in schema]
        # execute
        cur.execute(string, vals)
    # commit and close
    conn.commit()
    conn.close()    
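
A sketch of how processCsv might be driven; the file, schema, and database names are assumptions, and the target table is assumed to already exist in the output db:

# Hypothetical driver for processCsv; every name here is an assumption.
# The schema maps column names to converter callables, as the code above expects.
schemadict = {
    'stops': {'stop_id': str, 'stop_name': str, 'stop_lat': float, 'stop_lon': float},
}
processCsv('stops.csv', schemadict, 'gtfs.sqlite')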
Example 27
    def createDB(self):
        self.setWindowTitle(self.tr("Crea il DB %s" % gw.GEOSISMA_DBNAME) )
        self.setRange( 0, 3 )

        if self.stopThread:
            return
        
        if os.path.exists(self.DATABASE_OUTNAME):
            os.unlink(self.DATABASE_OUTNAME)
        # read the schema file
        geosismadb_schema = ""
        with open(self.DATABASE_SCHEMAFILE, 'r') as fs:
            geosismadb_schema += fs.read()
        self.onProgress()
        # connect spatialite db
        conn = db.connect(self.DATABASE_OUTNAME)
        # create DB
        try:
            self.resetMessage.emit(self.tr("Inizializza il DB Spatialite; %s" % self.DATABASE_OUTNAME), QgsMessageLog.INFO)
            conn.cursor().executescript(geosismadb_schema)
            self.onProgress()
        except db.Error as e:
            self.resetMessage.emit(str(e), QgsMessageLog.CRITICAL)
            raise e
        self.onProgress(-1)
Example 28
def get_connection(db_path):
    connection = spatialite.connect(db_path)
    if LOAD_AS_EXTENSION:
        # print('spatialite loaded as sqlite extension')

        connection.enable_load_extension(True)

        libspatialite_shared_libs = [
            'libspatialite.so',
            'libspatialite',
            'mod_spatialite',
            '/usr/local/opt/libspatialite/lib/mod_spatialite',
            'libspatialite.dll'
        ]

        load_successful = False
        for lib_name in libspatialite_shared_libs:
            load_ext_query = 'SELECT load_extension("{0}")'.format(lib_name)
            try:
                connection.execute(load_ext_query)
                load_successful = True
                break
            except Exception:
                # this candidate failed to load; try the next one
                pass

        if not load_successful:
            print("Unable to load spatialite sqlite3 extension")
            sys.exit(1)

    return connection
Example 29
def extractData(spatiaLitePath, tableName, id, attributes):
    try:
        conn = db.connect(spatiaLitePath)
        cur = conn.cursor()
        constAttributes = getConstAttributes(cur, tableName)
        varAttributes = getVarAttributes(cur, tableName)
        constData = getConstData(cur, tableName, id)
        varData = getVarData(cur, tableName, id)
        image = getGeometryImage(cur, tableName, id)
        
        #Filtering stuff
        if attributes:
            varAttr_ = []
            constAttr_ = []
            constData_ = []
            varData_ = []
            for index, value in enumerate(constAttributes):
                if value in attributes[0]:
                    constAttr_.append(constAttributes[index])
                    constData_.append(constData[index])
        
            for index, value in enumerate(varAttributes):
                if value in attributes[1]:
                    varAttr_.append(varAttributes[index])
                    for i,v in enumerate(varData):
                        if len(varData_) <= i:
                            varData_.append([varData[i][index]])
                        else:
                            varData_[i].append(varData[i][index])
            
            return [constAttr_, constData_, varAttr_, varData_, image]

        # no filter requested: return everything (return inferred; the original fell through to None)
        return [constAttributes, constData, varAttributes, varData, image]

    except db.Error as e:
        print("Error %s:" % e.args[0])
        sys.exit()
Example 30
 def CheckFailedOpen(self):
     YOU_CANNOT_OPEN_THIS = "/foo/bar/bla/23534/mydb.db"
     try:
         con = sqlite.connect(YOU_CANNOT_OPEN_THIS)
     except sqlite.OperationalError:
         return
     self.fail("should have raised an OperationalError")
Example 31
def main():
    u"""Función centinela"""
    # try:
    parser = ArgumentParser(
        description='Genera un archivo GTFS estático para Santander',
        version='gtfs4sdr 1.0')
    parser.add_argument('-osm',
                        action="store_true",
                        default=False,
                        help='descarga nueva cartografia desde OpenStreetMap')
    parser.add_argument('-shp',
                        action="store_true",
                        help='crea un archivo shapes.txt')
    parser.add_argument(
        '-fv',
        action="store_true",
        default=False,
        help='comprueba el archivo GTFS generado mediante FeedValidator')
    parser.add_argument(
        '-sv',
        action="store_true",
        default=False,
        help='comprueba el archivo GTFS generado mediante ScheduleViewer')
    parser.add_argument('-g',
                        action="store_true",
                        default=False,
                        help='genera el grafo para OpenTripPlanner')
    parser.add_argument('-ops',
                        action="store_true",
                        default=False,
                        help='sube el archivo Graph.obj generado a OpenShift')

    argumentos = parser.parse_args()
    shapes = argumentos.shp
    openstreetmap = argumentos.osm
    feedv = argumentos.fv
    schedulev = argumentos.sv
    grafo = argumentos.g
    openshift = argumentos.ops

    if openstreetmap is True:
        downloadOSM()

    importOSM()
    con = db.connect('/var/tmp/gtfs.sqlite')
    cursor = con.cursor()
    cursor.execute("DROP TABLE IF EXISTS relaciones")
    cursor.execute('CREATE TABLE relaciones (relacion INT, id TEXT);')
    con.commit()
    cursor.close()
    cargar_relaciones(RELACIONES)
    relaciones_lista = csv_orden_relaciones()
    import_orden_relaciones()
    sentidos_relaciones()
    ordenar_vias(relaciones_lista)
    convertir_geometria()
    ptos_rutas()
    calcular_distancia(relaciones_lista)
    excepciones()
    exportar_shape()
    # START BUILDING THE GTFS, EXCEPT FOR THE SHAPES
    exportar_calc(GTFS_ODS)
    exportar_calc(TIEMPOS_PARADAS)
    importar_csv()
    errores_excepciones()
    tiempos_paradas()
    ajuste_tiempos_paradas()
    tiempos_iguales()
    vista_stop_times()
    exportar_csv(GTFS_DIR)  # Export the final view to csv

    # Remove shapes.txt, which is empty, and replace it with
    # shapes_tmp.txt (the good one), renaming it
    if os.path.isfile(GTFS_DIR + "shapes_tmp.txt") is True:
        print(Fore.GREEN + "AVISO:" + Fore.RESET +
              " Renombrando shape_tmp.txt -> shape.txt")
        os.system("rm " + GTFS_DIR + "shapes.txt")
        os.system("mv " + GTFS_DIR + "shapes_tmp.txt " + GTFS_DIR +
                  "shapes.txt")

    comprimir_txt(GTFS_DIR, HOME)  # Compress the files
    if feedv is True:
        feedvalidator(GTFS_DIR)

    if schedulev is True:
        scheduleviewer(GTFS_DIR)

    if grafo is True:
        generar_grafo(OTP)

    if openshift is True:
        subir_grafo(OPENSHIFT_DIR, OTP)

    pygame.init()
    # Sound() takes a file path; the stray "/usr/bin/cvlc " prefix was a bug
    pygame.mixer.Sound(
        "/usr/share/sounds/KDE-Sys-App-Positive.ogg").play()
    print(Back.GREEN + "¡Finalizado!" + Back.RESET)
Example 32
 def setUp(self):
     self.con = sqlite.connect(":memory:")
Example 33
 def setUp(self):
     self.con = sqlite.connect(":memory:")
     self.cur = self.con.cursor()
Example 34
def GridFloodingAreas(mydb_path_user, PathFiles, DamID, UseEnergyHead):

    NotErr = True
    errMsg = 'OK'
    MatriceRisultati = []

    # ---------------------------------
    PathFiles = os.path.realpath(PathFiles)

    mydb_path_user = os.path.realpath(mydb_path_user)

    # polygon floodable area
    AreaInondabile = PathFiles + os.sep + 'AreaInondabile_tot.shp'

    # Polygon Areas1: first component of the floodable area
    # ------------------------------------------------------
    # areas to the right and left of the evaluated river axis,
    # based on the right/left widths obtained from the
    # one-dimensional propagation model
    AreaInondabile_1 = PathFiles + os.sep + 'AreaInondabile_1.shp'

    if not os.path.exists(PathFiles):
        errMsg = "There is no data for the dam num =%s \nEffettuare prima il calcolo delle sezioni a valle !" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg
    else:
        os.chdir(PathFiles)
        log_file = open('log.txt', 'w')
        timenow_our = datetime.now().strftime('%y-%m-%d %H:%M')
        log_file.write('Start %s\n' % timenow_our)
        log_file.close()

    # trace intermediate sections representative of the section
    CrossMedie = PathFiles + os.sep + 'CrossSecMean.shp'
    if not os.path.exists(CrossMedie):
        errMsg = "Missing CrossSecMean.shp for the dam num =%s \nPerform the calculation of the downstream sections first!" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg

    StreamDH = PathFiles + os.sep + 'StreamDHFilled.tif'
    if not os.path.exists(StreamDH):
        errMsg = "Missing for the dam num =%s il StreamDH\nCarry out first ModificaDH !" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg

    # reach polygons
    CrossSecPoly = PathFiles + os.sep + 'CrossSecPoly.shp'
    if not os.path.exists(CrossSecPoly):
        errMsg = "Missing CrossSecPoly.shp for the dam num =%s \nPerform the calculation of the downstream sections first!" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg

    # reach polygons split into right and left
    CrossSecPoly_2 = PathFiles + os.sep + 'CrossSecPoly_2.shp'
    if not os.path.exists(CrossSecPoly_2):
        errMsg = "Missing CrossSecPoly_2.shp for the dam num =%s \nPerform the calculation of the downstream sections first !" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg

    # the two polygons: hydraulic left and right
    PolySxDx = PathFiles + os.sep + 'PolySxDx.shp'
    if not os.path.exists(PolySxDx):
        errMsg = "Missing PolySxDx.shp for the dam num =%s \nPerform the calculation of the downstream sections first !" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg

    FileMatricePixDestra = PathFiles + os.sep + 'MatricePixDestra.csv'
    if not os.path.exists(FileMatricePixDestra):
        errMsg = "Missing for the dam num =%s MatricePixDestra.csv\nPerform CreaCurveAreaAltezza first !" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg

    # =======================================
    # Reading the characteristics of the GRID
    # =======================================

    gdal.AllRegister()

    indataset = gdal.Open(StreamDH, GA_ReadOnly)
    if indataset is None:
        errMsg = 'Could not open file %s' % StreamDH
        NotErr = bool()
        return NotErr, errMsg

    geotransform = indataset.GetGeoTransform()

    originX = geotransform[0]
    originY = geotransform[3]
    pixelWidth = geotransform[1]
    pixelHeight = geotransform[5]
    cols = indataset.RasterXSize
    rows = indataset.RasterYSize
    bands = indataset.RasterCount
    iBand = 1
    inband = indataset.GetRasterBand(iBand)
    inNoData = inband.GetNoDataValue()
    prj = indataset.GetProjectionRef()

    spatialRef = osr.SpatialReference()
    try:
        spatialRef.ImportFromWkt(prj)
    except:
        pass

    inband = None
    indataset = None

    # --------------------
    # reading from the database
    # --------------------
    conn = sqlite3.connect(mydb_path_user,
                           detect_types=sqlite3.PARSE_DECLTYPES
                           | sqlite3.PARSE_COLNAMES)

    cur = conn.cursor()

    # reading the data for the calculation of flooding widths
    NomeTabella = 'Q_H_max'

    sql = 'SELECT '
    sql += ' PixDist'
    sql += ', Progr_fiume'
    sql += ', Qmax'
    sql += ', Hmax'
    sql += ', Bmax'
    sql += ', Vmax'
    sql += ', Time'
    sql += ' FROM %s' % NomeTabella
    sql += ' WHERE DamID=%d' % (DamID)
    sql += ' ORDER BY PixDist;'
    cur.execute(sql)
    MatriceDati = cur.fetchall()

    if len(MatriceDati) == 0:
        errMsg = "Missing for the dam num =%s data Q_H_max\nCarry out first Calculation of propagation !" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg

    ListaTratti = []
    Progr_fiume = []
    Qmax = []
    Hmax = []
    Bmax = []
    Vmax = []
    Time = []

    for row in MatriceDati:
        ListaTratti.append(int(row[0]))
        Progr_fiume.append(float(row[1]))
        Qmax.append(float(row[2]))
        Hmax.append(float(row[3]))
        Bmax.append(float(row[4]))
        Vmax.append(float(row[5]))
        Time.append(float(row[6]))

    # arrays (numpy.float was removed from NumPy; plain float is equivalent)
    Progr_fiume_array = numpy.array(Progr_fiume, dtype=float)
    Qmax_array = numpy.array(Qmax, dtype=float)
    Hmax_array = numpy.array(Hmax, dtype=float)
    Bmax_array = numpy.array(Bmax, dtype=float)
    Vmax_array = numpy.array(Vmax, dtype=float)
    Time_array = numpy.array(Time, dtype=float)

    # finding the maximum depth value
    Hmax_tot = Hmax_array.max()

    # reading of the curves necessary to evaluate the shift to the right of the flood area
    fin = open(FileMatricePixDestra, 'r')
    reader = csv.reader(fin, delimiter=';')
    # next(reader) works on both Python 2 and 3
    headers = next(reader)

    nn = len(headers)
    hvals = []
    Vettoreh = []
    for i in range(1, nn):
        hvals.append(headers[i])
        pp = headers[i].split('=')
        Vettoreh.append(float(pp[1]))

    # water depth array
    H_Array = numpy.array(Vettoreh, dtype=float)

    # dictionary of section numbers
    dic_PixDist = {}
    # matrix of the quantities
    MatricePix = []
    ii = -1
    for row in reader:
        ii += 1
        dic_PixDist[int(row[0])] = ii
        MatricePix.append(row[1:])

    fin.close()

    # matrix of the percentage of area on the right bank of the river for each height
    MatriceArray = numpy.array(MatricePix, dtype=float)

    NomeTabellaMatrice = 'MatriceAexp'

    sql = 'SELECT '
    sql += ' PixDist'
    sql += ', progr_lungo_fiume'
    sql += ', distanza_fiume'
    sql += ', distanza_linea_retta'
    sql += ', pend'
    sql += ', ka'
    sql += ', ma'
    sql += ', kq'
    sql += ', mq'
    sql += ', kcel'
    sql += ', mcel'
    sql += ' FROM %s' % NomeTabellaMatrice
    sql += ' WHERE DamID=%d' % (DamID)
    sql += ' ORDER BY PixDist;'
    cur.execute(sql)
    MatriceDati = cur.fetchall()

    if len(MatriceDati) == 0:
        errMsg = "Missing for the dam num =%s data MatriceAexp\nCarry out first calculation of geometric quantitiese !" % (
            DamID)
        NotErr = bool()
        return NotErr, errMsg

    # coefficient matrix: ka;ma;kq;mq;kcel;mcel for each section
    dic_MatriceCoeff = {}
    # list of section numbers
    ListaNumSez = []
    for row in MatriceDati:
        tratto_cur = int(row[0])
        ListaNumSez.append(tratto_cur)
        dic_MatriceCoeff[tratto_cur] = row[3:]

    # Close communication with the database
    cur.close()
    conn.close()

    # reading of the sections
    # ---------------------

    nomecampoAltezza = 'hmax'

    driver = ogr.GetDriverByName('ESRI Shapefile')

    ds = driver.Open(CrossMedie, 1)
    if ds is None:
        errMsg = 'Could not open ' + CrossMedie
        NotErr = bool()
        return NotErr, errMsg

    layer = ds.GetLayer()

    feat = layer.GetNextFeature()

    Spatialref = layer.GetSpatialRef()
    Spatialref.AutoIdentifyEPSG()
    SourceEPSG = int(Spatialref.GetAuthorityCode(None))

    # list of points in left and right
    ListaPtSx = []
    ListaPtDx = []

    # dictionary of flood limit distances in left and right
    dic_DistSx = {}
    dic_DistDx = {}

    DV_sez = {}
    Time_min_sez = {}

    while feat:

        NumSez = feat.GetField('id')

        if NumSez == 0:

            NumSez = ListaNumSez[0]
        # midpoint distance
        dist1 = feat.GetField('dist1')
        # progressive along the river path
        progr = feat.GetField('progr')

        linea = feat.GetGeometryRef()
        Length = linea.Length()

        pt1 = linea.GetPoint(0)
        pt2 = linea.GetPoint(1)

        Qsez = numpy.interp(progr, Progr_fiume_array, Qmax_array)
        Hsez = numpy.interp(progr, Progr_fiume_array, Hmax_array)
        Bsez = numpy.interp(progr, Progr_fiume_array, Bmax_array)
        Vsez = numpy.interp(progr, Progr_fiume_array, Vmax_array)
        Timesez = numpy.interp(progr, Progr_fiume_array, Time_array)

        # check if use energy elevation
        if UseEnergyHead:
            # instead of the depth of water use energy elevation
            hcinetica = Vsez**2 / 2.0 / 9.81
            Htot = Hsez + hcinetica
        else:
            Htot = Hsez

        # load the dictionary
        DV_sez[NumSez] = Qsez / Bsez
        Time_min_sez[NumSez] = int(Timesez / 60.0)

        feat.SetField(nomecampoAltezza, Htot)

        layer.SetFeature(feat)

        # reading the widths of the wet area on the right and left
        # ..........................................................
        try:
            MatriceCoeff = dic_MatriceCoeff[NumSez]
        except KeyError:
            # section not in the coefficient table: reuse the previous one
            pass

        ka = float(MatriceCoeff[2])
        ma = float(MatriceCoeff[3])
        mb = ma - 1.0
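        # inferred from the coefficient names: the wetted area is modeled as
        # A = ka * H**ma, so the top width below is its derivative
        # B = dA/dH = ka * ma * H**(ma - 1)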
        # wet width for water level
        Bsez_tot = ka * ma * math.pow(Htot, mb)

        PercDx = numpy.interp(Htot, H_Array, MatriceArray[dic_PixDist[NumSez]])
        Bdx = Bsez_tot * PercDx
        Bsx = Bsez_tot - Bdx

        dic_DistSx[NumSez] = Bsx
        dic_DistDx[NumSez] = Bdx

        PercAscSx = (dist1 - Bsx) / Length
        PercAscDx = (dist1 + Bdx) / Length

        Pt_Sx = PuntoIntermedio(pt1, pt2, PercAscSx)
        Pt_Dx = PuntoIntermedio(pt1, pt2, PercAscDx)

        ListaPtSx.append(Pt_Sx)
        ListaPtDx.append(Pt_Dx)

        feat = layer.GetNextFeature()

    ds.Destroy()

    log_file = open('log.txt', 'a')
    log_file.write('End scrittura hmax\n')
    log_file.close()

    # making the polygon based on the river path
    # ......................................................

    try:
        # creating/connecting the db
        conn = db.connect(mydb_path_user)

    except:

        conn = sqlite3.connect(mydb_path_user,
                               detect_types=sqlite3.PARSE_DECLTYPES
                               | sqlite3.PARSE_COLNAMES)
        # import extention
        conn.enable_load_extension(True)
        conn.execute('SELECT load_extension("mod_spatialite")')

    # creating a Cursor
    cur = conn.cursor()

    NomeTabellaLinee = 'Downstreampath'

    sql = 'SELECT TotalLength,ST_AsText(geom) FROM %s WHERE DamID=%d' % (
        NomeTabellaLinee, DamID)
    cur.execute(sql)
    ChkDiga = cur.fetchone()

    if ChkDiga is None:
        errMsg = "Table %s has no data for the dam num =%s \nCompute the downstream line first!" % (
            NomeTabellaLinee, DamID)
        NotErr = bool()
        return NotErr, errMsg

    else:
        wkt_line = ChkDiga[1]
        TotalLength = ChkDiga[0]
        StreamLine = ogr.CreateGeometryFromWkt(wkt_line)
        StreamLine.FlattenTo2D()

        dic_StreamTratti = {}

        inDS1 = driver.Open(CrossSecPoly, 0)
        if inDS1 is None:
            errMsg = 'Could not open ' + CrossSecPoly
            NotErr = bool()
            return NotErr, errMsg

        InlayerCurve = inDS1.GetLayer()

        num_tratti = InlayerCurve.GetFeatureCount()

        feat = InlayerCurve.GetNextFeature()

        dic_NumTratto = {}

        ii = -1
        while feat:

            NumSez = feat.GetField('id')

            ii += 1
            dic_NumTratto[ii] = NumSez

            poly = feat.GetGeometryRef()

            line_curr = poly.Intersection(StreamLine)
            if line_curr is not None:
                dic_StreamTratti[NumSez] = line_curr.ExportToWkt()
            else:
                txt = 'No intersection cross-sec num=%d' % NumSez
                print(txt)

            feat = InlayerCurve.GetNextFeature()

        inDS1.Destroy()

    # Close communication with the database
    cur.close()
    conn.close()

    ds = driver.Open(PolySxDx, 0)

    if ds is None:
        errMsg = 'Could not open ' + PolySxDx
        NotErr = bool()
        return NotErr, errMsg

    layer = ds.GetLayer()

    filtro = "lato = %d" % 0
    layer.SetAttributeFilter(filtro)

    feat = layer.GetNextFeature()

    PoligonoSx = feat.GetGeometryRef()
    PoligonoSx_wkt = PoligonoSx.ExportToWkt()

    layer.SetAttributeFilter(None)
    layer.ResetReading()
    filtro = "lato = %d" % 1
    layer.SetAttributeFilter(filtro)

    feat = layer.GetNextFeature()

    PoligonoDx = feat.GetGeometryRef()
    PoligonoDx_wkt = PoligonoDx.ExportToWkt()

    ds.Destroy()

    # initializing the polygon of the floodable area
    PoligonoAree1 = ogr.Geometry(ogr.wkbPolygon)

    PolySx = ogr.CreateGeometryFromWkt(PoligonoSx_wkt)
    PolyDx = ogr.CreateGeometryFromWkt(PoligonoDx_wkt)

    dist_min_pixel = pixelWidth

    for i in range(num_tratti):
        ii = dic_NumTratto[i]
        linea_curr_wkt = dic_StreamTratti[ii]
        linea_curr = ogr.CreateGeometryFromWkt(linea_curr_wkt)
        for lato in range(2):
            # check left
            if lato == 0:
                if dic_DistSx[ii] > dist_min_pixel:
                    polytratto = linea_curr.Buffer(dic_DistSx[ii])
                else:
                    polytratto = linea_curr.Buffer(dist_min_pixel)
                NewGeom = polytratto.Intersection(PolySx)
                if NewGeom is not None:
                    PoligonoAree1 = PoligonoAree1.Union(NewGeom)
                    polytratto.Destroy()
                    NewGeom.Destroy()
                else:
                    PoligonoAree1 = PoligonoAree1.Union(polytratto)
            # check right
            elif lato == 1:
                if dic_DistDx[ii] > dist_min_pixel:
                    polytratto = linea_curr.Buffer(dic_DistDx[ii])
                else:
                    polytratto = linea_curr.Buffer(dist_min_pixel)
                NewGeom = polytratto.Intersection(PolyDx)
                if NewGeom is not None:
                    PoligonoAree1 = PoligonoAree1.Union(NewGeom)
                    polytratto.Destroy()
                    NewGeom.Destroy()
                else:
                    PoligonoAree1 = PoligonoAree1.Union(polytratto)

    log_file = open('log.txt', 'a')
    log_file.write('End PoligonoAree1\n')
    log_file.close()

    # making a shapefile with the PolygonAree1
    # ----------------------------------------
    shpnew_1 = AreaInondabile_1
    if os.path.exists(shpnew_1):
        driver.DeleteDataSource(shpnew_1)

    outDS_1 = driver.CreateDataSource(shpnew_1)
    if outDS_1 is None:
        errMsg = 'Could not create file %s' % shpnew_1
        NotErr = bool()
        return NotErr, errMsg

    outLayer_1 = outDS_1.CreateLayer('AreaInondabile_1',
                                     Spatialref,
                                     geom_type=ogr.wkbMultiPolygon)

    fieldDefn2 = ogr.FieldDefn('id', ogr.OFTInteger)
    outLayer_1.CreateField(fieldDefn2)

    featureDefn_1 = outLayer_1.GetLayerDefn()

    feature = ogr.Feature(featureDefn_1)
    feature.SetField('id', 1)

    feature.SetGeometry(PoligonoAree1)
    outLayer_1.CreateFeature(feature)

    outDS_1.Destroy()

    # making the polygon # 2 based on the digital terrain model
    # ---------------------------------------------------------------------

    if not os.path.exists(StreamDH):
        errMsg = 'File StreamDHFilled %s does not exist' % os.path.realpath(
            StreamDH)
        NotErr = bool()
        return NotErr, errMsg

    infile = StreamDH

    indatasetElev = gdal.Open(infile, GA_ReadOnly)
    if indatasetElev is None:
        errMsg = 'Could not open ' + infile
        NotErr = bool()
        return NotErr, errMsg

    prj = indatasetElev.GetProjectionRef()

    geotransform = indatasetElev.GetGeoTransform()

    originXElev = geotransform[0]
    originYElev = geotransform[3]
    pixelWidthElev = geotransform[1]
    pixelHeightElev = geotransform[5]
    colsElev = indatasetElev.RasterXSize
    rowsElev = indatasetElev.RasterYSize
    bandsElev = indatasetElev.RasterCount
    iBand = 1
    inbandElev = indatasetElev.GetRasterBand(iBand)
    inNoDataElev = inbandElev.GetNoDataValue()

    # reading the entire file at once
    DH = inbandElev.ReadAsArray(0, 0, colsElev, rowsElev).astype(numpy.float32)

    mask_Nodata = DH == inNoDataElev

    inDS1 = driver.Open(CrossMedie, 0)
    if inDS1 is None:
        errMsg = 'Could not open ' + CrossMedie
        NotErr = bool()
        return NotErr, errMsg

    InlayerCurve = inDS1.GetLayer()

    spatialRef_sez = InlayerCurve.GetSpatialRef()

    feat_defn = InlayerCurve.GetLayerDefn()
    NumFields = feat_defn.GetFieldCount()

    # creates a grid with depth to cross sections
    GridSez = numpy.zeros((rowsElev, colsElev), numpy.float32)

    format = 'MEM'
    type = GDT_Float32

    driver2 = gdal.GetDriverByName(format)
    driver2.Register()

    gt = indatasetElev.GetGeoTransform()

    ds = driver2.Create('GridSez', indatasetElev.RasterXSize,
                        indatasetElev.RasterYSize, 1, type)
    if gt is not None and gt != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0):
        ds.SetGeoTransform(gt)

    if prj is not None and len(prj) > 0:
        ds.SetProjection(prj)
    else:
        prj = spatialRef.ExportToWkt()
        ds.SetProjection(prj)

    iBand = 1
    testo = "ATTRIBUTE=%s" % (nomecampoAltezza)
    # Rasterize
    outband = ds.GetRasterBand(iBand)

    outband.WriteArray(GridSez, 0, 0)
    CampoValore = [testo]

    err = gdal.RasterizeLayer(ds, [iBand],
                              InlayerCurve,
                              burn_values=[0],
                              options=CampoValore)
    if err != 0:
        raise Exception("error rasterizing layer: %s" % err)

    # Reading WL
    GridSezWL = outband.ReadAsArray().astype(numpy.float32)

    ds = None

    # INTERPOLATE Water Level Grid
    # ----------------------------

    #size of grid
    xmin = originXElev
    xmax = xmin + colsElev * pixelWidthElev
    ymax = originYElev
    ymin = originYElev + rowsElev * pixelHeightElev

    nx = int((xmax - xmin + 1) / pixelWidthElev)
    ny = int(-(ymax - ymin + 1) / pixelHeightElev)

    # Generate a regular grid to interpolate the data.
    xi = numpy.linspace(xmin, xmax, nx)
    yi = numpy.linspace(ymin, ymax, ny)
    xi, yi = numpy.meshgrid(xi, yi)

    # Reading x,y,z
    mask = GridSezWL > 0
    x = xi[mask]
    y = yi[mask]
    z = GridSezWL[mask]

    # interpolate using scipy.interpolate griddata (imported here as "il");
    # method may be 'nearest', 'linear' or 'cubic'
    WLArray = il.griddata(
        (x, y), z, (xi, yi), method='linear'
    )

    checkMask = numpy.isnan(WLArray)

    nnan = checkMask.sum()

    Nodata = -9999
    if nnan > 0:
        WLArray = numpy.choose(checkMask, (WLArray, Nodata))

    # WaterDepth calculation by difference between water and ground level
    Wdepth = WLArray - DH

    # filtering of isolated points and internal empty points
    Wdepth = signal.medfilt2d(Wdepth, kernel_size=7)

    # eliminates negative values
    maskWd = Wdepth <= 0.0
    Wdepth = numpy.choose(maskWd, (Wdepth, Nodata))

    # eliminate external anomalous values due to the filtering algorithm
    maskWd = Wdepth > 9999
    Wdepth = numpy.choose(maskWd, (Wdepth, Nodata))

    # adds the nodata of the terrain model
    Wdepth = numpy.choose(mask_Nodata, (Wdepth, Nodata))

    # output file
    FileDEM_out = PathFiles + os.sep + 'Hmax.tif'

    format = 'GTiff'
    driver = gdal.GetDriverByName(format)
    type = GDT_Float32
    gt = indatasetElev.GetGeoTransform()

    ds = driver.Create(FileDEM_out, indatasetElev.RasterXSize,
                       indatasetElev.RasterYSize, 1, type)
    if gt is not None and gt != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0):
        ds.SetGeoTransform(gt)

    # set the reference system to that of the water depth map; use the default if it is missing
    if prj is not None and len(prj) > 0:
        ds.SetProjection(prj)
    else:
        prj = spatialRef.ExportToWkt()
        ds.SetProjection(prj)

    # writing raster
    iBand = 1
    outband = ds.GetRasterBand(iBand)
    outband.WriteArray(Wdepth, 0, 0)

    outband.FlushCache()
    outband.SetNoDataValue(Nodata)
    outband.GetStatistics(0, 1)
    outband = None

    ds = None

    inDS1.Destroy()

    log_file = open('log.txt', 'a')
    log_file.write('End Hmax.tif\n')
    log_file.close()

    # ----------------------------
    # Rasterize PoligonoAree1
    # ------------------------
    PoligonoAree1_Raster = PathFiles + os.sep + 'PoligonoAree1.tif'

    orig_data_source = ogr.Open(shpnew_1)
    source_ds = ogr.GetDriverByName("Memory").CopyDataSource(
        orig_data_source, "")
    source_layer = source_ds.GetLayer()

    format = 'GTiff'
    type = GDT_Int16

    driver3 = gdal.GetDriverByName(format)
    driver3.Register()

    dsRaster = driver3.Create(PoligonoAree1_Raster, cols, rows, 1, type)
    gt1 = geotransform
    if gt1 is not None and gt1 != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0):
        dsRaster.SetGeoTransform(gt1)

    if prj is not None and len(prj) > 0:
        dsRaster.SetProjection(prj)
    else:
        prj = spatialRef.ExportToWkt()
        dsRaster.SetProjection(prj)

    # Rasterize
    iBand = 1
    outband = dsRaster.GetRasterBand(iBand)

    outNodata = -9999

    ClassTratti = numpy.zeros((rows, cols), dtype=numpy.int16)

    outband.WriteArray(ClassTratti, 0, 0)

    # Rasterize
    err = gdal.RasterizeLayer(dsRaster, [1],
                              source_layer,
                              burn_values=[0],
                              options=["ATTRIBUTE=id"])
    if err != 0:
        raise Exception("error rasterizing layer: %s" % err)

    # read back the matrix, which holds value 1 in flooded cells
    MatriceDatiArea1 = outband.ReadAsArray(0, 0, cols, rows)

    # eliminates any points with H greater than Hmax
    DH_MatriceDatiArea1 = DH * MatriceDatiArea1
    mask_greatHmax = DH_MatriceDatiArea1 > Hmax_tot
    nnn = mask_greatHmax.sum()
    MatriceDatiArea1 = numpy.choose(mask_greatHmax, (MatriceDatiArea1, 0))

    # writing Nodata
    mask_Nodata = MatriceDatiArea1 == 0
    MatriceDati = numpy.choose(mask_Nodata, (MatriceDatiArea1, outNodata))
    outband.WriteArray(MatriceDati, 0, 0)

    outband.FlushCache()
    outband.SetNoDataValue(outNodata)
    outband.GetStatistics(0, 1)

    outband = None

    dsRaster = None
    orig_data_source.Destroy()

    # ----------------------------

    # name of the output file with 1 in the wet cells
    FileDEM_out_1 = PathFiles + os.sep + 'HH.tif'

    format = 'GTiff'
    driver = gdal.GetDriverByName(format)
    type = GDT_Int16
    gt = indatasetElev.GetGeoTransform()

    ds = driver.Create(FileDEM_out_1, indatasetElev.RasterXSize,
                       indatasetElev.RasterYSize, 1, type)
    if gt is not None and gt != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0):
        ds.SetGeoTransform(gt)

    # set the reference system to that of the water depth map; use the default if it is missing
    if prj is not None and len(prj) > 0:
        ds.SetProjection(prj)
    else:
        prj = spatialRef.ExportToWkt()
        ds.SetProjection(prj)

    # writing raster
    iBand = 1
    outband = ds.GetRasterBand(iBand)
    WW = Wdepth > 0
    # adding polygon areas 1
    # ---------------------------
    mask_Data1 = MatriceDatiArea1 == 1
    # saving in the raster
    WW = numpy.choose(mask_Data1, (WW, 1))

    outband.WriteArray(WW, 0, 0)

    outband.FlushCache()
    outband.SetNoDataValue(Nodata)
    outband.GetStatistics(0, 1)
    outband = None

    ds = None

    log_file = open('log.txt', 'a')
    log_file.write('End HH.tif\n')
    log_file.close()

    # Raster to vector
    # -------------------------

    # this allows GDAL to throw Python Exceptions
    gdal.UseExceptions()

    log_file = open('log.txt', 'a')
    log_file.write('End gdal.UseExceptions()\n')
    log_file.close()

    fileName = FileDEM_out_1
    src_ds = gdal.Open(fileName)
    if src_ds is None:
        errMsg = 'Could not open ' + fileName
        NotErr = bool()
        return NotErr, errMsg

    srcband = src_ds.GetRasterBand(1)
    srs = osr.SpatialReference()
    srs.ImportFromWkt(src_ds.GetProjection())

    log_file = open('log.txt', 'a')
    log_file.write('End srs.ImportFromWkt(src_ds.GetProjection()\n')
    log_file.close()

    dst_layername = "PolyFtr"
    drv = ogr.GetDriverByName("ESRI Shapefile")
    dst_filename = PathFiles + os.sep + dst_layername + ".shp"
    if os.path.exists(dst_filename):
        drv.DeleteDataSource(dst_filename)

    dst_ds = drv.CreateDataSource(dst_filename)
    dst_layer = dst_ds.CreateLayer(dst_layername, srs=srs)
    newField = ogr.FieldDefn('id', ogr.OFTInteger)
    dst_layer.CreateField(newField)

    log_file = open('log.txt', 'a')
    log_file.write('End dst_layer.CreateField(newField)\n')
    log_file.close()

    # use the band itself as the mask
    gdal.Polygonize(srcband, srcband, dst_layer, 0, [], callback=None)

    log_file = open('log.txt', 'a')
    log_file.write('End Polygonize\n')
    log_file.close()

    src_ds = None
    dst_ds.Destroy()

    # deleting the temporary grid
    os.remove(fileName)

    log_file = open('log.txt', 'a')
    log_file.write('End remove HH.tif\n')
    log_file.close()

    # performing the union of the polygons
    # ----------------------------------
    in_layername = PathFiles + os.sep + "PolyFtr.shp"

    shpdriver = ogr.GetDriverByName('ESRI Shapefile')

    inDS1 = shpdriver.Open(in_layername, 0)
    if inDS1 is None:
        errMsg = 'Could not open ' + in_layername
        NotErr = bool()
        return NotErr, errMsg

    InlayerCurve = inDS1.GetLayer()

    feat = InlayerCurve.GetNextFeature()

    poly_tot = ogr.Geometry(ogr.wkbMultiPolygon)

    while feat:

        poly = feat.GetGeometryRef()
        # add the polygon geometry
        poly_tot.AddGeometry(poly)

        feat = InlayerCurve.GetNextFeature()

    inDS1.Destroy()

    log_file = open('log.txt', 'a')
    log_file.write('End PolyFtr.shp\n')
    log_file.close()

    # creating the final flood area
    # -----------------------------

    # saving in the geodatabase
    # ---------------------------------------

    try:
        # creating/connecting the db
        conn = db.connect(mydb_path_user)

    except:

        conn = sqlite3.connect(mydb_path_user,
                               detect_types=sqlite3.PARSE_DECLTYPES
                               | sqlite3.PARSE_COLNAMES)
        # load the SpatiaLite extension
        conn.enable_load_extension(True)
        conn.execute('SELECT load_extension("mod_spatialite")')

    cur = conn.cursor()

    TargetTabella = 'FloodExtent'

    sql = "SELECT srid FROM geometry_columns WHERE f_table_name='%s'" % (
        TargetTabella.lower())
    cur.execute(sql)
    record = cur.fetchone()
    if record is not None:
        OriginEPSG = record[0]
    else:
        OriginEPSG = 32632

    sql = 'SELECT PKUID,id FROM %s WHERE DamID=%d' % (TargetTabella, DamID)
    cur.execute(sql)
    ListaTratti = cur.fetchall()
    if len(ListaTratti) > 0:
        # delete previous data
        sql = 'DELETE FROM %s WHERE DamID=%d' % (TargetTabella, DamID)
        cur.execute(sql)
        conn.commit()

    inDS1 = shpdriver.Open(CrossSecPoly, 0)
    if inDS1 is None:
        errMsg = 'Could not open ' + CrossSecPoly
        NotErr = bool()
        return NotErr, errMsg

    InlayerCurve = inDS1.GetLayer()

    feat = InlayerCurve.GetNextFeature()

    while feat:

        NumSez = feat.GetField('id')

        poly = feat.GetGeometryRef()

        FloodSeverityString = FloodSeverity(DV_sez[NumSez])

        Factor = ConseqFactot(FloodSeverityString, Time_min_sez[NumSez])

        # making the intersection to get the polygon
        poly_curr = poly.Intersection(poly_tot)

        if poly_curr is not None:

            sql = 'INSERT INTO %s (DamID,id,DV,FloodSeverity,WarningTimeMin,FatalityRate,geom) VALUES (%d' % (
                TargetTabella, DamID)
            sql += ',%d' % NumSez
            sql += ',%.2f' % DV_sez[NumSez]
            sql += ',"%s"' % FloodSeverityString
            sql += ',%d' % Time_min_sez[NumSez]
            sql += ',%.3f' % Factor

            poly_curr.FlattenTo2D()

            # promote POLYGON to MULTIPOLYGON so the table stays homogeneous
            if poly_curr.GetGeometryName() == 'POLYGON':
                multipolygon = ogr.Geometry(ogr.wkbMultiPolygon)
                multipolygon.AddGeometry(poly_curr)
                wkt2 = multipolygon.ExportToWkt()
            else:
                wkt2 = poly_curr.ExportToWkt()

            GeomWKT = "GeomFromText('%s',%d)" % (wkt2, OriginEPSG)
            sql += ',%s' % GeomWKT
            sql += ');'
            cur.execute(sql)

        else:

            log_file = open('log.txt', 'a')
            log_file.write('Err segment n=%d\n' % NumSez)
            log_file.close()

        feat = InlayerCurve.GetNextFeature()

    inDS1.Destroy()

    log_file = open('log.txt', 'a')
    log_file.write('End routine\n')
    log_file.close()

    conn.commit()
    # Close communication with the database
    cur.close()
    conn.close()

    return NotErr, errMsg
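
The routine above repeats one core GDAL pattern several times: burn a vector attribute into an in-memory grid, then interpolate the sparse burned cells over the full raster. Below is a minimal self-contained sketch of that pattern only, not the original routine; the attribute name 'h' and the function name are illustrative assumptions.

from osgeo import gdal
import numpy
from scipy.interpolate import griddata

def rasterize_then_interpolate(layer, geotransform, cols, rows, attr='h'):
    # burn the attribute into an in-memory float grid (0 where not covered)
    mem = gdal.GetDriverByName('MEM').Create('', cols, rows, 1, gdal.GDT_Float32)
    mem.SetGeoTransform(geotransform)
    gdal.RasterizeLayer(mem, [1], layer, burn_values=[0],
                        options=['ATTRIBUTE=%s' % attr])
    grid = mem.GetRasterBand(1).ReadAsArray().astype(numpy.float32)
    # interpolate linearly from the burned cells (assumed > 0) to every cell
    yy, xx = numpy.mgrid[0:rows, 0:cols]
    mask = grid > 0
    return griddata((xx[mask], yy[mask]), grid[mask], (xx, yy), method='linear')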
Esempio n. 35
0
    def migrate(self):

        from_conn = spatialite.connect('navirec.sqlite')
        from_cursor = from_conn.cursor()
        to_cursor = self.cursor()

        # traces

        print "Migrating traces"

        sql = "select session_id, X(geom) as longitude, Y(geom) as latitude, altitude, timestamp from Traces"
        from_cursor.execute(sql)
        traces = from_cursor.fetchall()
        if traces is not None and len(traces) > 0:
            for trace in traces:
                journey_id = trace[0]
                longitude = trace[1]
                latitude = trace[2]
                altitude = trace[3]
                timestamp = trace[4]
                if altitude is None:
                    altitude = 0
                to_cursor.execute(
                    "SELECT * FROM trace WHERE journey_id=%s AND timestamp=%s AND ST_Equals(geometry, %s)",
                    (journey_id, timestamp,
                     ppygis.Point(longitude, latitude, altitude, srid=4326)))
                matching_traces = to_cursor.fetchall()
                if len(matching_traces) == 0:
                    sys.stdout.write('.')
                    to_cursor.execute(
                        "INSERT INTO trace (journey_id, timestamp, geometry) VALUES  (%s, %s, %s)",
                        (journey_id, timestamp,
                         ppygis.Point(longitude, latitude, altitude,
                                      srid=4326)))
                else:
                    sys.stdout.write('!')

        # routes

        print "Migrating routes"

        sql = "select session_id, X(PointN(geom, 1)) as longitude1, Y(PointN(geom, 1)) as latitude1, X(PointN(geom, 2)) as longitude2, Y(PointN(geom, 2)) as latitude2, timestamp, speed, mode from Routes"
        from_cursor.execute(sql)
        routes = from_cursor.fetchall()
        if routes is not None and len(routes) > 0:
            for route in routes:
                journey_id = route[0]
                longitude1 = route[1]
                latitude1 = route[2]
                longitude2 = route[3]
                latitude2 = route[4]
                timestamp = route[5]
                speed = route[6] / 3.6
                mode = route[7]
                altitude = 0
                point1 = ppygis.Point(longitude1,
                                      latitude1,
                                      altitude,
                                      srid=4326)
                point2 = ppygis.Point(longitude2,
                                      latitude2,
                                      altitude,
                                      srid=4326)
                line = ppygis.LineString((point1, point2), srid=4326)
                to_cursor.execute(
                    "SELECT * FROM route WHERE journey_id=%s AND timestamp=%s AND ST_Equals(geometry, %s)",
                    (journey_id, timestamp, line))
                matching_routes = to_cursor.fetchall()
                if len(matching_routes) == 0:
                    sys.stdout.write('.')
                    to_cursor.execute(
                        "INSERT INTO route (journey_id, timestamp, geometry, speed, mode) VALUES  (%s, %s, %s, %s, %s)",
                        (journey_id, timestamp, line, speed, mode))
                else:
                    sys.stdout.write('!')

        self.connection.commit()
        from_conn.close()
        return
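
The SELECT-before-INSERT guard above is what makes migrate() safe to re-run. A stripped-down, stdlib-only sketch of the same idempotent pattern (the trace table and WKT column here are illustrative, not the original schema):

import sqlite3

def insert_trace_once(conn, journey_id, timestamp, wkt):
    # insert only if an identical row is not already present
    cur = conn.cursor()
    cur.execute("SELECT 1 FROM trace WHERE journey_id=? AND timestamp=? AND geometry_wkt=?",
                (journey_id, timestamp, wkt))
    if cur.fetchone() is None:
        cur.execute("INSERT INTO trace (journey_id, timestamp, geometry_wkt) VALUES (?, ?, ?)",
                    (journey_id, timestamp, wkt))
        conn.commit()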
Esempio n. 36
0
def excepciones():
    u"""Introduce excepciones para preparar los datos para su exportación."""
    con = db.connect('/var/tmp/gtfs.sqlite')
    cursor = con.cursor()
    # Create a table that organizes the fields to be exported
    cursor.execute('DROP TABLE IF EXISTS shapes_csv')
    sql = (
        "CREATE TABLE shapes_csv AS SELECT b.id AS shape_id, a.shape_pt_lon AS shape_pt_lon, a.shape_pt_lat AS shape_pt_lat, "
        "a.shape_pt_sequence AS shape_pt_sequence, a.shape_dist_traveled AS shape_dist_traveled "
        "FROM shapes AS a JOIN relaciones_rutas AS b USING (relacion) ORDER BY b.id, a.shape_pt_sequence"
    )
    cursor.execute(sql)
    # Return lines of the railways
    print(Fore.GREEN + "NOTICE:" + Fore.RESET +
          " Generating the return lines of the railways.")
    ferrocarriles = [
        "FEVE-S1", "FEVE-S2", "FEVE-S2-1", "FEVE-S2-2", "RENFE-C1"
    ]
    for linea in ferrocarriles:
        distancia = 0
        sql = "SELECT * FROM shapes_csv WHERE shape_id = '" + \
            str(linea) + "' ORDER BY shape_pt_sequence DESC LIMIT 1"
        cursor.execute(sql)
        tramos = cursor.fetchone()
        dist_max = tramos[4]
        secuencia = int(tramos[3])
        print("Invirtiendo secuencia " + Fore.GREEN + str(linea) + Fore.RESET +
              " --> " + Fore.GREEN + str(tramos[4]) + Fore.GREEN + " puntos.")
        id_shape = linea + "-V"

        # First point, with zero distance
        cursor.execute("SELECT * FROM shapes_csv WHERE shape_id = '" +
                       str(linea) + "' AND shape_pt_sequence = " +
                       str(secuencia) + ";")
        tramo = cursor.fetchone()
        cursor.execute(
            "INSERT INTO shapes_csv(shape_id, shape_pt_lon, shape_pt_lat, shape_pt_sequence, shape_dist_traveled) VALUES ('"
            + str(id_shape) + "', " + str(tramo[1]) + ", " + str(tramo[2]) +
            ", 1, 0);")

        for z in range(1, int(tramos[3])):
            # Fetch one segment and the previous one in order to compute
            # the distance of the first
            cursor.execute("SELECT * FROM shapes_csv WHERE shape_id = '" +
                           str(linea) + "' AND shape_pt_sequence = " +
                           str(secuencia) + ";")
            tramo = cursor.fetchone()
            # print("Secuencia: " + str(secuencia))
            cursor.execute("SELECT * FROM shapes_csv WHERE shape_id = '" +
                           str(linea) + "' AND shape_pt_sequence = " +
                           str(secuencia - 1) + ";")
            anterior_tramo = cursor.fetchone()
            distancia = distancia + (int(tramo[4]) - int(anterior_tramo[4]))
            # print("Distancia:" + str(distancia))
            cursor.execute(
                "INSERT INTO shapes_csv(shape_id, shape_pt_lon, shape_pt_lat, shape_pt_sequence, shape_dist_traveled) VALUES ('"
                + str(id_shape) + "', " + str(anterior_tramo[1]) + ", " +
                str(anterior_tramo[2]) + ", " + str(z + 1) + ", " +
                str(distancia) + ");")
            secuencia = secuencia - 1

        # Hack so that the last point shows up; otherwise it was missing.
        cursor.execute("SELECT * FROM shapes_csv WHERE shape_id = '" +
                       str(linea) + "' ORDER BY shape_pt_sequence ASC LIMIT 1")
        tramos = cursor.fetchone()
        z = z + 1

    print(Fore.GREEN + "¡Secuencias " + str(ferrocarriles) + " invertidas!")
    con.commit()
    cursor.close()
    return
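
One caveat on the snippet above: every statement is assembled by string concatenation. With sqlite3 the same insert can be written with ? placeholders, which sidesteps quoting and injection problems; a sketch using the same shapes_csv columns:

def insert_shape_point(cursor, shape_id, lon, lat, sequence, dist):
    # parameterized equivalent of the concatenated INSERT above
    cursor.execute(
        "INSERT INTO shapes_csv (shape_id, shape_pt_lon, shape_pt_lat, "
        "shape_pt_sequence, shape_dist_traveled) VALUES (?, ?, ?, ?, ?)",
        (shape_id, lon, lat, sequence, dist))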
Esempio n. 37
0
    return True


# ----------------------------------------------------------------------------------------------------------------------

if __name__ in ('__main__', '__console__', '__builtin__'):

    # directory of the test data
    pfad = 'C:/FHAC/jupiter/hoettges/team_data/Kanalprogramme/k_qkan/k_heqk/beispiele/modelldb_itwh'
    database_QKan = os.path.join(pfad, 'test1.sqlite')

    if os.path.exists(database_QKan):
        os.remove(database_QKan)

    consl = splite.connect(database=database_QKan)
    cursl = consl.cursor()

    # iface.messageBar().pushMessage("Information", "Creating SpatiaLite database. Please wait...",
    #     level=QgsMessageBar.INFO)
    progressMessageBar = iface.messageBar().createMessage(
        "Doing something boring...")
    progress = QProgressBar()
    progress.setMaximum(10)
    progress.setAlignment(Qt.AlignLeft | Qt.AlignVCenter)
    progressMessageBar.layout().addWidget(progress)
    iface.messageBar().pushWidget(progressMessageBar, iface.messageBar().INFO)
    progress.setValue(2)
    iface.messageBar().clearWidgets()

    iface.mainWindow().statusBar().showMessage(
Esempio n. 38
0
def csv_orden_relaciones():
    u"""Ordena la vías que forman cada ruta.

    Analiza los XML de OSM para determinar el orden de las
    vías de cada relación y los exporta a un CSV.

    """
    con = db.connect('/var/tmp/gtfs.sqlite')
    cursor = con.cursor()
    # Since some lines share the same relation, only the unique IDs
    # must be selected
    cursor.execute("SELECT DISTINCT(relacion) FROM relaciones_rutas;")
    relaciones = cursor.fetchall()
    relaciones_lista = []
    for fila in relaciones:
        # Dump the query result into a list for use when
        # parsing the XML
        relaciones_lista.append(fila[0])
    print(Fore.GREEN + "AVISO:" + Fore.RESET +
          " Creándo el archivo /var/tmp/orden_relaciones.csv")

    # First create a csv with a single row that will serve as header
    with open("/var/tmp/orden_relaciones.csv", "wb") as archivo_texto:
        writer = csv.writer(archivo_texto, quoting=csv.QUOTE_NONE)
        writer.writerow(["relacion", "orden", "way"])

    # Parse all the relations and update the csv
    print(Fore.GREEN + "NOTICE:" + Fore.RESET +
          " Downloading and parsing the XML files of the OSM relations")
    for r in relaciones_lista:
        lista = []

        print(Fore.GREEN + "NOTICE:" + Fore.RESET + " Fetching file: " +
              str(r) + ".xml from OpenStreetMap")
        url = 'http://api.openstreetmap.org/api/0.6/relation/' + str(r)
        urllib.urlretrieve(url, str(r) + ".xml")  # download the file
        # Load the xml document into the xmldoc object
        xmldoc = minidom.parse(str(r) + '.xml')
        itemlist = xmldoc.getElementsByTagName('member')

        for s in itemlist:
            if s.attributes['type'].value == "way":
                referencia = s.attributes['ref'].value
                lista.append(referencia)

        # Load into the 'relaciones' table the OSM relation id together
        # with the reference of the line
        taglist = xmldoc.getElementsByTagName('tag')
        for s in taglist:
            if s.attributes['k'].value == "ref":
                referencia = s.attributes['v'].value
                cursor.execute(
                    "INSERT INTO relaciones (relacion, id) VALUES (" +
                    str(r) + ",'" + str(referencia) + "');")

        # Open the csv created earlier and append new rows with the relations
        with open("/var/tmp/orden_relaciones.csv", "a") as archivo_texto:
            writer = csv.writer(archivo_texto, quoting=csv.QUOTE_NONE)
            i = 1
            for val in lista:
                writer.writerow([r, i, val])
                i = i + 1

        removefile(str(r) + ".xml")  # delete the xml file

    print(
        Fore.GREEN + "NOTICE:" + Fore.RESET +
        " Finished creating /var/tmp/orden_relaciones.csv with the order of the relations."
    )
    con.commit()
    cursor.close()
    return (relaciones_lista)
Esempio n. 39
0
def connect_db(db_file):
    #print db_file
    conn = db.connect(db_file)
    # creating a Cursor
    cur = conn.cursor()
    return conn, cur
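
A minimal usage sketch for connect_db (the file path is a placeholder):

conn, cur = connect_db('/var/tmp/gtfs.sqlite')
cur.execute('SELECT sqlite_version()')
print(cur.fetchone())
conn.close()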
Esempio n. 40
0
os.system("dropdb epanet_test_db")
os.system("createdb epanet_test_db")
os.system("psql epanet_test_db -c 'CREATE EXTENSION postgis'")
os.system("psql epanet_test_db -f " + test_data_dir + "/epanet_test_db.sql")

# try the update
wc = [tmp_dir + "/issue357_wc0.sqlite", tmp_dir + "/issue357_wc1.sqlite"]
for f in wc:
    if os.path.isfile(f): os.remove(f)
    versioning_base.checkout(
        "dbname=epanet_test_db",
        ['epanet_trunk_rev_head.junctions', 'epanet_trunk_rev_head.pipes'], f)

scur = []
for f in wc:
    scur.append(versioning_base.Db(dbapi2.connect(f)))

scur[0].execute(
    "INSERT INTO pipes_view(id, start_node, end_node, GEOMETRY) VALUES ('2','1','2',GeomFromText('LINESTRING(1 1,0 1)',2154))"
)
scur[0].execute(
    "INSERT INTO pipes_view(id, start_node, end_node, GEOMETRY) VALUES ('3','1','2',GeomFromText('LINESTRING(1 -1,0 1)',2154))"
)
scur[0].commit()

versioning_base.commit(wc[0], 'commit 1 wc0', "dbname=epanet_test_db")
versioning_base.update(wc[1], "dbname=epanet_test_db")

scur[0].execute("UPDATE pipes_view SET length = 1")
scur[0].commit()
scur[1].execute("UPDATE pipes_view SET length = 2")
Esempio n. 41
0
def test(host, pguser):
    pg_conn_info = "dbname=epanet_test_db host=" + host + " user="******"dbname=epanet_test_copy_db host=" + host + " user="******"dropdb --if-exists -h " + host + " -U " + pguser +
              " epanet_test_db")
    os.system("dropdb --if-exists -h " + host + " -U " + pguser +
              " epanet_test_copy_db")
    os.system("createdb -h " + host + " -U " + pguser + " epanet_test_db")
    os.system("createdb -h " + host + " -U " + pguser + " epanet_test_copy_db")
    os.system("psql -h " + host + " -U " + pguser +
              " epanet_test_db -c 'CREATE EXTENSION postgis'")
    os.system("psql -h " + host + " -U " + pguser +
              " epanet_test_copy_db -c 'CREATE EXTENSION postgis'")
    os.system("psql -h " + host + " -U " + pguser + " epanet_test_db -f " +
              test_data_dir + "/epanet_test_db.sql")

    # checkout
    tables = ['epanet_trunk_rev_head.junctions', 'epanet_trunk_rev_head.pipes']
    pgversioning = versioning.pgLocal(pg_conn_info, 'epanet_trunk_rev_head',
                                      pg_conn_info_cpy)
    pgversioning.checkout(tables)

    pcurcpy = versioning.Db(psycopg2.connect(pg_conn_info_cpy))
    pcur = versioning.Db(psycopg2.connect(pg_conn_info))

    pcurcpy.execute(
        "INSERT INTO epanet_trunk_rev_head.pipes_view(id, start_node, end_node, wkb_geometry) VALUES ('2','1','2',ST_GeometryFromText('LINESTRING(1 1,0 1)',2154))"
    )
    pcurcpy.execute(
        "INSERT INTO epanet_trunk_rev_head.pipes_view(id, start_node, end_node, wkb_geometry) VALUES ('3','1','2',ST_GeometryFromText('LINESTRING(1 -1,0 1)',2154))"
    )
    pcurcpy.commit()

    prtHid(pcurcpy, 'epanet_trunk_rev_head.pipes_view')

    pcurcpy.execute("SELECT * FROM epanet_trunk_rev_head.pipes_view")
    assert (len(pcurcpy.fetchall()) == 3)
    pcur.execute("SELECT * FROM epanet.pipes")
    assert (len(pcur.fetchall()) == 1)
    pgversioning.commit('INSERT')
    pcur.execute("SELECT * FROM epanet.pipes")
    assert (len(pcur.fetchall()) == 3)

    pcurcpy.execute(
        "UPDATE epanet_trunk_rev_head.pipes_view SET start_node = '2' WHERE id = '0'"
    )
    pcurcpy.commit()
    pcurcpy.execute("SELECT * FROM epanet_trunk_rev_head.pipes_view")
    assert (len(pcurcpy.fetchall()) == 3)
    pcur.execute("SELECT * FROM epanet.pipes")
    assert (len(pcur.fetchall()) == 3)
    pgversioning.commit('UPDATE')
    pcur.execute("SELECT * FROM epanet.pipes")
    assert (len(pcur.fetchall()) == 4)

    pcurcpy.execute(
        "DELETE FROM epanet_trunk_rev_head.pipes_view WHERE id = '2'")
    pcurcpy.commit()
    pcurcpy.execute("SELECT * FROM epanet_trunk_rev_head.pipes_view")
    assert (len(pcurcpy.fetchall()) == 2)
    pcur.execute("SELECT * FROM epanet.pipes")
    assert (len(pcur.fetchall()) == 4)
    pgversioning.commit('DELETE')
    pcur.execute("SELECT * FROM epanet.pipes")
    assert (len(pcur.fetchall()) == 4)

    sqlite_test_filename1 = os.path.join(tmp_dir,
                                         "versioning_base_test1.sqlite")
    if os.path.isfile(sqlite_test_filename1): os.remove(sqlite_test_filename1)
    spversioning1 = versioning.spatialite(sqlite_test_filename1, pg_conn_info)
    spversioning1.checkout(
        ['epanet_trunk_rev_head.pipes', 'epanet_trunk_rev_head.junctions'])
    scon = dbapi2.connect(sqlite_test_filename1)
    scur = scon.cursor()
    scur.execute(
        "INSERT INTO pipes_view(id, start_node, end_node, GEOMETRY) VALUES ('4', '10','100',GeomFromText('LINESTRING(2 0, 0 2)',2154))"
    )
    scon.commit()
    spversioning1.commit("sp commit")

    pgversioning.update()
    pcur.execute("SELECT * FROM epanet.pipes")
    assert (len(pcur.fetchall()) == 5)
    pcurcpy.execute("SELECT * FROM epanet_trunk_rev_head.pipes")
    assert (len(pcurcpy.fetchall()) == 5)

    pcur.execute("SELECT * FROM epanet_trunk_rev_head.pipes")
    assert (len(pcur.fetchall()) == 3)
    pcurcpy.execute("SELECT * FROM epanet_trunk_rev_head.pipes_view")
    assert (len(pcurcpy.fetchall()) == 3)

    pcur.execute("SELECT pid FROM epanet_trunk_rev_head.pipes ORDER BY pid")
    ret = pcur.fetchall()
    assert (list(zip(*ret)[0]) == [3, 4, 5])
    pcurcpy.execute(
        "SELECT ogc_fid FROM epanet_trunk_rev_head.pipes_view ORDER BY ogc_fid"
    )
    ret = pcurcpy.fetchall()
    assert (list(zip(*ret)[0]) == [3, 4, 5])

    pcurcpy.execute(
        "INSERT INTO epanet_trunk_rev_head.pipes_view(id, start_node, end_node, wkb_geometry) VALUES ('4','1','2',ST_GeometryFromText('LINESTRING(3 2,0 1)',2154))"
    )
    pcurcpy.commit()
    pgversioning.commit('INSERT AFTER UPDATE')

    pcurcpy.close()
    pcur.close()
Esempio n. 42
0
    'fs': {
        'label': 'fs_fs',
        'ids': ['01','02','08','11','14','24','30']
    },
    'oc': {
        'label': 'oc_oc',
        'ids': ['02','03','04','05','06']
    },
    'vpm': {
        'label': 'vpm_vp',
        'ids': ['01','02','03','04','05','06','07','09','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','31','32','33','34','35','36','37','38','39','40','41','42','43','44','45','46','49','50','51','52','53','54','55','56']
    }
}

# creating/connecting the test_db
conn = db.connect(spatialite_file)

# creating a Cursor
cur = conn.cursor()

try:
    print 'Dropping old table...'
    cur.execute('DROP TABLE %s;' % out_table)
except:
    pass

try:
    cur.execute('DROP TABLE %s;' % feature_data_table)
except:
    pass
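
As an aside, the two try/except guards above can be replaced by SQLite's DROP TABLE IF EXISTS, which is a no-op when the table is absent; an equivalent sketch using the same variable names:

cur.execute('DROP TABLE IF EXISTS %s' % out_table)
cur.execute('DROP TABLE IF EXISTS %s' % feature_data_table)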
Esempio n. 43
0
    def save_to_sqlite(self, source, name=None, overwrite=False):
        """Save the oat object to sqlite

        Args:
            source (str): the sqlite file (including path)
            name (list): the sensor name to be used (it shall be unique)
        """
        try:
            from pyspatialite import dbapi2 as db
        except ImportError:
            raise ImportError('<pyspatialite> package not installed')

        #connect / create the DB
        con = db.connect(source)
        cur = con.cursor()

        if name is None:
            name = self.name

        #Check if DB is spatial otherwise enable it
        sql = "SELECT * FROM spatial_ref_sys;"
        try:
            res = cur.execute(sql).fetchone()
        except:
            cur.execute('SELECT InitSpatialMetadata(1)')

        #Check if table <freewat_sensors> already exists
        sql = "SELECT * FROM freewat_sensors;"
        try:
            res = cur.execute(sql).fetchone()
        except:
            #create spatial table for SENSORS if not exists
            sql = "CREATE TABLE IF NOT EXISTS freewat_sensors ("
            sql += "id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
            sql += "name TEXT NOT NULL UNIQUE,"
            sql += "desc TEXT,"
            sql += "tz INTEGER,"
            sql += "unit TEXT NOT NULL,"
            sql += "prop TEXT NOT NULL,"
            sql += "freq TEXT,"
            # add time
            sql += "begin_pos DATETIME,"
            sql += "end_pos DATETIME,"
            #add statflag and use
            sql += "statflag TEXT,"
            sql += "use INTEGER DEFAULT 0,"
            sql += "topscreen REAL,"
            sql += "bottomscreen REAL,"
            sql += "altitude REAL )"
            res = cur.execute(sql).fetchall()
            #add geometry column
            sql = "SELECT AddGeometryColumn('freewat_sensors',"
            sql += "'geom', %s, 'POINT', 'XY')" % (self.srid)
            res = cur.execute(sql).fetchall()

        # check if altitude exists
        tmp = cur.execute('PRAGMA table_info(freewat_sensors)').fetchall()
        tmp_name = [i[1] for i in tmp]

        if 'altitude' in tmp_name:
            pass
        else:
            print("add missing table from save....")
            cur.execute('ALTER TABLE freewat_sensors ADD COLUMN altitude REAL')

        #check if sensor exists
        sql = "SELECT id FROM freewat_sensors WHERE name=?;"
        res_e = cur.execute(sql, (name, )).fetchall()

        if res_e and overwrite:
            #update sensor metadata
            print("sensor exists")
            sql = "UPDATE freewat_sensors"
            sql += " SET name=?,desc=?,tz=?,unit=?,prop=?,freq=?, geom=%s, begin_pos=?, end_pos=?, statflag=?, use=?, topscreen=?, bottomscreen=?,"
            sql += "altitude=? WHERE name=?"
            geom = "GeomFromText('POINT(%s %s)',%s)" % (self.lon, self.lat,
                                                        self.srid)
            params = (name, self.desc, self.tz, self.unit, self.prop,
                      self.freq, self.data_availability[0],
                      self.data_availability[1], self.statflag, self.use,
                      self.topscreen, self.bottomscreen, self.alt, name)
        elif not res_e:
            print("sensor NOT exists")
            #insert sensor metadata
            sql = "INSERT INTO freewat_sensors"
            sql += " (name, desc, tz, unit, prop, freq, geom, begin_pos, end_pos, statflag, use, topscreen, bottomscreen, altitude)"
            sql += " VALUES (?,?,?,?,?,?,%s,?,?,?,?,?,?,?)"
            geom = "GeomFromText('POINT(%s %s)',%s)" % (self.lon, self.lat,
                                                        self.srid)
            params = (name, self.desc, self.tz, self.unit, self.prop,
                      self.freq, self.data_availability[0],
                      self.data_availability[1], self.statflag, self.use,
                      self.topscreen, self.bottomscreen, self.alt)
        else:
            raise IOError(
                "<sensor '%s' already exists> set parameter 'overwrite=True' to allow overwrite"
                % name)

        #print(sql, params)
        cur.execute(sql % geom, params).fetchall()

        if not res_e:
            sql = "SELECT id FROM freewat_sensors WHERE name=?;"
            res_e = cur.execute(sql, (name, )).fetchall()

        # Add column use (at observation level)
        if 'use' not in self.ts.columns:
            self.ts['use'] = True

        # add an index column if it doesn't exist
        if 'obs_index' not in self.ts.columns:
            idx_list = []
            for i in range(0, len(self.ts.index)):
                idx_list.append(self.name[0:3] + '_' + str(res_e[0][0]) + '_' +
                                str(i + 1))

            self.ts['obs_index'] = idx_list

        #print (self.ts)
        self.ts.to_sql(name=name, con=con, if_exists='replace')

        print("table updated")
        cur.close()
        con.commit()
        con.close()
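
The PRAGMA table_info check used above is a general way to add a column only when it is missing. A self-contained sketch of that idiom (table and column names are illustrative):

import sqlite3

def ensure_column(conn, table, column, decl):
    # PRAGMA table_info returns one row per column; index 1 is the column name
    cols = [row[1] for row in conn.execute('PRAGMA table_info(%s)' % table)]
    if column not in cols:
        conn.execute('ALTER TABLE %s ADD COLUMN %s %s' % (table, column, decl))

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE freewat_sensors (id INTEGER PRIMARY KEY, name TEXT)')
ensure_column(con, 'freewat_sensors', 'altitude', 'REAL')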
Esempio n. 44
0
        sys.stdout.flush()


try:
    optlist, args = getopt.getopt(sys.argv[1:], 'vd:f:')
    # print optlist
    dirname = filter(lambda item: item[0] == '-d', optlist)[0][1]
    db_file = filter(lambda item: item[0] == '-f', optlist)[0][1]
except:
    print 'Usage %s [-v] -d <dat_files_dir> -f <db_file>' % sys.argv[0]
    exit(1)

if '-v' in map(lambda item: item[0], optlist):
    print help1

conn = db.connect(db_file)
# creating a Cursor
cur = conn.cursor()
cur.execute(
    'CREATE TABLE IF NOT EXISTS city(id INTEGER PRIMARY KEY AUTOINCREMENT, city_name, city_lastname, geometry, min_lng DOUBLE, min_lat DOUBLE, max_lng DOUBLE, max_lat DOUBLE, country)'
)
'''get the list of data files'''
dat_files = os.listdir(dirname)
'''iterate over the data files'''
for dat_file in dat_files:
    if dat_file.split('.').pop() != 'dat':
        continue
    try:
        f = open(dirname + '/' + dat_file, 'r')
        print 'Processing file: %s' % dat_file
    except:
Esempio n. 45
0
    def test_export_spatialite_zz_tables(self, mock_skip_popup, mock_iface,
                                         mock_find_layer, mock_newdbpath,
                                         mock_verify, mock_locale,
                                         mock_createdb_crs_question,
                                         mock_messagebar):
        mock_find_layer.return_value.crs.return_value.authid.return_value = u'EPSG:3006'
        mock_createdb_crs_question.return_value = [3006, True]
        dbconnection = db_utils.DbConnectionManager()
        mock_newdbpath.return_value = EXPORT_DB_PATH
        mock_verify.return_value = 0
        """
        insert into zz_strat(geoshort,strata) values('land fill','fyll');
        insert into zz_stratigraphy_plots (strata,color_mplot,hatch_mplot,color_qt,brush_qt) values('torv','DarkGray','+','darkGray','NoBrush');
        insert into zz_capacity (capacity,explanation) values('6 ','mycket god');
        insert into zz_capacity (capacity,explanation) values('6+','mycket god');
        insert into zz_capacity_plots (capacity,color_qt) values('', 'gray');
        """

        db_utils.sql_alter_db(
            u'''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(633466 711659)', 3006))''',
            dbconnection=dbconnection)
        dbconnection.execute(u'''PRAGMA foreign_keys='off' ''')
        dbconnection.execute(
            u'''UPDATE zz_strat SET strata = 'filling' WHERE geoshort = 'land fill' '''
        )
        dbconnection.execute(
            u'''INSERT INTO zz_stratigraphy_plots (strata,color_mplot,hatch_mplot,color_qt,brush_qt) values ('filling','Yellow','+','darkGray','NoBrush') '''
        )
        dbconnection.execute(
            u'''UPDATE zz_stratigraphy_plots SET color_mplot = 'OrangeFIX' WHERE strata = 'made ground' '''
        )
        dbconnection.execute(
            u'''UPDATE zz_capacity SET explanation = 'anexpl' WHERE capacity = 0 '''
        )
        dbconnection.execute(
            u'''UPDATE zz_capacity_plots SET color_qt = 'whiteFIX' WHERE capacity = 0 '''
        )

        dbconnection.commit_and_closedb()

        mock_locale.return_value.answer = u'ok'
        mock_locale.return_value.value = u'en_US'
        self.midvatten.export_spatialite()

        sql_list = [
            u'''SELECT geoshort, strata FROM zz_strat WHERE geoshort IN ('land fill', 'rock') ''',
            u'''SELECT strata, color_mplot FROM zz_stratigraphy_plots WHERE strata IN ('made ground', 'rock', 'filling') ''',
            u'''SELECT capacity, explanation FROM zz_capacity WHERE capacity IN (0, 1)''',
            u'''SELECT capacity, color_qt FROM zz_capacity_plots WHERE capacity IN (0, 1) '''
        ]

        conn = sqlite.connect(EXPORT_DB_PATH,
                              detect_types=sqlite.PARSE_DECLTYPES
                              | sqlite.PARSE_COLNAMES)
        curs = conn.cursor()

        test_list = []
        for sql in sql_list:
            test_list.append('\n' + sql + '\n')
            test_list.append(curs.execute(sql).fetchall())

        conn.commit()
        conn.close()

        test_string = utils_for_tests.create_test_string(test_list)

        reference_string = [
            u'''[''',
            u'''SELECT geoshort, strata FROM zz_strat WHERE geoshort IN ('land fill', 'rock') ''',
            u''', [(land fill, filling), (rock, rock)], ''',
            u'''SELECT strata, color_mplot FROM zz_stratigraphy_plots WHERE strata IN ('made ground', 'rock', 'filling') ''',
            u''', [(filling, Yellow), (made ground, OrangeFIX), (rock, red)], ''',
            u'''SELECT capacity, explanation FROM zz_capacity WHERE capacity IN (0, 1)''',
            u''', [(0, anexpl), (1, above gwl)], ''',
            u'''SELECT capacity, color_qt FROM zz_capacity_plots WHERE capacity IN (0, 1) ''',
            u''', [(0, whiteFIX), (1, red)]]'''
        ]

        reference_string = u'\n'.join(reference_string)
        assert test_string == reference_string
Esempio n. 46
0
def test(host, pguser):
    pg_conn_info = "dbname=epanet_test_db host=" + host + " user=" + pguser
    os.system("dropdb --if-exists -h " + host + " -U "+pguser+" epanet_test_db")
    os.system("createdb -h " + host + " -U "+pguser+" epanet_test_db")
    os.system("psql -h " + host + " -U "+pguser+" epanet_test_db -c 'CREATE EXTENSION postgis'")
    os.system("psql -h " + host + " -U "+pguser+" epanet_test_db -f "+test_data_dir+"/epanet_test_db.sql")

    # try the update
    wc = [os.path.join(tmp_dir,"issue437_wc0.sqlite"), os.path.join(tmp_dir,"issue437_wc1.sqlite")]
    spversioning0 = versioning.spatialite(wc[0], pg_conn_info)
    spversioning1 = versioning.spatialite(wc[1], pg_conn_info)
    for i, f in enumerate(wc):
        if os.path.isfile(f): os.remove(f) 
        sp = spversioning0 if i == 0 else spversioning1
        sp.checkout(['epanet_trunk_rev_head.junctions', 'epanet_trunk_rev_head.pipes'])

    scur = []
    for f in wc: scur.append(versioning.Db( dbapi2.connect( f ) ))

    scur[0].execute("INSERT INTO pipes_view(id, start_node, end_node, GEOMETRY) VALUES ('2','1','2',GeomFromText('LINESTRING(1 1,0 1)',2154))")
    scur[0].execute("INSERT INTO pipes_view(id, start_node, end_node, GEOMETRY) VALUES ('3','1','2',GeomFromText('LINESTRING(1 -1,0 1)',2154))")
    scur[0].commit()


    spversioning0.commit( 'commit 1 wc0')
    spversioning1.update(  )

    scur[0].execute("UPDATE pipes_view SET length = 1")
    scur[0].commit()
    scur[1].execute("UPDATE pipes_view SET length = 2")
    scur[1].execute("UPDATE pipes_view SET length = 3")
    scur[1].commit()

    spversioning0.commit( "commit 2 wc0" )
    scur[0].execute("SELECT OGC_FID,length,trunk_rev_begin,trunk_rev_end,trunk_parent,trunk_child FROM pipes")
    print '################'
    for r in scur[0].fetchall():
        print r

    scur[0].execute("UPDATE pipes_view SET length = 2")
    scur[0].execute("DELETE FROM pipes_view WHERE OGC_FID = 6")
    scur[0].commit()
    spversioning0.commit( "commit 3 wc0" )

    scur[0].execute("SELECT OGC_FID,length,trunk_rev_begin,trunk_rev_end,trunk_parent,trunk_child FROM pipes")
    print '################'
    for r in scur[0].fetchall():
        print r

    spversioning1.update(  )

    scur[1].execute("SELECT OGC_FID,length,trunk_rev_begin,trunk_rev_end,trunk_parent,trunk_child FROM pipes_diff")
    print '################ diff'
    for r in scur[1].fetchall():
        print r

    scur[1].execute("SELECT conflict_id FROM pipes_conflicts")
    assert( len(scur[1].fetchall()) == 6 ) # there must be conflicts

    scur[1].execute("SELECT conflict_id,origin,action,OGC_FID,trunk_parent,trunk_child FROM pipes_conflicts")
    print '################'
    for r in scur[1].fetchall():
        print r

    scur[1].execute("DELETE FROM pipes_conflicts WHERE origin='theirs' AND conflict_id=1")
    scur[1].commit()
    scur[1].execute("SELECT conflict_id FROM pipes_conflicts")
    assert( len(scur[1].fetchall()) == 4 ) # there must be two removed entries

    scur[1].execute("SELECT conflict_id,origin,action,OGC_FID,trunk_parent,trunk_child FROM pipes_conflicts")
    print '################'
    for r in scur[1].fetchall():
        print r

    scur[1].execute("DELETE FROM pipes_conflicts WHERE origin='mine' AND OGC_FID = 11")
    scur[1].execute("DELETE FROM pipes_conflicts WHERE origin='theirs'")
    scur[1].commit()
    scur[1].execute("SELECT conflict_id FROM pipes_conflicts")
    assert( len(scur[1].fetchall()) == 0 ) # there must be no conflict


    scur[1].execute("SELECT OGC_FID,length,trunk_rev_begin,trunk_rev_end,trunk_parent,trunk_child FROM pipes")
    print '################'
    for r in scur[1].fetchall():
        print r
Esempio n. 47
0
    def setUpClass(cls):
        """Run before all tests"""
        # setup provider for base tests
        cls.vl = QgsVectorLayer(
            'dbname=\'{}/provider/spatialite.db\' table="somedata" (geom) sql='
            .format(TEST_DATA_DIR), 'test', 'spatialite')
        assert (cls.vl.isValid())
        cls.provider = cls.vl.dataProvider()

        cls.vl_poly = QgsVectorLayer(
            'dbname=\'{}/provider/spatialite.db\' table="somepolydata" (geom) sql='
            .format(TEST_DATA_DIR), 'test', 'spatialite')
        assert (cls.vl_poly.isValid())
        cls.poly_provider = cls.vl_poly.dataProvider()

        # create test db
        cls.dbname = os.path.join(tempfile.gettempdir(), "test.sqlite")
        if os.path.exists(cls.dbname):
            os.remove(cls.dbname)
        con = sqlite3.connect(cls.dbname, isolation_level=None)
        cur = con.cursor()
        cur.execute("BEGIN")
        sql = "SELECT InitSpatialMetadata()"
        cur.execute(sql)

        # simple table with primary key
        sql = "CREATE TABLE test_pg (id INTEGER NOT NULL PRIMARY KEY, name TEXT NOT NULL)"
        cur.execute(sql)
        sql = "SELECT AddGeometryColumn('test_pg', 'geometry', 4326, 'POLYGON', 'XY')"
        cur.execute(sql)
        sql = "INSERT INTO test_pg (id, name, geometry) "
        sql += "VALUES (1, 'toto', GeomFromText('POLYGON((0 0,1 0,1 1,0 1,0 0))', 4326))"
        cur.execute(sql)

        # table with multiple column primary key
        sql = "CREATE TABLE test_pg_mk (id INTEGER NOT NULL, name TEXT NOT NULL, PRIMARY KEY(id,name))"
        cur.execute(sql)
        sql = "SELECT AddGeometryColumn('test_pg_mk', 'geometry', 4326, 'POLYGON', 'XY')"
        cur.execute(sql)
        sql = "INSERT INTO test_pg_mk (id, name, geometry) "
        sql += "VALUES (1, 'toto', GeomFromText('POLYGON((0 0,1 0,1 1,0 1,0 0))', 4326))"
        cur.execute(sql)

        # simple table with primary key
        sql = "CREATE TABLE test_q (id INTEGER NOT NULL PRIMARY KEY, name TEXT NOT NULL)"
        cur.execute(sql)
        sql = "SELECT AddGeometryColumn('test_q', 'geometry', 4326, 'POLYGON', 'XY')"
        cur.execute(sql)
        sql = "INSERT INTO test_q (id, name, geometry) "
        sql += "VALUES (11, 'toto', GeomFromText('POLYGON((0 0,1 0,1 1,0 1,0 0))', 4326))"
        cur.execute(sql)
        sql = "INSERT INTO test_q (id, name, geometry) "
        sql += "VALUES (21, 'toto', GeomFromText('POLYGON((0 0,1 0,1 1,0 1,0 0))', 4326))"
        cur.execute(sql)

        # simple table with a geometry column named 'Geometry'
        sql = "CREATE TABLE test_n (Id INTEGER NOT NULL PRIMARY KEY, name TEXT NOT NULL)"
        cur.execute(sql)
        sql = "SELECT AddGeometryColumn('test_n', 'Geometry', 4326, 'POLYGON', 'XY')"
        cur.execute(sql)
        sql = "INSERT INTO test_n (id, name, geometry) "
        sql += "VALUES (1, 'toto', GeomFromText('POLYGON((0 0,1 0,1 1,0 1,0 0))', 4326))"
        cur.execute(sql)
        sql = "INSERT INTO test_n (id, name, geometry) "
        sql += "VALUES (2, 'toto', GeomFromText('POLYGON((0 0,1 0,1 1,0 1,0 0))', 4326))"
        cur.execute(sql)

        cur.execute("COMMIT")
        con.close()
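
setUpClass assumes a SpatiaLite-enabled sqlite3 build. When pyspatialite is not available, the usual fallback (also used in other snippets above) is loading mod_spatialite as an extension; a minimal sketch (the extension file name varies by platform):

import sqlite3

con = sqlite3.connect(':memory:')
con.enable_load_extension(True)
con.execute("SELECT load_extension('mod_spatialite')")  # a full path may be needed
con.execute('SELECT InitSpatialMetadata(1)')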
Esempio n. 48
0
    def test_export_spatialite_transform_coordinates(
            self, mock_skip_popup, mock_iface, mock_find_layer, mock_newdbpath,
            mock_verify, mock_locale, mock_createdb_crs_question,
            mock_messagebar):
        mock_find_layer.return_value.crs.return_value.authid.return_value = u'EPSG:3006'
        mock_createdb_crs_question.return_value = [3010, True]

        mock_newdbpath.return_value = EXPORT_DB_PATH
        mock_verify.return_value = 0

        db_utils.sql_alter_db(
            u'''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(1 1)', 3006))'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO zz_staff (staff) VALUES ('s1')''')
        db_utils.sql_alter_db(
            u'''INSERT INTO comments (obsid, date_time, staff, comment) VALUES ('P1', '2015-01-01 00:00:00', 's1', 'comment1')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO w_qual_lab (obsid, parameter, report, staff) VALUES ('P1', 'labpar1', 'report1', 's1')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO w_qual_field (obsid, parameter, staff, date_time, unit) VALUES ('P1', 'par1', 's1', '2015-01-01 01:00:00', 'unit1')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, unit) VALUES ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO w_levels (obsid, date_time, meas) VALUES ('P1', '2015-01-02 00:00:01', '2')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO stratigraphy (obsid, stratid) VALUES ('P1', 1)''')
        db_utils.sql_alter_db(
            u'''INSERT INTO obs_lines (obsid) VALUES ('L1')''')
        db_utils.sql_alter_db(
            u'''INSERT INTO seismic_data (obsid, length) VALUES ('L1', '5')''')
        db_utils.sql_alter_db(
            u'''INSERT INTO meteo (obsid, instrumentid, parameter, date_time) VALUES ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')'''
        )

        mock_locale.return_value.answer = u'ok'
        mock_locale.return_value.value = u'sv_SE'
        self.midvatten.export_spatialite()

        sql_list = [
            u'''select obsid, ST_AsText(geometry) from obs_points''',
            u'''select staff from zz_staff''',
            u'''select obsid, date_time, staff, comment from comments''',
            u'''select obsid, parameter, report, staff from w_qual_lab''',
            u'''select obsid, parameter, staff, date_time, comment from w_qual_field''',
            u'''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
            u'''select obsid, date_time, meas from w_levels''',
            u'''select obsid, stratid from stratigraphy''',
            u'''select obsid from obs_lines''',
            u'''select obsid, length from seismic_data''',
            u'''select obsid, instrumentid, parameter, date_time from meteo'''
        ]

        conn = sqlite.connect(EXPORT_DB_PATH,
                              detect_types=sqlite.PARSE_DECLTYPES
                              | sqlite.PARSE_COLNAMES)
        curs = conn.cursor()

        test_list = []
        for sql in sql_list:
            test_list.append('\n' + sql + '\n')
            test_list.append(curs.execute(sql).fetchall())

        conn.commit()
        conn.close()

        test_string = utils_for_tests.create_test_string(test_list)
        reference_string = [
            u'''[''', u'''select obsid, ST_AsText(geometry) from obs_points''',
            u''', [(P1, POINT(-517888.383773 1.002821))], ''',
            u'''select staff from zz_staff''', u''', [(s1)], ''',
            u'''select obsid, date_time, staff, comment from comments''',
            u''', [(P1, 2015-01-01 00:00:00, s1, comment1)], ''',
            u'''select obsid, parameter, report, staff from w_qual_lab''',
            u''', [(P1, labpar1, report1, s1)], ''',
            u'''select obsid, parameter, staff, date_time, comment from w_qual_field''',
            u''', [(P1, par1, s1, 2015-01-01 01:00:00, None)], ''',
            u'''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
            u''', [(P1, inst1, Momflow, 2015-04-13 00:00:00, l/s)], ''',
            u'''select obsid, date_time, meas from w_levels''',
            u''', [(P1, 2015-01-02 00:00:01, 2.0)], ''',
            u'''select obsid, stratid from stratigraphy''',
            u''', [(P1, 1)], ''', u'''select obsid from obs_lines''',
            u''', [(L1)], ''', u'''select obsid, length from seismic_data''',
            u''', [(L1, 5.0)], ''',
            u'''select obsid, instrumentid, parameter, date_time from meteo''',
            u''', [(P1, meteoinst, precip, 2017-01-01 00:19:00)]]'''
        ]
        reference_string = u'\n'.join(reference_string)
        assert test_string == reference_string
Esempio n. 49
0
    def setUp(self):
        self.con = sqlite.connect(":memory:", factory=MyConnection)
def processSpatial():
    # DB Connection
    conn = db.connect('C:/Temp/Prioritization/Prior.sqlite')

    # creating a Cursor
    cur = conn.cursor()

    # Run both buffering functions if they don't already exist
    if (u'proj10', ) in cur.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='proj10'"
    ):
        pass
    else:
        buffproj10(cur)

    if (u'proj1320', ) in cur.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='proj1320'"
    ):
        pass
    else:
        buffproj1320(cur)

    if (u'proj5280', ) in cur.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='proj5280'"
    ):
        pass
    else:
        buffproj5280(cur)

    # Stuff a dictionary with our results. The key is the question ID.
    results = {}

    # 98 - Air quality: is the project on a freight route?
    print("Processing 98")
    results['98'] = multiOrSect(cur, "proj10", "truck_bottlenecks_20120620",
                                "t1t2")

    # 111 - Air quality: is the project within 1/4 mi buffer of schools?
    print("Processing 111")
    results['111'] = multiOrSect(cur, "proj1320", "school_location2")

    # 69 - Is the project on an identified truck bottleneck?
    print("Processing 69")
    results['69'] = multiOrSect(cur, "proj10", "truck_bottlenecks_20120620")

    # 72 - Is the project in an MIC? Does it connect two MICs or a MIC and an RGC?
    # NOTE: The first condition supersedes the remainder. Suspect this isn't what is wanted
    print("Processing 72")
    results['72'] = multiOrSect(cur, "proj10", "micto_urbcen_micnet3")

    # 73 - Is the project in an MIC?
    # NOTE: This and 72 are dupes
    print("Processing 73")
    results['73'] = multiOrSect(cur, "proj10", "micen")

    # 74 - Is the project within a TAZ identified as a freight generator
    print("Processing 74")
    results['74'] = multiOrSect(cur, "proj10", "freight_gen_taz2010")

    # 114 - Is the project on a T1/T2 route
    print("Processing 114")
    results['114'] = multiOrSect(cur, "proj10", "t1t2")

    # 66 - Within identified areas (18 jobs/acre density AND zoning)
    print("Processing 66")
    results['66'] = multiAndSect(cur, "proj10", "flu_jobs_32_lyr")

    # 67 - Within identified areas (18 jobs/acre density)
    print("Processing 67")
    results['67'] = multiOrSect(cur, "proj10", "all_jobs_18_lyr")

    # 68 - Within identified areas (15 jobs/acre density; cluster employment)
    print("Processing 68")
    results['68'] = multiOrSect(cur, "proj10", "cluster_15_lyr")

    # 106 - Within identified areas (15 jobs/acre density; family-wage)
    print("Processing 106")
    results['106'] = multiOrSect(cur, "proj10", "fmlywg_15_lyr")

    # 107 - Within some reasonable distance (1/4mi?) from identified economic foundation points
    print("Processing 107")
    results['107'] = multiOrSect(cur, "proj1320", "economic_foundations")

    # 116 - On the regional bicycle network
    print("Processing 116")
    results['116'] = multiOrSect(cur, "proj1320", "regional_bicycle_network")

    # 120 - Within 1/4mi of MTS transit stops
    print("Processing 120")
    results['120'] = multiOrSect(cur, "proj1320", "regional_transit")

    # 101 - Project is in a critical area
    print("Processing 101")
    results['101'] = notProj(
        cur, multiOrSect(cur, "proj10", "caoall", "priority_hab"))

    # 122 - Project is within identified resource lands
    results['122'] = multiOrSect(cur, "proj10", "reszone08_region")

    # 89 - On a facility with fatality, injury, or property damage incidents
    print("Processing 89")
    results['89'] = multiOrSect(cur, "proj10", "all_collisions")

    # 141 - On security recovery annex facility
    print("Processing 141")
    results['141'] = multiOrSect(cur, "proj10", "security")

    # 93 - In special needs area (NOTE: need guidance)
    print("Processing 93")
    results['93'] = multiOrSect(cur, "proj10", "all_four")

    # 150 - Connects area of low and very low to high or very high opp index
    print("Processing 150")
    results['150'] = multiOrSect(cur, "proj10", "low_to_highOP_net")

    # 151 - Connects to an area of low or very low
    print("Processing 151")
    results['151'] = multiOrSect(cur, "proj10", "low_verylow")

    # 152 - Connects to an area of high or very high
    print("Processing 152")
    results['152'] = multiOrSect(cur, "proj10", "high_veryhigh_opp")

    # 59 - Within an RGC
    print("Processing 59")
    results['59'] = multiOrSect(cur, "proj10", "urbcen")

    # 60 - Connect an RGC to RGC or MIC
    print("Processing 60")
    results['60'] = multiOrSect(cur, "proj10", "regcenToRegCenMICs_Net2")

    # 61 - Connect to RGC (1 mi)
    print("Processing 61")
    results['61'] = multiOrSect(cur, "proj5280", "urbcen")

    # 62 - In an area with housing density > 15
    print("Processing 62")
    results['62'] = multiOrSect(cur, "proj10", "housing_density_15_plus")

    # 63 - In an area with housing density > 8
    print("Processing 63")
    results['63'] = multiOrSect(cur, "proj10", "housing_density_8_to_15")

    # 75 - On an identified facility (bottleneck, chokepoint, congestion, etc.)
    print("Processing 75")
    results['75'] = multiOrSect(cur, "proj10", "chokepoints_and_bottlenecks",
                                "congested_transit_corridors",
                                "its_key_arterial_corridors")

    return (results)
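
multiOrSect, multiAndSect, notProj and the buffproj* helpers are defined elsewhere in the original module. Judging from the calls above, multiOrSect answers whether any feature of the buffered project table intersects any of the listed tables; a hypothetical SpatiaLite sketch of such a helper (schema and geometry column names are assumptions):

def multiOrSect(cur, proj_table, *tables):
    # True as soon as any geometry in proj_table intersects any given table
    for table in tables:
        cur.execute(
            "SELECT 1 FROM %s AS a, %s AS b "
            "WHERE ST_Intersects(a.geometry, b.geometry) LIMIT 1"
            % (proj_table, table))
        if cur.fetchone() is not None:
            return True
    return False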