Exemplo n.º 1
0
def db_load(database: str, folder: str):
    """Load the newest versioned .SQL dump created by another process.

    Recursively searches ``folder`` for files whose names end in a version
    tag (e.g. ``mydb_v7.sql``), picks the highest version, and restores it
    into ``database``.

    Args:
        database: name of the target PostgreSQL database
        folder: directory tree to search for ``*.sql`` dump files

    Raises:
        FileNotFoundError: if no .sql file with a parseable version tag
            exists anywhere under ``folder``
    """

    folder = Path(folder)

    # Find the file with the highest version tag
    max_version = -1
    latest_file = None

    for db_file in folder.rglob("*.sql"):
        # Filenames end with a version tag: "mydb_v7.sql" -> stem "mydb_v7" -> "v7"
        v_tag = db_file.stem.split("_")[-1]

        try:
            version = int(v_tag[1:])
        except ValueError:
            # Skip files that don't follow the "_v<NUMBER>.sql" convention
            # instead of crashing on them
            continue

        if version > max_version:
            max_version = version
            latest_file = db_file

    if latest_file is None:
        # Previously this fell through and passed None to db_load_pgdump_file
        raise FileNotFoundError(
            f"No versioned .sql files found under {folder}"
        )

    print(f"Loading db version {max_version} from \n\t-> {latest_file}")

    db = PostgreSQL(database, verbosity="minimal", **CREDENTIALS["localhost"])
    db.db_load_pgdump_file(latest_file)
Exemplo n.º 2
0
def db_freeze(database: str, folder: str):
    """Write a pg_dump .SQL snapshot of ``database`` into ``folder``."""

    output_folder = Path(folder)

    connection = PostgreSQL(database,
                            verbosity="minimal",
                            **CREDENTIALS["localhost"])
    connection.db_export_pgdump_file(output_folder)
Exemplo n.º 3
0
def db_setup(database: str, folder: str):
    """Bootstrap a starter database from DVRPC's production DB."""

    shp_folder = Path(folder)

    local_db = PostgreSQL(database,
                          verbosity="minimal",
                          **CREDENTIALS["localhost"])

    create_project_database(local_db, shp_folder)
Exemplo n.º 4
0
def analyze_network(schema: str, database: str, speed: str):
    """Run the sidewalk network analysis with Pandana.

    Args:
        schema: database schema holding the sidewalk data
        database: name of the PostgreSQL database to connect to
        speed: walking speed in mph as text; non-numeric input falls back
            to ``None`` (SidewalkNetwork's own default)
    """

    try:
        speed = float(speed)
    except ValueError:
        # Non-numeric CLI input -> let SidewalkNetwork use its default
        speed = None

    db = PostgreSQL(database, verbosity="minimal", **CREDENTIALS["localhost"])

    # Constructing SidewalkNetwork runs the analysis; the instance itself
    # was assigned to an unused local before, so just call it.
    SidewalkNetwork(db, schema, walking_mph=speed)
Exemplo n.º 5
0
def database_2():
    """Ward fixture: create a scratch DB, hand it to the test, then drop it."""

    # Build the throwaway database
    test_db = PostgreSQL("test_from_ward_2",
                         verbosity="minimal",
                         **configurations()["localhost"])

    # Hand the live connection to the test body
    yield test_db

    # Automatic teardown once the test finishes
    test_db.db_delete()
Exemplo n.º 6
0
def clip_data(state: str,
              municipality: str,
              buffer: str,
              database):
    """Clip source data down to a single municipality."""

    # An empty string from the CLI means "no municipal filter"
    muni_filter = None if municipality == "" else municipality

    # Non-numeric buffer text means "no buffer"
    try:
        buffer_meters = float(buffer)
    except ValueError:
        buffer_meters = None

    db = PostgreSQL(database, verbosity="minimal", **CREDENTIALS["localhost"])
    clip_inputs(db, state, municipality=muni_filter, buffer_meters=buffer_meters)
Exemplo n.º 7
0
def create_project_database(local_db: PostgreSQL, shp_folder: Path):
    """ Batch execute the whole process:
            1) copy SQL data
            2) import shapefiles
            3) load a median() function
            4) make some helper GIS data
            5) import transit data from OpenData portals
            6) save pg_dump of database
    """

    # This pipeline pulls from DVRPC's internal GIS server, so it only
    # works on a Linux/Windows box inside the dvrpc.org network.
    on_supported_os = platform.system() in ["Linux", "Windows"]
    on_dvrpc_network = "dvrpc.org" in socket.getfqdn()

    if not (on_supported_os and on_dvrpc_network):
        print("\n-> !!!Initial DB setup can only be executed from a DVRPC workstation!!!")
        return

    remote_db = PostgreSQL("gis", **pGIS.configurations()["dvrpc_gis"])

    import_production_sql_data(remote_db, local_db)
    import_shapefiles(shp_folder, local_db)
    load_helper_functions(local_db)
    create_new_geodata(local_db)
    import_transit_data(local_db)
Exemplo n.º 8
0
def database_1():
    """Ward fixture: build a seeded test DB.

    Teardown of the database itself is deliberately deferred to
    test_final_cleanup.py; only the temporary local files are removed here.
    """

    # Stand up the test database
    test_db = PostgreSQL("test_from_ward",
                         verbosity="minimal",
                         **configurations()["localhost"])

    # Seed it with one tabular and one spatial source
    test_db.import_csv(test_csv_data.NAME,
                       test_csv_data.PATH_URL,
                       if_exists="replace")
    test_db.import_geodata(test_shp_data.NAME,
                           test_shp_data.PATH_URL,
                           if_exists="replace")

    # Hand the connection to the test body
    yield test_db

    # NOTE: no db_delete() here on purpose — test_final_cleanup.py drops it.
    # Remove the temporary local copies of the source data.
    for dataset in (test_shp_data, test_csv_data):
        dataset.flush_local_data()
Exemplo n.º 9
0
def generate_nodes(database: str, tablename: str):
    """Build topologically-sound nodes for the sidewalk lines in ``tablename``."""

    connection = PostgreSQL(database,
                            verbosity="minimal",
                            **CREDENTIALS["localhost"])
    generate_sidewalk_nodes(connection, tablename)
Exemplo n.º 10
0
        poi_node_pairs = poi_node_pairs.set_geometry("flow")

        poi_node_pairs['geom'] = poi_node_pairs["flow"].apply(
            lambda x: WKTElement(x.wkt, srid=self.epsg))

        for col in ["flow", "geom_from", "geom_to"]:
            poi_node_pairs.drop(col, inplace=True, axis=1)

        engine = create_engine(self.db.uri())
        poi_node_pairs.to_sql(
            f"poi_{self.themes[theme]}_qa",
            engine,
            schema=self.schema,
            if_exists="replace",
            dtype={'geom': Geometry("LineString", srid=self.epsg)})
        engine.dispose()

        self.poi_gdf = poi_gdf


# Ad-hoc entry point: run the sidewalk network analysis for the "camden"
# schema against the local "sidewalk_gaps" database.
if __name__ == "__main__":
    from sidewalk_gaps import CREDENTIALS

    schema = "camden"

    db = PostgreSQL("sidewalk_gaps",
                    verbosity="minimal",
                    **CREDENTIALS["localhost"])

    # NOTE(review): constructing SidewalkNetwork appears to kick off the
    # analysis itself (the instance is not used afterwards) — confirm.
    network = SidewalkNetwork(db, schema)
Exemplo n.º 11
0
def summarize_into_hexagons(database: str):
    """Summarize sidewalk coverage into hexagons for ``database``."""

    connection = PostgreSQL(database,
                            verbosity="minimal",
                            **CREDENTIALS["localhost"])
    hexagon_summary(connection)
Exemplo n.º 12
0
    db.make_geotable_from_query(query,
                                "sidewalks_and_trails",
                                geom_type="LINESTRING",
                                epsg=26918)


def cleanup_temp_tables(db: PostgreSQL):
    """Drop the intermediate tables so the database stays clean."""

    temp_tables = (
        "trail_splits",
        "trail_merged",
        "sidewalk_splits",
        "sidewalk_merged",
    )
    for table_name in temp_tables:
        db.table_delete(table_name)


def merge_topologies(db: PostgreSQL):
    """Run the full topology-merge pipeline, then remove its scratch tables."""

    pipeline = (
        add_segmentation_to_trails,
        add_segmentation_to_sidewalks,
        merge_sidewalks_and_trails,
        cleanup_temp_tables,
    )
    for step in pipeline:
        step(db)


# Ad-hoc entry point: merge the sidewalk and trail topologies in the
# project database using local credentials.
if __name__ == "__main__":
    from sidewalk_gaps import CREDENTIALS, PROJECT_DB_NAME

    db = PostgreSQL(PROJECT_DB_NAME,
                    verbosity="minimal",
                    **CREDENTIALS["localhost"])

    merge_topologies(db)
Exemplo n.º 13
0
def identify_islands(schema: str, database):
    """Join intersecting sidewalks into connected 'islands'."""

    connection = PostgreSQL(database,
                            verbosity="minimal",
                            **CREDENTIALS["localhost"])
    generate_islands(connection, schema)
Exemplo n.º 14
0
def analyze_segments(schema: str, database):
    """Classify centerlines with the length of their parallel sidewalks."""

    connection = PostgreSQL(database,
                            verbosity="minimal",
                            **CREDENTIALS["localhost"])
    classify_centerlines(connection, schema, "centerlines")
Exemplo n.º 15
0
def db_connection(db_name: str = DB_NAME) -> PostgreSQL:
    """Open a minimal-verbosity connection to the local PostgreSQL server."""
    local_credentials = configurations()["localhost"]
    return PostgreSQL(db_name, verbosity="minimal", **local_credentials)
Exemplo n.º 16
0
def db_connection(db_name: str = DB_NAME) -> PostgreSQL:
    """Connect to ``db_name`` using the library's default credentials."""
    connection = PostgreSQL(db_name, verbosity="minimal")
    return connection
Exemplo n.º 17
0
            2) import shapefiles
            3) load a median() function
            4) make some helper GIS data
            5) import transit data from OpenData portals
            6) save pg_dump of database
    """

    if platform.system() in ["Linux", "Windows"] \
       and "dvrpc.org" in socket.getfqdn():

        dvrpc_credentials = pGIS.configurations()["dvrpc_gis"]
        remote_db = PostgreSQL("gis", **dvrpc_credentials)

        import_production_sql_data(remote_db, local_db)
        import_shapefiles(shp_folder, local_db)
        load_helper_functions(local_db)
        create_new_geodata(local_db)
        import_transit_data(local_db)

    else:
        print("\n-> !!!Initial DB setup can only be executed from a DVRPC workstation!!!")


# Ad-hoc entry point: bootstrap the project database from scratch with
# local credentials and the configured shapefile input folder.
if __name__ == "__main__":

    from sidewalk_gaps import CREDENTIALS, FOLDER_SHP_INPUT

    # NOTE(review): 'my_db_name' looks like a placeholder database name —
    # confirm before running against a real server.
    db = PostgreSQL('my_db_name', **CREDENTIALS["localhost"])

    create_project_database(db, FOLDER_SHP_INPUT)