Example #1
    def __save_existing_link(self):
        data = []
        if self.link_id != self.__original__['link_id']:
            raise ValueError('One cannot change the link_id')

        txts = []
        for key, val in self.__dict__.items():
            if key not in self.__original__:
                continue
            if val != self.__original__[key]:
                if key == 'geometry' and val is not None:
                    data.extend([val.wkb, self.__srid__])
                    txts.append('geometry=GeomFromWKB(?, ?)')
                else:
                    data.append(val)
                    txts.append(f'"{key}"=?')

        if not data:
            logger.warning(f'Nothing to update for link {self.link_id}')
            return [], ''

        txts = ','.join(txts) + ' where link_id=?'
        data.append(self.link_id)
        sql = f'Update Links set {txts}'
        return data, sql
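For readers less familiar with the pattern above, here is a minimal, self-contained sketch (in-memory database, hypothetical table contents) of how such a (data, sql) pair is built and consumed with parameter binding, so values are never interpolated into the SQL string:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table Links (link_id integer primary key, name text, lanes integer)")
conn.execute("insert into Links values (1, 'old name', 1)")

changed = {"name": "new name", "lanes": 2}   # hypothetical edited fields
data, txts = [], []
for key, val in changed.items():
    data.append(val)
    txts.append(f'"{key}"=?')
sql = "Update Links set " + ",".join(txts) + " where link_id=?"
data.append(1)
conn.execute(sql, data)
print(conn.execute("select name, lanes from Links").fetchone())   # ('new name', 2)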
Example #2
    def disconnect_mode(self, mode_id: str) -> None:
        """Removes centroid connectors for the desired mode from the network file

               Args:
                   *mode_id* (:obj:`str`): Mode ID we are trying to disconnect from this zone
        """

        curr = self.conn.cursor()
        data = [self.zone_id, mode_id]
        curr.execute("Delete from links where a_node=? and modes=?", data)
        row_count = curr.rowcount

        data = [mode_id, self.zone_id, mode_id]
        curr.execute(
            'Update links set modes = replace(modes, ?, "") where a_node=? and instr(modes,?) > 0',
            data)
        row_count += curr.rowcount

        if row_count:
            logger.warning(
                f"Deleted {row_count} connectors for mode {mode_id} for zone {self.zone_id}"
            )
        else:
            warn("No centroid connectors for this mode")
        self.conn.commit()
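The two statements above do all the work: the first deletes connectors that carry only the mode, the second strips the mode character from mixed-mode connectors. A standalone sketch (in-memory table, hypothetical rows) of that replace/instr pattern:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table links (a_node integer, modes text)")
conn.executemany("insert into links values (?, ?)", [(1, "c"), (1, "ct"), (2, "c")])

zone_id, mode_id = 1, "c"
conn.execute("Delete from links where a_node=? and modes=?", [zone_id, mode_id])
conn.execute("Update links set modes = replace(modes, ?, '') where a_node=? and instr(modes,?) > 0",
             [mode_id, zone_id, mode_id])
print(conn.execute("select a_node, modes from links order by a_node").fetchall())  # [(1, 't'), (2, 'c')]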
Example #3
    def new(self, link_type_id: str) -> LinkType:
        if link_type_id in self.__items:
            raise ValueError(f'Link Type ID ({link_type_id}) already exists in the model. It must be unique.')

        tp = {key: None for key in self.__fields}
        tp['link_type_id'] = link_type_id
        lt = LinkType(tp)
        self.__items[link_type_id] = lt
        logger.warning('Link type has not yet been saved to the database. Do so explicitly')
        return lt
Example #4
 def delete(self, mode_id: str) -> None:
     """Removes the mode with **mode_id** from the project"""
     try:
         self.curr.execute(f'delete from modes where mode_id="{mode_id}"')
         self.conn.commit()
     except IntegrityError as e:
         logger.error(f'Failed to remove mode {mode_id}. {e.args}')
         raise e
     logger.warning(
         f'Mode {mode_id} was successfully removed from the database')
     self.__update_list_of_modes()
Example #5
def spatialite_connection(conn):
    conn.enable_load_extension(True)
    par = Parameters()
    spatialite_path = par.parameters["system"]["spatialite_path"]
    if spatialite_path not in os.environ['PATH']:
        os.environ['PATH'] = spatialite_path + ';' + os.environ['PATH']
    try:
        conn.load_extension("mod_spatialite")
    except Exception as e:
        logger.warning(f"AequilibraE might not work as intended without spatialite. {e.args}")
    return conn
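A minimal usage sketch, assuming the function above is in scope, mod_spatialite is installed, and the parameter file points at it; the database file name is a placeholder:

import sqlite3

conn = spatialite_connection(sqlite3.connect("project_database.sqlite"))
print(conn.execute("select spatialite_version()").fetchone())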
Example #6
 def delete(self, link_type_id: str) -> None:
     """Removes the link_type with **link_type_id** from the project"""
     try:
         lt = self.__items[link_type_id]  # type: LinkType
         lt.delete()
         del self.__items[link_type_id]
         self.conn.commit()
     except IntegrityError as e:
         logger.error(f'Failed to remove link_type {link_type_id}. {e.args}')
         raise e
     logger.warning(f'Link type {link_type_id} was successfully removed from the project database')
Example #7
    def add_mode(self, mode: Union[str, Mode]):
        """Adds a new mode to this link

        Raises a warning if mode is already allowed on the link, and fails if mode does not exist

        Args:
            *mode* (:obj:`str` or :obj:`Mode`): mode_id of the mode, or the mode object, to be added to the link
        """
        mode_id = self.__validate(mode)

        if mode_id in self.modes:
            logger.warning('Mode already active for this link')
            return

        self.__dict__["modes"] += mode_id
Example #8
    def drop_mode(self, mode: Union[str, Mode]):
        """Removes a mode from this link

        Raises a warning if mode is already NOT allowed on the link, and fails if mode does not exist

        Args:
            *mode* (:obj:`str` or :obj:`Mode`): mode_id of the mode, or the mode object, to be removed from the link
        """

        mode_id = self.__validate(mode)

        if mode_id not in self.modes:
            logger.warning('Mode already inactive for this link')
            return

        if len(self.modes) == 1:
            raise ValueError('Link needs to have at least one mode')

        self.__dict__['modes'] = self.modes.replace(mode_id, '')
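Both methods above boil down to manipulating the link's modes field, which is a string of single-character mode IDs. A plain-Python sketch of that bookkeeping (hypothetical mode characters):

modes = "ct"                        # current modes of a link
if "b" not in modes:
    modes += "b"                    # what add_mode does
if "t" in modes and len(modes) > 1:
    modes = modes.replace("t", "")  # what drop_mode does
print(modes)                        # 'cb'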
Example #9
    def delete(self, link_id: int) -> None:
        """Removes the link with **link_id** from the project

        Args:
            *link_id* (:obj:`int`): Id of a link to delete"""
        d = 1
        link_id = int(link_id)
        if link_id in self.__items:
            link = self.__items.pop(link_id)  # type: Link
            link.delete()
        else:
            self.curr.execute("Delete from Links where link_id=?", [link_id])
            d = self.curr.rowcount
            self.conn.commit()
        if d:
            logger.warning(
                f"Link {link_id} was successfully removed from the project database"
            )
        else:
            self.__existence_error(link_id)
Example #10
    def __save_existing_node(self):
        data = []
        txts = []
        for key, val in self.__dict__.items():
            if key not in self.__original__:
                continue
            if val != self.__original__[key]:
                if key == "geometry" and val is not None:
                    data.append(val.wkb)
                    txts.append(f"geometry=GeomFromWKB(?, {self.__srid__})")
                else:
                    data.append(val)
                    txts.append(f'"{key}"=?')

        if not data:
            logger.warning(f"Nothing to update for node {self.node_id}")
            return [], ""

        txts = ",".join(txts) + " where node_id=?"
        data.append(self.node_id)
        sql = f"Update Nodes set {txts}"
        return data, sql
Example #11
    def calculate_stepsize(self):
        """Calculate optimal stepsize in descent direction"""
        if self.algorithm == "msa":
            self.stepsize = 1.0 / self.iter
            return

        def derivative_of_objective(stepsize):
            x = self.fw_total_flow + stepsize * (self.step_direction_flow - self.fw_total_flow)

            self.vdf.apply_vdf(self.congested_value, x, self.capacity, self.free_flow_tt, *self.vdf_parameters)
            return np.sum(self.congested_value * (self.step_direction_flow - self.fw_total_flow))

        try:
            if recent_scipy:
                min_res = root_scalar(derivative_of_objective, bracket=[0, 1])
                self.stepsize = min_res.root
                if not min_res.converged:
                    logger.warning("Descent direction stepsize finder is not converged")
            else:
                min_res = root_scalar(derivative_of_objective, 1 / self.iter)
                if not min_res.success:
                    logger.warning("Descent direction stepsize finder is not converged")
                self.stepsize = min_res.x[0]
                if self.stepsize <= 0.0 or self.stepsize >= 1.0:
                    raise ValueError('wrong root')

            self.conjugate_failed = False

        except ValueError:
            # We can have iterations where the objective function is not *strictly* convex, but the scipy
            # method cannot deal with this. Stepsize is then either given by 1 or 0, depending on where the
            # objective function is smaller. However, using zero would mean the overall solution would not
            # get updated, and therefore we set the stepsize to add a small fraction of the AoN. A heuristic
            # value equal to the corresponding MSA step size seems to work well in practice.
            if self.algorithm == 'bfw':
                self.betas.fill(-1)
            if derivative_of_objective(0.0) < derivative_of_objective(1.0):
                if self.algorithm == "frank-wolfe" or self.conjugate_failed:
                    msa_step = 1.0 / self.iter
                    logger.warning(f"# Alert: Adding {msa_step} to stepsize to make it non-zero")
                    self.stepsize = msa_step
                else:
                    self.stepsize = 0.0
                    # need to reset conjugate / bi-conjugate direction search
                    self.do_fw_step = True
                    self.conjugate_failed = True
                    self.iteration_issue.append('Found bad conjugate direction step. Performing FW search')
                    # By doing it recursively, we avoid doing the same AoN again
                    self.__calculate_step_direction()
                    self.calculate_stepsize()

            else:
                # Do we want to keep some of the old solution, or just throw away everything?
                self.stepsize = 1.0

        assert 0 <= self.stepsize <= 1.0
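To make the line search concrete, here is a self-contained toy (not AequilibraE code) with two BPR links and a fixed demand of 1000: the current solution loads link 1, the all-or-nothing (AoN) solution loads link 2, and the optimal stepsize is the root of the derivative bracketed on [0, 1], exactly as above:

import numpy as np
from scipy.optimize import root_scalar

fftt = np.array([1.0, 1.0])              # free-flow travel times
capacity = np.array([500.0, 500.0])
current_flow = np.array([1000.0, 0.0])   # all demand currently on link 1
aon_flow = np.array([0.0, 1000.0])       # all-or-nothing solution uses link 2

def travel_time(flow):
    # Standard BPR volume-delay function
    return fftt * (1.0 + 0.15 * (flow / capacity) ** 4)

def derivative_of_objective(stepsize):
    flow = current_flow + stepsize * (aon_flow - current_flow)
    return float(np.sum(travel_time(flow) * (aon_flow - current_flow)))

res = root_scalar(derivative_of_objective, bracket=[0, 1])
print(res.root, res.converged)           # 0.5 by symmetry, True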
Example #12
    def create_from_osm(
            self,
            west: float = None,
            south: float = None,
            east: float = None,
            north: float = None,
            place_name: str = None,
            modes=["car", "transit", "bicycle", "walk"],
            spatial_index=False,
    ) -> None:
        """
        Downloads the network from OpenStreetMap

        Args:
            *west* (:obj:`float`, Optional): West most coordinate of the download bounding box

            *south* (:obj:`float`, Optional): South most coordinate of the download bounding box

            *east* (:obj:`float`, Optional): East most coordinate of the download bounding box

            *north* (:obj:`float`, Optional): North most coordinate of the download bounding box

            *place_name* (:obj:`str`, Optional): If not downloading with East-West-North-South boundingbox, this is
            required

            *modes* (:obj:`list`, Optional): List of all modes to be downloaded. Defaults to the modes in the parameter
            file

            *spatial_index* (:obj:`bool`, Optional): Creates spatial index. Defaults to False. REQUIRES SQLITE WITH RTREE
        """

        if self._check_if_exists():
            raise FileExistsError("You can only import an OSM network into a brand new model file")

        self.create_empty_tables()

        curr = self.conn.cursor()
        curr.execute("""ALTER TABLE links ADD COLUMN osm_id integer""")
        curr.execute("""ALTER TABLE nodes ADD COLUMN osm_id integer""")
        self.conn.commit()

        if isinstance(modes, (tuple, list)):
            modes = list(modes)
        elif isinstance(modes, str):
            modes = [modes]
        else:
            raise ValueError("'modes' needs to be string or list/tuple of string")

        if place_name is None:
            if min(east, west) < -180 or max(east, west) > 180 or min(north, south) < -90 or max(north, south) > 90:
                raise ValueError("Coordinates out of bounds")
            bbox = [west, south, east, north]
        else:
            bbox, report = placegetter(place_name)
            if bbox is None:
                msg = f'We could not find a reference for place name "{place_name}"'
                warn(msg)
                logger.warning(msg)
                return
            west, south, east, north = bbox
            for i in report:
                if "PLACE FOUND" in i:
                    logger.info(i)

        # Need to compute the size of the bounding box to not exceed it too much
        height = haversine((east + west) / 2, south, (east + west) / 2, north)
        width = haversine(east, (north + south) / 2, west, (north + south) / 2)
        area = height * width

        if area < max_query_area_size:
            polygons = [bbox]
        else:
            polygons = []
            parts = math.ceil(area / max_query_area_size)
            horizontal = math.ceil(math.sqrt(parts))
            vertical = math.ceil(parts / horizontal)
            dx = (east - west) / horizontal
            dy = (north - south) / vertical
            for i in range(horizontal):
                xmin = max(-180, west + i * dx)
                xmax = min(180, west + (i + 1) * dx)
                for j in range(vertical):
                    ymin = max(-90, south + j * dy)
                    ymax = min(90, south + (j + 1) * dy)
                    box = [xmin, ymin, xmax, ymax]
                    polygons.append(box)

        logger.info("Downloading data")
        self.downloader = OSMDownloader(polygons, modes)
        self.downloader.doWork()

        logger.info("Building Network")
        self.builder = OSMBuilder(self.downloader.json, self.source)
        self.builder.doWork()

        if spatial_index:
            logger.info("Adding spatial indices")
            self.add_spatial_index()

        self.add_triggers()
        logger.info("Network built successfully")
Example #13
import numpy as np
from aequilibrae.paths.graph import Graph
from aequilibrae import logger

try:
    from aequilibrae.paths.AoN import update_path_trace, path_computation
except ImportError as ie:
    logger.warning(f'Could not import procedures from the binary. {ie.args}')


class PathResults:
    """
    Path computation result holder

    ::

          from aequilibrae.project import Project
          from aequilibrae.paths.results import PathResults

          proj = Project()
          proj.load('path/to/project.sqlite')
          proj.network.build_graphs()
          # Mode c is car in this project
          car_graph = proj.network.graphs['c']

          # minimize distance
          car_graph.set_graph('distance')

          res = PathResults()
          res.prepare(car_graph)
          res.compute_path(17, 13199)
    """
Example #14
import importlib.util as iutil
import numpy as np
from typing import List, Dict
from warnings import warn
from ..utils import WorkerThread
from aequilibrae.paths.traffic_class import TrafficClass
from aequilibrae.paths.results import AssignmentResults
from aequilibrae.paths.all_or_nothing import allOrNothing
from aequilibrae import logger

try:
    from aequilibrae.paths.AoN import linear_combination, linear_combination_skims, aggregate_link_costs
    from aequilibrae.paths.AoN import triple_linear_combination, triple_linear_combination_skims
    from aequilibrae.paths.AoN import copy_one_dimension, copy_two_dimensions, copy_three_dimensions
except ImportError as ie:
    logger.warning(f"Could not import procedures from the binary. {ie.args}")

import scipy

if int(scipy.__version__.split(".")[1]) >= 3:
    from scipy.optimize import root_scalar

    recent_scipy = True
else:
    from scipy.optimize import root as root_scalar

    recent_scipy = False
    logger.warning(
        "Using older version of Scipy. For better performance, use Scipy >= 1.4"
    )
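The version gate above matters because the two SciPy root finders expose different result objects, which is why the calculate_stepsize methods shown above and below branch on recent_scipy. A quick sketch of both call shapes on a toy function:

from scipy.optimize import root, root_scalar

def f(x):
    return x ** 3 - 1.0                          # toy function with a root at x = 1

new_style = root_scalar(f, bracket=[0, 2])       # SciPy >= 1.2 scalar root finder
print(new_style.root, new_style.converged)

old_style = root(f, 0.5)                         # older generic solver used as a fallback
print(old_style.x[0], old_style.success)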
Example #15
    def calculate_stepsize(self):
        """Calculate optimal stepsize in descent direction"""
        if self.algorithm == "msa":
            self.stepsize = 1.0 / self.iter
            return

        class_specific_term = self.__derivative_of_objective_stepsize_independent()
        derivative_of_objective = partial(
            self.__derivative_of_objective_stepsize_dependent,
            const_term=class_specific_term)

        x_tol = max(min(1e-6, self.rgap * 1e-5), 1e-12)

        try:
            if recent_scipy:
                min_res = root_scalar(derivative_of_objective,
                                      bracket=[0, 1],
                                      xtol=x_tol)
                self.stepsize = min_res.root
                if not min_res.converged:
                    logger.warning(
                        "Descent direction stepsize finder has not converged")
            else:
                min_res = root_scalar(derivative_of_objective,
                                      1 / self.iter,
                                      xtol=x_tol)
                if not min_res.success:
                    logger.warning(
                        "Descent direction stepsize finder has not converged")
                self.stepsize = min_res.x[0]
                if self.stepsize <= 0.0 or self.stepsize >= 1.0:
                    raise ValueError("wrong root")

            self.conjugate_failed = False

        except ValueError as e:
            # We can have iterations where the objective function is not *strictly* convex, but the scipy
            # method cannot deal with this. Stepsize is then either given by 1 or 0, depending on where the
            # objective function is smaller. However, using zero would mean the overall solution would not
            # get updated, and therefore we set the stepsize to add a small fraction of the AoN. A heuristic
            # value equal to the corresponding MSA step size seems to work well in practice.
            if self.algorithm == "bfw":
                self.betas.fill(-1)
            if derivative_of_objective(0.0) < derivative_of_objective(1.0):
                if self.algorithm == "frank-wolfe" or self.conjugate_failed:
                    # Use a fraction of the MSA stepsize. We observe that 1e-4 works well in
                    # practice; however, for a large number of iterations this might be too
                    # much, so use this heuristic instead.
                    tiny_step = 1e-2 / self.iter
                    logger.warning(
                        f"# Alert: Adding {tiny_step} as step size to make it non-zero. {e.args}"
                    )
                    self.stepsize = tiny_step
                else:
                    self.stepsize = 0.0
                    # need to reset conjugate / bi-conjugate direction search
                    self.do_fw_step = True
                    self.conjugate_failed = True
                    self.iteration_issue.append(
                        f"Found bad conjugate direction step. Performing FW search. {e.args}"
                    )
                    # By doing it recursively, we avoid doing the same AoN again
                    self.__calculate_step_direction()
                    self.calculate_stepsize()

            else:
                # Do we want to keep some of the old solution, or just throw away everything?
                self.stepsize = 1.0

        assert 0 <= self.stepsize <= 1.0
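Compared with the earlier version, the derivative here is split into a stepsize-independent term computed once per iteration and a stepsize-dependent remainder, glued together with functools.partial. A tiny sketch of that pattern (hypothetical terms):

from functools import partial

def derivative_of_objective(stepsize, const_term):
    # const_term stands in for the class-specific, stepsize-independent part
    return const_term + 2.0 * stepsize

const = -1.0                                    # hypothetical precomputed term
derivative = partial(derivative_of_objective, const_term=const)
print(derivative(0.0), derivative(0.5))         # -1.0 0.0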
Example #16
    def create_from_osm(
        self,
        west: float = None,
        south: float = None,
        east: float = None,
        north: float = None,
        place_name: str = None,
        modes=["car", "transit", "bicycle", "walk"],
    ) -> None:
        """
        Downloads the network from OpenStreetMap

        Args:
            *west* (:obj:`float`, Optional): West most coordinate of the download bounding box

            *south* (:obj:`float`, Optional): South most coordinate of the download bounding box

            *east* (:obj:`float`, Optional): East most coordinate of the download bounding box

            *north* (:obj:`float`, Optional): North most coordinate of the download bounding box

            *place_name* (:obj:`str`, Optional): If not downloading with East-West-North-South boundingbox, this is
            required

            *modes* (:obj:`list`, Optional): List of all modes to be downloaded. Defaults to the modes in the parameter
            file

        ::

            p = Project()
            p.new(nm)

        ::

            from aequilibrae import Project, Parameters
            p = Project()
            p.new('path/to/project')

            # We now choose a different overpass endpoint (say a deployment in your local network)
            par = Parameters()
            par.parameters['osm']['overpass_endpoint'] = "http://192.168.1.234:5678/api"

            # Because we have our own server, we can set a bigger area for download (in M2)
            par.parameters['osm']['max_query_area_size'] = 10000000000

            # And have no pause between successive queries
            par.parameters['osm']['sleeptime'] = 0

            # Save the parameters to disk
            par.write_back()

            # And do the import
            p.network.create_from_osm(place_name=my_beautiful_hometown)
            p.close()
        """

        if self.count_links() > 0:
            raise FileExistsError(
                "You can only import an OSM network into a brand new model file"
            )

        curr = self.conn.cursor()
        curr.execute("""ALTER TABLE links ADD COLUMN osm_id integer""")
        curr.execute("""ALTER TABLE nodes ADD COLUMN osm_id integer""")
        self.conn.commit()

        if isinstance(modes, (tuple, list)):
            modes = list(modes)
        elif isinstance(modes, str):
            modes = [modes]
        else:
            raise ValueError(
                "'modes' needs to be string or list/tuple of string")

        if place_name is None:
            if min(east, west) < -180 or max(east, west) > 180 or min(
                    north, south) < -90 or max(north, south) > 90:
                raise ValueError("Coordinates out of bounds")
            bbox = [west, south, east, north]
        else:
            bbox, report = placegetter(place_name)
            if bbox is None:
                msg = f'We could not find a reference for place name "{place_name}"'
                warn(msg)
                logger.warning(msg)
                return
            west, south, east, north = bbox
            for i in report:
                if "PLACE FOUND" in i:
                    logger.info(i)

        # Need to compute the size of the bounding box to not exceed it too much
        height = haversine((east + west) / 2, south, (east + west) / 2, north)
        width = haversine(east, (north + south) / 2, west, (north + south) / 2)
        area = height * width

        par = Parameters().parameters["osm"]
        max_query_area_size = par["max_query_area_size"]

        if area < max_query_area_size:
            polygons = [bbox]
        else:
            polygons = []
            parts = math.ceil(area / max_query_area_size)
            horizontal = math.ceil(math.sqrt(parts))
            vertical = math.ceil(parts / horizontal)
            dx = (east - west) / horizontal
            dy = (north - south) / vertical
            for i in range(horizontal):
                xmin = max(-180, west + i * dx)
                xmax = min(180, west + (i + 1) * dx)
                for j in range(vertical):
                    ymin = max(-90, south + j * dy)
                    ymax = min(90, south + (j + 1) * dy)
                    box = [xmin, ymin, xmax, ymax]
                    polygons.append(box)
        logger.info("Downloading data")
        self.downloader = OSMDownloader(polygons, modes)
        self.downloader.doWork()

        logger.info("Building Network")
        self.builder = OSMBuilder(self.downloader.json, self.source)
        self.builder.doWork()

        logger.info("Network built successfully")
def connector_creation(geo,
                       zone_id: int,
                       srid: int,
                       mode_id: str,
                       link_types="",
                       connectors=1):
    if len(mode_id) > 1:
        raise Exception(
            "We can only add centroid connectors for one mode at a time")

    conn = database_connection()
    curr = conn.cursor()

    curr.execute("select count(*) from nodes where node_id=?", [zone_id])
    if curr.fetchone()[0] == 0:
        warn("This centroid does not exist. Please create it first")
        return

    proj_nodes = network.Nodes()
    node = proj_nodes.get(zone_id)
    curr.execute(
        "select count(*) from links where a_node=? and instr(modes,?) > 0",
        [zone_id, mode_id])
    if curr.fetchone()[0] > 0:
        warn("Mode is already connected")
        return

    if len(link_types) > 0:
        lt = f"*[{link_types}]*"
    else:
        curr.execute("Select link_type_id from link_types")
        lt = "".join([x[0] for x in curr.fetchall()])
        lt = f"*[{lt}]*"

    sql = """select node_id, ST_asBinary(geometry), modes, link_types from nodes where ST_Within(geometry, GeomFromWKB(?, ?)) and
                    (nodes.rowid in (select rowid from SpatialIndex where f_table_name = 'nodes' and
                    search_frame = GeomFromWKB(?, ?)))
            and link_types glob ? and instr(modes, ?)>0"""

    # We progressively expand the search area by the zone's average radius
    # (up to 10 radii), beginning with a strict search within the zone
    buffer = 0
    increase = sqrt(geo.area / pi)
    dt = []
    while dt == [] and buffer <= increase * 10:
        wkb = geo.buffer(buffer).wkb
        curr.execute(sql, [wkb, srid, wkb, srid, lt, mode_id])
        dt = curr.fetchall()
        buffer += increase

    if buffer > increase:
        msg = f"Could not find node inside zone {zone_id}. Search area was expanded until we found a suitable node"
        logger.warning(msg)
    if dt == []:
        warn(
            f"FAILED! Could not find suitable nodes to connect within 5 times the diameter of zone {zone_id}."
        )
        return

    coords = []
    nodes = []
    for node_id, wkb, modes, link_types in dt:
        node_geo = shapely.wkb.loads(wkb)
        coords.append([node_geo.x, node_geo.y])
        nodes.append(node_id)

    num_connectors = connectors
    if len(nodes) == 0:
        raise Exception(
            "We could not find any candidate nodes that satisfied your criteria"
        )
    elif len(nodes) < connectors:
        warn(
            f"We have fewer possible nodes than required connectors for zone {zone_id}. Will connect all of them."
        )
        num_connectors = len(nodes)

    if num_connectors == len(coords):
        all_nodes = nodes
    else:
        features = np.array(coords)
        whitened = whiten(features)
        centroids, allocation = kmeans2(whitened, num_connectors)

        all_nodes = set()
        for i in range(num_connectors):
            nds = [x for x, y in zip(nodes, list(allocation)) if y == i]
            centr = centroids[i]
            positions = [x for x, y in zip(whitened, allocation) if y == i]
            if positions:
                dist = cdist(np.array([centr]), np.array(positions)).flatten()
                node_to_connect = nds[dist.argmin()]
                all_nodes.add(node_to_connect)

    nds = list(all_nodes)
    data = [zone_id] + nds
    curr.execute(
        f'select b_node from links where a_node=? and b_node in ({",".join(["?"] * len(nds))})',
        data)

    data = [x[0] for x in curr.fetchall()]

    if data:
        qry = ",".join(["?"] * len(data))
        dt = [mode_id, zone_id] + data
        curr.execute(
            f"Update links set modes=modes || ? where a_node=? and b_node in ({qry})",
            dt)
        nds = [x for x in nds if x not in data]
        logger.warning(
            f"Mode {mode_id} added to {len(data)} existing centroid connectors for zone {zone_id}"
        )
        conn.commit()

    curr.close()
    links = network.Links()

    for node_to_connect in nds:
        link = links.new()
        node_to = proj_nodes.get(node_to_connect)
        link.geometry = LineString([node.geometry, node_to.geometry])
        link.modes = mode_id
        link.direction = 0
        link.link_type = "centroid_connector"
        link.name = f"centroid connector zone {zone_id}"
        link.capacity_ab = INFINITE_CAPACITY
        link.capacity_ba = INFINITE_CAPACITY
        link.save()
    if nds:
        logger.warning(
            f"{len(nds)} new centroid connectors for mode {mode_id} added for centroid {zone_id}"
        )

    conn.commit()
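The connector placement above relies on k-means clustering of the candidate node coordinates, keeping the node closest to each cluster centre. A self-contained sketch of that selection step (random toy coordinates, not project data):

import numpy as np
from scipy.cluster.vq import whiten, kmeans2
from scipy.spatial.distance import cdist

rng = np.random.default_rng(0)
coords = rng.uniform(0.0, 1.0, size=(20, 2))   # candidate node coordinates (toy data)
k = 3                                          # desired number of connectors

whitened = whiten(coords)                      # normalise each coordinate by its std dev
centroids, labels = kmeans2(whitened, k, minit="points")

chosen = []
for i in range(k):
    members = np.where(labels == i)[0]
    if members.size:
        dist = cdist(centroids[i][None, :], whitened[members]).flatten()
        chosen.append(int(members[dist.argmin()]))  # node closest to the cluster centre
print(chosen)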