Example #1
    def renumber(self, new_id: int):
        """Renumbers the node in the network

        Raises a warning if another node already exists with this node_id

        Args:
            *new_id* (:obj:`int`): New node_id
        """

        new_id = int(new_id)
        if new_id == self.node_id:
            warn("This is already the node number")
            return

        conn = database_connection()
        curr = conn.cursor()

        curr.execute("BEGIN;")
        curr.execute("Update Nodes set node_id=? where node_id=?",
                     [new_id, self.node_id])
        curr.execute("Update Links set a_node=? where a_node=?",
                     [new_id, self.node_id])
        curr.execute("Update Links set b_node=? where b_node=?",
                     [new_id, self.node_id])
        curr.execute("COMMIT;")
        conn.close()
        logger.info(f"Node {self.node_id} was renumbered to {new_id}")
        self.__dict__["node_id"] = new_id
        self.__original__["node_id"] = new_id
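A short usage sketch for the method above, assuming an open aequilibrae project whose node table exposes a get() accessor (the path and IDs here are illustrative):

from aequilibrae import Project

project = Project()
project.open('path/to/project')

node = project.network.nodes.get(1)  # assumed accessor returning a Node object
node.renumber(999)                   # cascades into Links.a_node/b_node, as shown above

project.close()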
Example #2
    def open(self, project_path: str) -> None:
        """
        Loads project from disk

        Args:
            *project_path* (:obj:`str`): Full path to the project data folder. If no project exists
            inside that folder, it will fail.
        """

        if self.__other_project_still_open():
            raise Exception("You already have a project open. Close that project before opening another one")

        file_name = os.path.join(project_path, "project_database.sqlite")
        if not os.path.isfile(file_name):
            raise FileNotFoundError("Model does not exist. Check your path and try again")

        self.project_base_path = project_path
        self.path_to_file = file_name
        self.source = self.path_to_file
        os.environ[ENVIRON_VAR] = self.project_base_path
        self.conn = database_connection()

        self.__load_objects()
        self.__set_logging_path()
        logger.info(f"Opened project on {self.project_base_path}")
        self.logger = logger
        clean()
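A minimal sketch of calling open() on a fresh Project instance; the folder is illustrative and must already contain project_database.sqlite:

from aequilibrae import Project

project = Project()
project.open('/data/models/my_model')  # fails with FileNotFoundError if no model is inside
print(project.project_base_path)
project.close()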
Example #3
    def data_structures(self):

        osmi = []
        logger.info("Consolidating geo elements")
        self.__emit_all(["text", "Consolidating geo elements"])
        self.__emit_all(["maxValue", len(self.osm_items)])

        for i, x in enumerate(self.osm_items):
            osmi.append(x["elements"])
            self.__emit_all(["Value", i])
        self.osm_items = sum(osmi, [])

        logger.info("Separating nodes and links")
        self.__emit_all(["text", "Separating nodes and links"])
        self.__emit_all(["maxValue", len(self.osm_items)])

        alinks = []
        n = []
        for i, x in enumerate(self.osm_items):
            if x["type"] == "way":
                alinks.append(x)
            elif x["type"] == "node":
                n.append(x)
            self.__emit_all(["Value", i])

        self.osm_items = None
        logger.info("Setting data structures for nodes")
        self.__emit_all(["text", "Setting data structures for nodes"])
        self.__emit_all(["maxValue", len(n)])

        for i, node in enumerate(n):
            nid = node.pop("id")
            _ = node.pop("type")
            self.nodes[nid] = node
            self.__emit_all(["Value", i])
        del n

        logger.info("Setting data structures for links")
        self.__emit_all(["text", "Setting data structures for links"])
        self.__emit_all(["maxValue", len(alinks)])

        all_nodes = []
        for i, link in enumerate(alinks):
            osm_id = link.pop("id")
            _ = link.pop("type")
            all_nodes.extend(link["nodes"])
            self.links[osm_id] = link
            self.__emit_all(["Value", i])
        del alinks

        logger.info("Finalizing data structures")
        self.__emit_all(["text", "Finalizing data structures"])

        node_count = self.unique_count(np.array(all_nodes))

        return node_count
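unique_count() is not shown here, but the way node_count is consumed later (column 0 holds node IDs, column 1 how often each appears in ways) suggests an implementation along these lines; this is a sketch, not the library's code:

import numpy as np

def unique_count(values: np.ndarray) -> np.ndarray:
    # Hypothetical helper: a 2-column array of (value, count), sorted by
    # value, so callers can np.searchsorted on column 0
    ids, counts = np.unique(values, return_counts=True)
    return np.column_stack((ids, counts))

print(unique_count(np.array([10, 10, 11, 12, 12, 12])))
# [[10  2]
#  [11  1]
#  [12  3]]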
Example #4
    def save(self) -> None:
        """Saves any field descriptions which my have been changed to the database"""

        qry = 'update attributes_documentation set description="{}" where attribute="{}" and name_table="{}"'
        for key, val in self._original_values.items():
            new_val = self.__dict__[key]
            if new_val != val:
                self.__run_query_commit(qry.format(new_val, key, self._table))
                logger.info(
                    f"Metadata for field {key} on table {self._table} was updated to {new_val}"
                )
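The query above interpolates the description straight into the SQL string, which breaks if a description contains a double quote. A sketch of the same update with bound parameters (plain sqlite3, since the private commit helper is not shown):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table attributes_documentation (name_table text, attribute text, description text)")
conn.execute("insert into attributes_documentation values ('links', 'lanes', 'old text')")

# Placeholders let SQLite handle quoting, so any description text is safe
qry = "update attributes_documentation set description=? where attribute=? and name_table=?"
conn.execute(qry, ['Number of lanes per direction', 'lanes', 'links'])
conn.commit()
print(conn.execute("select description from attributes_documentation").fetchone())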
Example #5
    def add(self, mode: Mode) -> None:
        """ We add a mode to the project"""
        self.__update_list_of_modes()
        if mode.mode_id in self.__all_modes:
            raise ValueError("Mode already exists in the model")

        self.curr.execute(
            "insert into 'modes'(mode_id, mode_name) Values(?,?)",
            [mode.mode_id, mode.mode_name])
        self.conn.commit()
        logger.info(
            f'mode {mode.mode_name}({mode.mode_id}) was added to the project')
        mode.save()
        self.__update_list_of_modes()
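A hypothetical way to build the Mode object this method expects, assuming the container also offers a new() factory like other aequilibrae tables (the accessor names are assumptions):

from aequilibrae import Project

project = Project()
project.open('path/to/project')

modes = project.network.modes   # assumed accessor on an open project
bike_share = modes.new('k')     # hypothetical factory; mode_id is a single character
bike_share.mode_name = 'bike_share'
modes.add(bike_share)
project.close()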
Example #6
    def close(self) -> None:
        """Safely closes the project"""
        if ENVIRON_VAR in os.environ:
            self.conn.commit()
            clean()
            self.conn.close()
            # Remove the attributes themselves; a bare `del obj` inside a loop
            # would only unbind the loop variable, not the attribute
            del self.network.link_types
            del self.network.modes
            del self.parameters
            del self.network
            del os.environ[ENVIRON_VAR]
            logger.info(f"Closed project on {self.project_base_path}")
        else:
            warnings.warn("There is no Aequilibrae project open that you could close")
Example #7
def run_queries_from_sql_file(conn: Connection, qry_file: str) -> None:
    curr = conn.cursor()

    with open(qry_file, "r") as sql_file:
        query_list = sql_file.read()

    # Running one query/command at a time helps with debugging in case a particular command fails
    for cmd in query_list.split("--#"):
        try:
            curr.execute(cmd)
        except Exception as e:
            msg = f"Error running SQL command: {e.args}"
            logger.error(msg)
            logger.info(cmd)
            raise e
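The split token is a SQL line comment, so a file in this format still runs unmodified in other tools. A self-contained sketch of the expected layout and the same one-command-at-a-time loop:

import sqlite3

sql = """
create table zones (zone_id integer primary key);
--#
create index idx_zones on zones (zone_id);
"""

conn = sqlite3.connect(":memory:")
curr = conn.cursor()
# Each "--#"-delimited chunk holds exactly one statement
for cmd in sql.split("--#"):
    curr.execute(cmd)
conn.commit()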
Example #8
    def __add_trigger_from_file(self, qry_file: str):
        curr = self.conn.cursor()
        with open(qry_file, "r") as sql_file:
            query_list = sql_file.read()

        # Run one query/command at a time
        for cmd in query_list.split("#"):
            try:
                curr.execute(cmd)
            except Exception as e:
                msg = f"Error creating trigger: {e.args}"
                logger.error(msg)
                logger.info(cmd)
        self.conn.commit()
Example #9
    def new(self, zone_id: int) -> Zone:
        """Creates a new zone

        Returns:
            *zone* (:obj:`Zone`): A new zone object populated only with zone_id (but not saved in the model yet)
            """

        if zone_id in self.__items:
            raise Exception(f"Zone ID {zone_id} already exists")

        data = {key: None for key in self.__fields}
        data["zone_id"] = zone_id

        logger.info(f"Zone with id {zone_id} was created")
        return self.__create_return_zone(data)
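A hedged usage sketch, assuming the containing class is reachable as project.zoning on an open project:

from aequilibrae import Project

project = Project()
project.open('path/to/project')

zone = project.zoning.new(42)  # assumed accessor; returns a Zone not yet saved in the model
# ... populate geometry and other fields, then persist explicitly
zone.save()                    # assumed persistence method
project.close()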
Example #10
    def data_structures(self):
        logger.info("Separating nodes and links")
        self.__emit_all(["text", "Separating nodes and links"])
        self.__emit_all(["maxValue", len(self.osm_items)])

        alinks = []
        n = []
        tot_items = len(self.osm_items)
        # When downloading data for entire countries, memory consumption can be quite intensive
        # So we get rid of everything we don't need
        for i in range(tot_items, 0, -1):
            item = self.osm_items.pop(-1)
            if item['type'] == "way":
                alinks.append(item)
            elif item['type'] == "node":
                n.append(item)
            self.__emit_all(["Value", tot_items - i])
        gc.collect()

        logger.info("Setting data structures for nodes")
        self.__emit_all(["text", "Setting data structures for nodes"])
        self.__emit_all(["maxValue", len(n)])

        for i, node in enumerate(n):
            nid = node.pop("id")
            _ = node.pop("type")
            self.nodes[nid] = node
            self.__emit_all(["Value", i])
        del n

        logger.info("Setting data structures for links")
        self.__emit_all(["text", "Setting data structures for links"])
        self.__emit_all(["maxValue", len(alinks)])

        all_nodes = []
        for i, link in enumerate(alinks):
            osm_id = link.pop("id")
            _ = link.pop("type")
            all_nodes.extend(link["nodes"])
            self.links[osm_id] = link
            self.__emit_all(["Value", i])
        del alinks

        logger.info("Finalizing data structures")
        self.__emit_all(["text", "Finalizing data structures"])

        node_count = self.unique_count(np.array(all_nodes))

        return node_count
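The pop-from-the-tail loop above is the memory trick the comment describes: the source list shrinks while the two target lists grow, so peak memory stays near one copy of the data. A standalone sketch of the same pattern:

import gc

items = [{"type": "node"} if i % 2 else {"type": "way"} for i in range(6)]

nodes, ways = [], []
tot_items = len(items)
for i in range(tot_items, 0, -1):
    item = items.pop(-1)          # frees the source entry as we classify it
    (ways if item["type"] == "way" else nodes).append(item)
gc.collect()                      # reclaim whatever the shrunken list released
print(len(nodes), len(ways))      # 3 3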
Example #11
    def new(self, project_path: str) -> None:
        """Creates a new project

        Args:
            *project_path* (:obj:`str`): Full path to the project data folder. If folder exists, it will fail
        """
        if self.__other_project_still_open():
            raise Exception("You already have a project open. Close that project before creating a new one")

        self.project_base_path = project_path
        self.path_to_file = os.path.join(self.project_base_path, "project_database.sqlite")
        self.source = self.path_to_file

        if os.path.isdir(project_path):
            raise FileExistsError("Location already exists. Choose a different name or remove the existing directory")
        os.environ[ENVIRON_VAR] = self.project_base_path

        self.__create_empty_project()
        self.__load_objects()
        self.about.create()
        self.__set_logging_path()
        self.logger = logger
        logger.info(f"Created project on {self.project_base_path}")
Example #12
    def import_nodes(self, nodes_to_add, node_ids):
        table = "nodes"
        fields = self.get_node_fields()
        field_names = ",".join('"{}"'.format(x) for x in fields)

        logger.info("Adding network nodes")
        self.__emit_all(["text", "Adding network nodes"])
        self.__emit_all(["maxValue", len(nodes_to_add)])

        vars = {}
        for counter, osm_id in enumerate(nodes_to_add):
            self.__emit_all(["Value", counter])
            vars["node_id"] = node_ids[osm_id]
            vars["osm_id"] = osm_id
            vars["is_centroid"] = 0
            geometry = "POINT({} {})".format(self.nodes[osm_id]["lon"],
                                             self.nodes[osm_id]["lat"])

            attributes = [vars.get(x) for x in fields]
            attributes = ", ".join([str(x) for x in attributes])
            sql = self.insert_qry.format(table, field_names, attributes,
                                         geometry)
            sql = sql.replace("None", "null")

            try:
                self.curr.execute(sql)
            except Exception as e:
                data = list(vars.values())
                logger.error("error when inserting NODE {}. Error {}".format(
                    data, e.args))
                logger.error(sql)

        self.conn.commit()
        self.curr.close()
        self.__emit_all(["finished_threaded_procedure", 0])
Example #13
    def transfer_layer_features(self, table, layer, layer_fields):
        self.emit_messages(message=f"Transferring features from {table} layer",
                           value=0,
                           max_val=layer.featureCount())
        curr = self.project.conn.cursor()

        field_titles = ", ".join(list(layer_fields.keys()))
        all_modes = set()
        for j, f in enumerate(layer.getFeatures()):
            self.emit_messages(value=j)
            attrs = []
            for k, val in layer_fields.items():
                if val < 0:
                    attrs.append("NULL")
                else:
                    attr_val = self.convert_data(f.attributes()[val])
                    if not str(attr_val).isnumeric():
                        attrs.append(f'"{attr_val}"')
                    else:
                        attrs.append(attr_val)

            attrs = ", ".join(attrs)
            geom = f.geometry().asWkt().upper()
            crs = str(layer.crs().authid().split(":")[1])

            sql = f"INSERT INTO {table} ({field_titles} , geometry)  VALUES ({attrs} , GeomFromText('{geom}', {crs}))"

            if table == 'links':
                all_modes.update(list(f.attributes()[layer_fields['modes']]))
            try:
                curr.execute(sql)
            except Exception:
                logger.info(f'Failed inserting link {f.id()}')
                logger.info(sql)
                if f.id():
                    msg = f"feature with id {f.id()} could not be added to layer {table}"
                else:
                    msg = f"feature with no node id present. It could not be added to layer {table}"
                self.report.append(msg)

        # We check that all modes used in the layer exist in the model
        a = self.project.network.modes()
        for x in all_modes:
            if x not in a:
                par = [
                    f'"automatic_{x}"', f'"{x}"',
                    '"Mode automatically added during project creation from layers"'
                ]
                curr.execute(
                    f'INSERT INTO "modes" (mode_name, mode_id, description) VALUES({",".join(par)})'
                )
                logger.info(f'New mode inserted during project creation {x}')
        self.project.conn.commit()
        curr.close()
Example #14
    def __add_mode_triggers(self) -> None:
        logger.info("Adding mode table triggers")
        pth = os.path.dirname(os.path.realpath(__file__))
        qry_file = os.path.join(pth, "database_triggers", "modes_table_triggers.sql")
        self.__add_trigger_from_file(qry_file)
Example #15
    def execute(self):
        for c in self.traffic_classes:
            c.graph.set_graph(self.time_field)

        logger.info(f"{self.algorithm} Assignment STATS")
        logger.info("Iteration, RelativeGap, stepsize")
        for self.iter in range(1, self.max_iter + 1):
            self.iteration_issue = []
            if pyqt:
                self.equilibration.emit(["rgap", self.rgap])
                self.equilibration.emit(["iterations", self.iter])

            aon_flows = []
            for c in self.traffic_classes:
                aon = allOrNothing(c.matrix, c.graph, c._aon_results)
                if pyqt:
                    aon.assignment.connect(self.signal_handler)
                aon.execute()
                aon_flows.append(c._aon_results.total_link_loads * c.pce)
            self.aon_total_flow = np.sum(aon_flows, axis=0)

            flows = []
            if self.iter == 1:
                for c in self.traffic_classes:
                    copy_two_dimensions(c.results.link_loads,
                                        c._aon_results.link_loads, self.cores)
                    copy_one_dimension(c.results.total_link_loads,
                                       c._aon_results.total_link_loads,
                                       self.cores)
                    if c.results.num_skims > 0:
                        copy_three_dimensions(c.results.skims.matrix_view,
                                              c._aon_results.skims.matrix_view,
                                              self.cores)
                    flows.append(c.results.total_link_loads * c.pce)
            else:
                self.__calculate_step_direction()
                self.calculate_stepsize()
                for c in self.traffic_classes:
                    stp_dir = self.step_direction[c.mode]
                    cls_res = c.results
                    linear_combination(cls_res.link_loads, stp_dir.link_loads,
                                       cls_res.link_loads, self.stepsize,
                                       self.cores)
                    if cls_res.num_skims > 0:
                        linear_combination_skims(
                            cls_res.skims.matrix_view,
                            stp_dir.skims.matrix_view,
                            cls_res.skims.matrix_view,
                            self.stepsize,
                            self.cores,
                        )
                    cls_res.total_flows()
                    flows.append(cls_res.total_link_loads * c.pce)

            self.fw_total_flow = np.sum(flows, axis=0)

            # Check convergence
            # This needs to be done with the current costs, and not the future ones
            converged = False
            if self.iter > 1:
                converged = self.check_convergence()

            self.convergence_report["iteration"].append(self.iter)
            self.convergence_report["rgap"].append(self.rgap)
            self.convergence_report["warnings"].append("; ".join(
                self.iteration_issue))
            self.convergence_report["alpha"].append(self.stepsize)

            if self.algorithm == "bfw":
                self.convergence_report["beta0"].append(self.betas[0])
                self.convergence_report["beta1"].append(self.betas[1])
                self.convergence_report["beta2"].append(self.betas[2])

            logger.info(f"{self.iter},{self.rgap},{self.stepsize}")
            if converged:
                if self.steps_below >= self.steps_below_needed_to_terminate:
                    break
                else:
                    self.steps_below += 1

            self.vdf.apply_vdf(
                self.congested_time,
                self.fw_total_flow,
                self.capacity,
                self.free_flow_tt,
                *self.vdf_parameters,
                self.cores,
            )

            for c in self.traffic_classes:
                aggregate_link_costs(self.congested_time, c.graph.compact_cost,
                                     c.results.crosswalk)
                if self.time_field in c.graph.skim_fields:
                    idx = c.graph.skim_fields.index(self.time_field)
                    c.graph.skims[:, idx] = self.congested_time[:]
                c._aon_results.reset()

        if self.rgap > self.rgap_target:
            logger.error(f"Desired RGap of {self.rgap_target} was NOT reached")
        logger.info(
            f"{self.algorithm} Assignment finished. {self.iter} iterations and {self.rgap} final gap"
        )
        if pyqt:
            self.equilibration.emit(["rgap", self.rgap])
            self.equilibration.emit(["iterations", self.iter])
            self.equilibration.emit(["finished_threaded_procedure"])
Example #16
    def execute(self):
        # We build the fixed cost field
        for c in self.traffic_classes:
            if c.fixed_cost_field:
                # divide fixed cost by volume-dependent prefactor (vot) such that we don't have to do it for
                # each occurrence in the objective function. TODO: Need to think about cost skims here, we do
                # not want this there I think
                c.fixed_cost[c.graph.graph.__supernet_id__] = (
                    c.graph.graph[c.fixed_cost_field].values[:] *
                    c.fc_multiplier / c.vot)
                c.fixed_cost[np.isnan(c.fixed_cost)] = 0

        # TODO: Review how to eliminate this. It looks unnecessary
        # Just need to create some arrays for cost
        for c in self.traffic_classes:
            c.graph.set_graph(self.time_field)

        logger.info(f"{self.algorithm} Assignment STATS")
        logger.info("Iteration, RelativeGap, stepsize")
        for self.iter in range(1, self.max_iter + 1):
            self.iteration_issue = []
            if pyqt:
                self.equilibration.emit(["rgap", self.rgap])
                self.equilibration.emit(["iterations", self.iter])

            aon_flows = []

            self.__maybe_create_path_file_directories()

            for c in self.traffic_classes:  # type: TrafficClass
                # cost = c.fixed_cost / c.vot + self.congested_time #  now only once
                cost = c.fixed_cost + self.congested_time
                aggregate_link_costs(cost, c.graph.compact_cost,
                                     c.results.crosswalk)

                aon = allOrNothing(c.matrix, c.graph, c._aon_results)
                if pyqt:
                    aon.assignment.connect(self.signal_handler)
                aon.execute()
                c._aon_results.link_loads *= c.pce
                c._aon_results.total_flows()
                aon_flows.append(c._aon_results.total_link_loads)

            self.aon_total_flow = np.sum(aon_flows, axis=0)

            flows = []
            if self.iter == 1:
                for c in self.traffic_classes:
                    copy_two_dimensions(c.results.link_loads,
                                        c._aon_results.link_loads, self.cores)
                    c.results.total_flows()
                    if c.results.num_skims > 0:
                        copy_three_dimensions(c.results.skims.matrix_view,
                                              c._aon_results.skims.matrix_view,
                                              self.cores)
                    flows.append(c.results.total_link_loads)

                if self.algorithm == "all-or-nothing":
                    break

            else:
                self.__calculate_step_direction()
                self.calculate_stepsize()
                for c in self.traffic_classes:
                    stp_dir = self.step_direction[c.__id__]
                    cls_res = c.results
                    linear_combination(cls_res.link_loads, stp_dir.link_loads,
                                       cls_res.link_loads, self.stepsize,
                                       self.cores)

                    if cls_res.num_skims > 0:
                        linear_combination_skims(
                            cls_res.skims.matrix_view,
                            stp_dir.skims.matrix_view,
                            cls_res.skims.matrix_view,
                            self.stepsize,
                            self.cores,
                        )
                    cls_res.total_flows()
                    flows.append(cls_res.total_link_loads)
            self.fw_total_flow = np.sum(flows, axis=0)

            # Check convergence
            # This needs to be done with the current costs, and not the future ones
            converged = self.check_convergence() if self.iter > 1 else False

            self.vdf.apply_vdf(
                self.congested_time,
                self.fw_total_flow,
                self.capacity,
                self.free_flow_tt,
                *self.vdf_parameters,
                self.cores,
            )

            self.convergence_report["iteration"].append(self.iter)
            self.convergence_report["rgap"].append(self.rgap)
            self.convergence_report["warnings"].append("; ".join(
                self.iteration_issue))
            self.convergence_report["alpha"].append(self.stepsize)

            if self.algorithm in ["cfw", "bfw"]:
                self.convergence_report["beta0"].append(self.betas[0])
                self.convergence_report["beta1"].append(self.betas[1])
                self.convergence_report["beta2"].append(self.betas[2])

            for c in self.traffic_classes:
                c._aon_results.reset()
                if self.time_field not in c.graph.skim_fields:
                    continue
                idx = c.graph.skim_fields.index(self.time_field)
                c.graph.skims[:, idx] = self.congested_time[:]

            logger.info(f"{self.iter},{self.rgap},{self.stepsize}")
            if converged:
                self.steps_below += 1
                if self.steps_below >= self.steps_below_needed_to_terminate:
                    break
            else:
                self.steps_below = 0

        for c in self.traffic_classes:
            c.results.link_loads /= c.pce
            c.results.total_flows()

        # TODO (Jan 18/4/21): Do we want to blob store path files (by iteration, class, origin, destination) in sqlite?
        # or do we just use one big hdf5 file?

        if (self.rgap > self.rgap_target) and (self.algorithm !=
                                               "all-or-nothing"):
            logger.error(f"Desired RGap of {self.rgap_target} was NOT reached")
        logger.info(
            f"{self.algorithm} Assignment finished. {self.iter} iterations and {self.rgap} final gap"
        )
        if pyqt:
            self.equilibration.emit(["rgap", self.rgap])
            self.equilibration.emit(["iterations", self.iter])
            self.equilibration.emit(["finished_threaded_procedure"])
Example #17
    def create_from_osm(
        self,
        west: float = None,
        south: float = None,
        east: float = None,
        north: float = None,
        place_name: str = None,
        modes=["car", "transit", "bicycle", "walk"],
    ) -> None:
        """
        Downloads the network from Open-Street Maps

        Args:
            *west* (:obj:`float`, Optional): West most coordinate of the download bounding box

            *south* (:obj:`float`, Optional): South most coordinate of the download bounding box

            *east* (:obj:`float`, Optional): East most coordinate of the download bounding box

            *place_name* (:obj:`str`, Optional): If not downloading with East-West-North-South boundingbox, this is
            required

            *modes* (:obj:`list`, Optional): List of all modes to be downloaded. Defaults to the modes in the parameter
            file

            p = Project()
            p.new(nm)

        ::

            from aequilibrae import Project, Parameters
            p = Project()
            p.new('path/to/project')

            # We now choose a different overpass endpoint (say a deployment in your local network)
            par = Parameters()
            par.parameters['osm']['overpass_endpoint'] = "http://192.168.1.234:5678/api"

            # Because we have our own server, we can set a bigger area for download (in M2)
            par.parameters['osm']['max_query_area_size'] = 10000000000

            # And have no pause between successive queries
            par.parameters['osm']['sleeptime'] = 0

            # Save the parameters to disk
            par.write_back()

            # And do the import
            p.network.create_from_osm(place_name=my_beautiful_hometown)
            p.close()
        """

        if self.count_links() > 0:
            raise FileExistsError(
                "You can only import an OSM network into a brand new model file"
            )

        curr = self.conn.cursor()
        curr.execute("""ALTER TABLE links ADD COLUMN osm_id integer""")
        curr.execute("""ALTER TABLE nodes ADD COLUMN osm_id integer""")
        self.conn.commit()

        if isinstance(modes, (tuple, list)):
            modes = list(modes)
        elif isinstance(modes, str):
            modes = [modes]
        else:
            raise ValueError(
                "'modes' needs to be string or list/tuple of string")

        if place_name is None:
            if min(east, west) < -180 or max(east, west) > 180 or min(
                    north, south) < -90 or max(north, south) > 90:
                raise ValueError("Coordinates out of bounds")
            bbox = [west, south, east, north]
        else:
            bbox, report = placegetter(place_name)
            if bbox is None:
                msg = f'We could not find a reference for place name "{place_name}"'
                warn(msg)
                logger.warning(msg)
                return
            west, south, east, north = bbox
            for i in report:
                if "PLACE FOUND" in i:
                    logger.info(i)

        # Need to compute the size of the bounding box to not exceed it too much
        height = haversine((east + west) / 2, south, (east + west) / 2, north)
        width = haversine(east, (north + south) / 2, west, (north + south) / 2)
        area = height * width

        par = Parameters().parameters["osm"]
        max_query_area_size = par["max_query_area_size"]

        if area < max_query_area_size:
            polygons = [bbox]
        else:
            polygons = []
            parts = math.ceil(area / max_query_area_size)
            horizontal = math.ceil(math.sqrt(parts))
            vertical = math.ceil(parts / horizontal)
            dx = (east - west) / horizontal
            dy = (north - south) / vertical
            for i in range(horizontal):
                xmin = max(-180, west + i * dx)
                xmax = min(180, west + (i + 1) * dx)
                for j in range(vertical):
                    ymin = max(-90, south + j * dy)
                    ymax = min(90, south + (j + 1) * dy)
                    box = [xmin, ymin, xmax, ymax]
                    polygons.append(box)
        logger.info("Downloading data")
        self.downloader = OSMDownloader(polygons, modes)
        self.downloader.doWork()

        logger.info("Building Network")
        self.builder = OSMBuilder(self.downloader.json, self.source)
        self.builder.doWork()

        logger.info("Network built successfully")
Example #18
    def create_from_osm(
            self,
            west: float = None,
            south: float = None,
            east: float = None,
            north: float = None,
            place_name: str = None,
            modes=["car", "transit", "bicycle", "walk"],
            spatial_index=False,
    ) -> None:

        if self._check_if_exists():
            raise FileExistsError("You can only import an OSM network into a brand new model file")

        self.create_empty_tables()

        curr = self.conn.cursor()
        curr.execute("""ALTER TABLE links ADD COLUMN osm_id integer""")
        curr.execute("""ALTER TABLE nodes ADD COLUMN osm_id integer""")
        self.conn.commit()

        if isinstance(modes, (tuple, list)):
            modes = list(modes)
        elif isinstance(modes, str):
            modes = [modes]
        else:
            raise ValueError("'modes' needs to be string or list/tuple of string")

        if place_name is None:
            if min(east, west) < -180 or max(east, west) > 180 or min(north, south) < -90 or max(north, south) > 90:
                raise ValueError("Coordinates out of bounds")
            bbox = [west, south, east, north]
        else:
            bbox, report = placegetter(place_name)
            if bbox is None:
                msg = f'We could not find a reference for place name "{place_name}"'
                warn(msg)
                logger.warning(msg)
                return
            west, south, east, north = bbox
            for i in report:
                if "PLACE FOUND" in i:
                    logger.info(i)

        # Need to compute the size of the bounding box to not exceed it too much
        height = haversine((east + west) / 2, south, (east + west) / 2, north)
        width = haversine(east, (north + south) / 2, west, (north + south) / 2)
        area = height * width

        if area < max_query_area_size:
            polygons = [bbox]
        else:
            polygons = []
            parts = math.ceil(area / max_query_area_size)
            horizontal = math.ceil(math.sqrt(parts))
            vertical = math.ceil(parts / horizontal)
            dx = (east - west) / horizontal
            dy = (north - south) / vertical
            for i in range(horizontal):
                xmin = max(-180, west + i * dx)
                xmax = min(180, west + (i + 1) * dx)
                for j in range(vertical):
                    ymin = max(-90, south + j * dy)
                    ymax = min(90, south + (j + 1) * dy)
                    box = [xmin, ymin, xmax, ymax]
                    polygons.append(box)

        logger.info("Downloading data")
        self.downloader = OSMDownloader(polygons, modes)
        self.downloader.doWork()

        logger.info("Building Network")
        self.builder = OSMBuilder(self.downloader.json, self.conn)
        self.builder.doWork()

        if spatial_index:
            logger.info("Adding spatial indices")
            self.add_spatial_index()

        self.add_triggers()
        logger.info("Network built successfully")
Example #19
    def importing_links(self, node_count):
        node_ids = {}

        vars = {}
        vars["link_id"] = 1
        table = "links"
        fields = self.get_link_fields()
        self.__update_table_structure()
        field_names = ",".join(fields)

        logger.info("Adding network links")
        self.__emit_all(["text", "Adding network links"])
        L = len(list(self.links.keys()))
        self.__emit_all(["maxValue", L])

        counter = 0
        mode_codes, not_found_tags = self.modes_per_link_type()
        owf, twf = self.field_osm_source()

        for osm_id, link in self.links.items():
            self.__emit_all(["Value", counter])
            counter += 1
            if counter % 1000 == 0:
                logger.info(
                    f'Inserting segments from {counter:,} out of {L:,} OSM link objects'
                )
            vars["osm_id"] = osm_id
            vars['link_type'] = 'default'
            linknodes = link["nodes"]
            linktags = link["tags"]

            indices = np.searchsorted(node_count[:, 0], linknodes)
            nodedegree = node_count[indices, 1]

            # Makes sure that beginning and end are end nodes for a link
            nodedegree[0] = 2
            nodedegree[-1] = 2

            intersections = np.where(nodedegree > 1)[0]
            segments = intersections.shape[0] - 1

            # Attributes that are common to all individual links/segments
            vars["direction"] = (linktags.get("oneway") == "yes") * 1

            for k, v in owf.items():
                vars[k] = linktags.get(v)

            for k, v in twf.items():
                val = linktags.get(v["osm_source"])
                if vars["direction"] == 0:
                    for d1, d2 in [("ab", "forward"), ("ba", "backward")]:
                        vars[f"{k}_{d1}"] = self.__get_link_property(
                            d2, val, linktags, v)
                elif vars["direction"] == -1:
                    vars[f"{k}_ba"] = linktags.get(
                        f"{v['osm_source']}:{'backward'}", val)
                elif vars["direction"] == 1:
                    vars[f"{k}_ab"] = linktags.get(
                        f"{v['osm_source']}:{'forward'}", val)

            vars["modes"] = mode_codes.get(linktags.get("highway"),
                                           not_found_tags)

            vars['link_type'] = self.__link_type_quick_reference.get(
                vars['link_type'].lower(),
                self.__repair_link_type(vars['link_type']))

            if len(vars["modes"]) > 0:
                for i in range(segments):
                    attributes = self.__build_link_data(
                        vars, intersections, i, linknodes, node_ids, fields)
                    sql = self.insert_qry.format(
                        table, field_names,
                        ','.join(['?'] * (len(attributes) - 1)))
                    try:
                        self.curr.execute(sql, attributes)
                        self.curr.execute(
                            'Select a_node, b_node from links where link_id=?',
                            [vars["link_id"]])
                        a, b = self.curr.fetchone()
                        self.curr.executemany(
                            'update nodes set osm_id=? where node_id=?',
                            [[linknodes[intersections[i]], a],
                             [linknodes[intersections[i + 1]], b]])
                    except Exception as e:
                        data = list(vars.values())
                        logger.error(
                            "error when inserting link {}. Error {}".format(
                                data, e.args))
                        logger.error(sql)
                    vars["link_id"] += 1
                self.conn.commit()
            self.__emit_all(
                ["text", f"{counter:,} of {L:,} super links added"])
            self.links[osm_id] = []
        self.conn.commit()
        self.curr.close()
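A toy check of the direction encoding used above: OSM's oneway=yes maps to 1 (flow from a_node to b_node) and everything else to 0 (two-way); the -1 (reversed) case handled above is presumably produced elsewhere in the importer:

for tags in [{"oneway": "yes"}, {"oneway": "no"}, {}]:
    direction = (tags.get("oneway") == "yes") * 1
    print(tags, "->", direction)
# {'oneway': 'yes'} -> 1, the other two -> 0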
Example #20
# %%

proj_matrices.list()

# %% md

## Future traffic assignment

# %%

from aequilibrae.paths import TrafficAssignment, TrafficClass
from aequilibrae import logger

# %%

logger.info("\n\n\n TRAFFIC ASSIGNMENT FOR FUTURE YEAR")

# %%

demand = proj_matrices.get_matrix("demand_power_modeled")

# let's see which core we ended up getting. It should be 'gravity'
demand.names

# %%

# Let's use the IPF matrix
demand.computational_view("gravity")

assig = TrafficAssignment()
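The cells stop right after instantiating the assignment. A hedged continuation, assuming a graph object was built earlier in the notebook and following the usual TrafficAssignment setup sequence (field names are illustrative):

# %%

assig_class = TrafficClass("car", graph, demand)  # 'graph' assumed from an earlier cell
assig.set_classes([assig_class])
assig.set_vdf("BPR")
assig.set_vdf_parameters({"alpha": 0.15, "beta": 4.0})
assig.set_capacity_field("capacity")
assig.set_time_field("free_flow_time")
assig.set_algorithm("bfw")
assig.max_iter = 250
assig.rgap_target = 0.00001
assig.execute()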
Example #21
    def overpass_request(self,
                         data,
                         pause_duration=None,
                         timeout=180,
                         error_pause_duration=None):
        """
        Send a request to the Overpass API via HTTP POST and return the JSON
        response.

        Parameters
        ----------
        data : dict or OrderedDict
            key-value pairs of parameters to post to the API
        pause_duration : int
            how long to pause in seconds before the request; if None, a default
            pause of 5 seconds is used
        timeout : int
            the timeout interval for the requests library
        error_pause_duration : int
            how long to pause in seconds before re-trying requests if error

        Returns
        -------
        dict
        """

        # define the Overpass API URL and POST the query to its interpreter endpoint
        url = overpass_endpoint.rstrip("/") + "/interpreter"
        if pause_duration is None:
            time.sleep(5)
        start_time = time.time()
        self.report.append(
            f'Posting to {url} with timeout={timeout}, "{data}"')
        response = requests.post(url,
                                 data=data,
                                 timeout=timeout,
                                 headers=http_headers)

        # get the response size and the domain, log result
        size_kb = len(response.content) / 1000.0
        domain = re.findall(r"(?s)//(.*?)/", url)[0]
        msg = "Downloaded {:,.1f}KB from {} in {:,.2f} seconds".format(
            size_kb, domain,
            time.time() - start_time)
        self.report.append(msg)
        logger.info(msg)

        try:
            response_json = response.json()
            if "remark" in response_json:
                msg = f'Server remark: "{response_json["remark"]}"'
                self.report.append(msg)
                logger.info(msg)
        except Exception:
            # 429 is 'too many requests' and 504 is 'gateway timeout' from server
            # overload - handle these errors by recursively calling
            # overpass_request until we get a valid response
            if response.status_code in [429, 504]:
                # pause for error_pause_duration seconds before re-trying request
                if error_pause_duration is None:
                    error_pause_duration = 5
                msg = "Server at {} returned status code {} and no JSON data. Re-trying request in {:.2f} seconds.".format(
                    domain, response.status_code, error_pause_duration)
                self.report.append(msg)
                logger.info(msg)
                time.sleep(error_pause_duration)
                response_json = self.overpass_request(
                    data=data, pause_duration=pause_duration, timeout=timeout)

            # else, this was an unhandled status_code, throw an exception
            else:
                self.report.append(
                    f"Server at {domain} returned status code {response.status_code} and no JSON data"
                )
                raise Exception(
                    f"Server returned no JSON data.\n{response} {response.reason}\n{response.text}"
                )

        return response_json
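A hypothetical call to the method above: the Overpass interpreter expects the query under the 'data' form field (the instance name and the Overpass QL string are illustrative):

query = '[out:json][timeout:180];way["highway"](-37.83,144.95,-37.80,145.00);out;'
response_json = downloader.overpass_request(data={"data": query}, timeout=180)
print(len(response_json.get("elements", [])))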
Example #22
    def execute(self):

        for c in self.traffic_classes:
            c.graph.set_graph(self.time_field)

        logger.info("{} Assignment STATS".format(self.algorithm))
        logger.info("Iteration, RelativeGap, stepsize")
        for self.iter in range(1, self.max_iter + 1):
            flows = []
            aon_flows = []

            for c in self.traffic_classes:
                aon = allOrNothing(c.matrix, c.graph, c._aon_results)
                aon.execute()
                c._aon_results.total_flows()
                aon_flows.append(c._aon_results.total_link_loads * c.pce)
            self.aon_total_flow = np.sum(aon_flows, axis=0)

            if self.iter == 1:
                for c in self.traffic_classes:
                    copy_two_dimensions(c.results.link_loads, c._aon_results.link_loads, self.cores)
                    c.results.total_flows()
                    copy_one_dimension(c.results.total_link_loads, c._aon_results.total_link_loads, self.cores)
                    if c.results.num_skims > 0:
                        copy_three_dimensions(c.results.skims.matrix_view, c._aon_results.skims.matrix_view, self.cores)
                    flows.append(c.results.total_link_loads * c.pce)
            else:
                self.__calculate_step_direction()
                self.calculate_stepsize()
                for c in self.traffic_classes:
                    stp_dir = self.step_direction[c.mode]
                    cls_res = c.results
                    linear_combination(cls_res.link_loads, stp_dir.link_loads, cls_res.link_loads, self.stepsize,
                                       self.cores)
                    # TODO: We need to compute the step direction for skims as well.
                    #       It is probably a matter of transforming the step_direction values from numpy arrays to
                    #       full AssignmentResults() ones, and cleaning the stuff we don't need
                    if cls_res.num_skims > 0:
                        linear_combination_skims(cls_res.skims.matrix_view,
                                                 stp_dir.skims.matrix_view,
                                                 cls_res.skims.matrix_view,
                                                 self.stepsize,
                                                 self.cores)
                    cls_res.total_flows()
                    flows.append(cls_res.total_link_loads * c.pce)

            self.fw_total_flow = np.sum(flows, axis=0)

            # Check convergence
            # This needs to be done with the current costs, and not the future ones
            if self.iter > 1:
                if self.check_convergence():
                    if self.steps_below >= self.steps_below_needed_to_terminate:
                        break
                    else:
                        self.steps_below += 1

            self.vdf.apply_vdf(
                self.congested_time, self.fw_total_flow, self.capacity, self.free_flow_tt, *self.vdf_parameters
            )

            for c in self.traffic_classes:
                c.graph.cost = self.congested_time
                c._aon_results.reset()
            logger.info("{},{},{}".format(self.iter, self.rgap, self.stepsize))

        if self.rgap > self.rgap_target:
            logger.error("Desired RGap of {} was NOT reached".format(self.rgap_target))
        logger.info(
            "{} Assignment finished. {} iterations and {} final gap".format(self.algorithm, self.iter, self.rgap)
        )
Example #23
    def execute(self):
        for c in self.traffic_classes:
            c.graph.set_graph(self.time_field)

        logger.info("{} Assignment STATS".format(self.algorithm))
        logger.info("Iteration, RelativeGap, stepsize")
        for self.iter in range(1, self.max_iter + 1):
            if pyqt:
                self.equilibration.emit(['rgap', self.rgap])
                self.equilibration.emit(['iterations', self.iter])
            flows = []
            aon_flows = []

            for c in self.traffic_classes:
                aon = allOrNothing(c.matrix, c.graph, c._aon_results)
                if pyqt:
                    aon.assignment.connect(self.signal_handler)
                aon.execute()
                c._aon_results.total_flows()
                aon_flows.append(c._aon_results.total_link_loads * c.pce)
            self.aon_total_flow = np.sum(aon_flows, axis=0)

            if self.iter == 1:
                for c in self.traffic_classes:
                    copy_two_dimensions(c.results.link_loads,
                                        c._aon_results.link_loads, self.cores)
                    c.results.total_flows()
                    copy_one_dimension(c.results.total_link_loads,
                                       c._aon_results.total_link_loads,
                                       self.cores)
                    if c.results.num_skims > 0:
                        copy_three_dimensions(c.results.skims.matrix_view,
                                              c._aon_results.skims.matrix_view,
                                              self.cores)
                    flows.append(c.results.total_link_loads * c.pce)
            else:
                self.__calculate_step_direction()
                self.calculate_stepsize()
                for c in self.traffic_classes:
                    stp_dir = self.step_direction[c.mode]
                    cls_res = c.results
                    linear_combination(cls_res.link_loads, stp_dir.link_loads,
                                       cls_res.link_loads, self.stepsize,
                                       self.cores)
                    if cls_res.num_skims > 0:
                        linear_combination_skims(cls_res.skims.matrix_view,
                                                 stp_dir.skims.matrix_view,
                                                 cls_res.skims.matrix_view,
                                                 self.stepsize, self.cores)
                    cls_res.total_flows()
                    flows.append(cls_res.total_link_loads * c.pce)

            self.fw_total_flow = np.sum(flows, axis=0)

            # Check convergence
            # This needs to be done with the current costs, and not the future ones
            if self.iter > 1:
                if self.check_convergence():
                    if self.steps_below >= self.steps_below_needed_to_terminate:
                        break
                    else:
                        self.steps_below += 1

            self.vdf.apply_vdf(self.congested_time, self.fw_total_flow,
                               self.capacity, self.free_flow_tt,
                               *self.vdf_parameters)

            for c in self.traffic_classes:
                c.graph.cost = self.congested_time
                if self.time_field in c.graph.skim_fields:
                    idx = c.graph.skim_fields.index(self.time_field)
                    c.graph.skims[:, idx] = self.congested_time[:]
                c._aon_results.reset()
            logger.info("{},{},{}".format(self.iter, self.rgap, self.stepsize))

        if self.rgap > self.rgap_target:
            logger.error("Desired RGap of {} was NOT reached".format(
                self.rgap_target))
        logger.info(
            f"{self.algorithm} Assignment finished. {self.iter} iterations and {self.rgap} final gap"
        )
        if pyqt:
            self.equilibration.emit(['rgap', self.rgap])
            self.equilibration.emit(['iterations', self.iter])
            self.equilibration.emit(['finished_threaded_procedure'])
Example #24
    def importing_links(self, node_count):
        node_ids = {}

        vars = {}
        vars["link_id"] = 1
        table = "links"
        fields = self.get_link_fields()
        field_names = ",".join(fields)
        fn = ",".join(['"{}"'.format(x) for x in field_names.split(",")])

        logger.info("Adding network links")
        self.__emit_all(["text", "Adding network links"])
        L = len(list(self.links.keys()))
        self.__emit_all(["maxValue", L])

        nodes_to_add = set()
        counter = 0
        mode_codes, not_found_tags = self.modes_per_link_type()

        for osm_id, link in self.links.items():
            self.__emit_all(["Value", counter])
            counter += 1
            vars["osm_id"] = osm_id
            linknodes = link["nodes"]
            linktags = link["tags"]

            indices = np.searchsorted(node_count[:, 0], linknodes)
            nodedegree = node_count[indices, 1]

            # Makes sure that beginning and end are end nodes for a link
            nodedegree[0] = 2
            nodedegree[-1] = 2

            intersections = np.where(nodedegree > 1)[0]
            segments = intersections.shape[0] - 1

            owf, twf = self.field_osm_source()

            # Attributes that are common to all individual links/segments
            vars["direction"] = (linktags.get("oneway") == "yes") * 1

            for k, v in owf.items():
                attr_value = linktags.get(v)
                if isinstance(attr_value, str):
                    attr_value = attr_value.replace('"', "'")
                    attr_value = '"{}"'.format(attr_value)

                vars[k] = attr_value

            for k, v in twf.items():
                val = linktags.get(v["osm_source"])
                for d1, d2 in [("ab", "forward"), ("ba", "backward")]:
                    vars["{}_{}".format(k, d1)] = self.__get_link_property(
                        d2, val, linktags, v)

            vars["modes"] = mode_codes.get(linktags.get("highway"),
                                           not_found_tags)

            if len(vars["modes"]) > 0:
                for i in range(segments):
                    geometry, attributes = self.__build_link_data(
                        vars, intersections, i, linknodes, node_ids, fields)
                    sql = self.insert_qry.format(table, fn, attributes,
                                                 geometry)
                    sql = sql.replace("None", "null")
                    try:
                        self.curr.execute(sql)
                        nodes_to_add.update([
                            linknodes[intersections[i]],
                            linknodes[intersections[i + 1]]
                        ])
                    except Exception as e:
                        data = list(vars.values())
                        logger.error(
                            "error when inserting link {}. Error {}".format(
                                data, e.args))
                        logger.error(sql)
                    vars["link_id"] += 1
            self.__emit_all(
                ["text", f"{counter:,} of {L:,} super links added"])

        return nodes_to_add, node_ids
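The searchsorted/degree logic shared by both importers decides where a way gets cut into links: any node appearing in more than one way (degree > 1) becomes a break point, and endpoints always terminate a segment. A standalone sketch:

import numpy as np

node_count = np.array([[10, 1], [11, 3], [12, 1], [13, 2]])  # sorted (node_id, occurrences)
linknodes = [10, 11, 12, 13]

indices = np.searchsorted(node_count[:, 0], linknodes)
nodedegree = node_count[indices, 1]
nodedegree[0] = 2   # force endpoints to be segment ends
nodedegree[-1] = 2

intersections = np.where(nodedegree > 1)[0]
print(intersections, "->", intersections.shape[0] - 1, "segments")
# [0 1 3] -> 2 segments: (10-11) and (11-12-13)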