def new_record(self, name: str, file_name: str, matrix=None) -> MatrixRecord:
    """Creates a new record for a matrix in disk, but does not save it

    If the matrix file is not already on disk, it will fail

    Args:
        *name* (:obj:`str`): Name of the matrix

        *file_name* (:obj:`str`): Name of the file on disk

        *matrix* (:obj:`AequilibraeMatrix`, optional): Matrix to export to
        *file_name*. When not provided, *file_name* must already exist on disk

    Return:
        *matrix_record* (:obj:`MatrixRecord`): A matrix record that can be manipulated in memory before saving
    """
    # Records are stored under the lower-cased name (see assignment below),
    # so uniqueness has to be checked against the lower-cased key as well;
    # otherwise "FOO" would silently overwrite an existing record "foo".
    if name.lower() in self.__items:
        raise ValueError(f"There is already a matrix of name ({name}). It must be unique.")

    for mat in self.__items.values():
        if mat.file_name == file_name:
            raise ValueError(f"There is already a matrix record for file name ({file_name}). It must be unique.")

    # A default of matrix=AequilibraeMatrix() would be a shared default
    # instance created once at import time (mutable-default pitfall);
    # None is used as the sentinel instead.
    if matrix is not None and matrix.cores > 0:
        if isfile(join(self.fldr, file_name)):
            raise FileExistsError(f"{file_name} already exists. Choose a different name or matrix format")

        mat_format = file_name.split(".")[-1].lower()
        if mat_format not in ["omx", "aem"]:
            raise ValueError("Matrix needs to be either OMX or native AequilibraE")

        matrix.export(join(self.fldr, file_name))
        cores = matrix.cores
    else:
        if not isfile(join(self.fldr, file_name)):
            # NOTE(review): FileExistsError kept for backward compatibility
            # with existing callers, although FileNotFoundError would
            # describe this condition better.
            raise FileExistsError(f"{file_name} does not exist. Cannot create this matrix record")
        # Open the existing file just long enough to learn its core count
        mat = AequilibraeMatrix()
        mat.load(join(self.fldr, file_name))
        cores = mat.cores
        mat.close()
        del mat

    tp = {key: None for key in self.__fields}
    tp["name"] = name
    tp["file_name"] = file_name
    tp["cores"] = cores
    mr = MatrixRecord(tp)
    mr.save()
    self.__items[name.lower()] = mr
    logger.warning("Matrix Record has been saved to the database")
    return mr
def clear_database(self) -> None:
    """Removes records from the matrices database that do not exist in disk"""
    self.curr.execute("Select name, file_name from matrices;")

    # Collect the names whose backing file is gone from the project folder
    missing = []
    for rec_name, rec_file in self.curr.fetchall():
        if not isfile(join(self.fldr, rec_file)):
            missing.append(rec_name)

    if not missing:
        return

    logger.warning(f'Matrix records not found in disk cleaned from database: {",".join(missing)}')

    # executemany expects one parameter sequence per record to delete
    self.curr.executemany("DELETE from matrices where name=?;", [[nm] for nm in missing])
    self.conn.commit()
def set_fixed_cost(self, field_name: str, multiplier=1):
    """Sets the graph field used as fixed cost (e.g. tolls) for this class

    Args:
        field_name (:obj:`str`): Name of the graph field with fixed costs for this class

        multiplier (:obj:`Union[float, int]`): Multiplier for the fixed cost. Defaults to 1 if not set
    """
    # Validate before mutating any state: the original assigned
    # fc_multiplier before checking the field, which left the instance
    # half-configured whenever validation raised.
    if field_name not in self.graph.graph.columns:
        raise ValueError('Field does not exist in the graph')

    if np.any(np.isnan(self.graph.graph[field_name].values)):
        # NOTE(review): no conversion is performed here; the message assumes
        # NaNs are zeroed downstream when costs are built — confirm against
        # the cost-assembly code.
        logger.warning(f'Cost field {field_name} has NaN values. Converted to zero')

    if self.graph.graph[field_name].min() < 0:
        msg = f'Cost field {field_name} has negative values. That is not allowed'
        logger.error(msg)
        raise ValueError(msg)

    self.fc_multiplier = float(multiplier)
    self.fixed_cost_field = field_name
def update_database(self) -> None:
    """Adds records to the matrices database for matrix files found on disk"""
    # Files already referenced by a record are skipped; only *.omx / *.aem
    # extensions are considered matrix files.
    known_files = {mat.file_name for mat in self.__items.values()}
    new_files = [
        fl
        for fl in os.listdir(self.fldr)
        if fl not in known_files and os.path.splitext(fl.lower())[1] in [".omx", ".aem"]
    ]

    if new_files:
        logger.warning(f'New matrix found on disk. Added to the database: {",".join(new_files)}')

    for fl in new_files:
        mat = AequilibraeMatrix()
        mat.load(join(self.fldr, fl))
        # Prefer the matrix's internal name (not available for OMX files),
        # falling back to the file name itself
        name = str(mat.name).lower() if not mat.is_omx() else None
        if not name:
            name = fl.lower()
        name = name.replace(".", "_").replace(" ", "_")
        # Deduplicate by appending the first free numeric suffix
        if name in self.__items:
            suffix = 0
            while f"{name}_{suffix}" in self.__items:
                suffix += 1
            name = f"{name}_{suffix}"
        rec = self.new_record(name, fl)
        rec.save()
def __build_directed_graph(self, network: pd.DataFrame, centroids: np.ndarray):
    """Expands the bi-directional link table into a directed-link graph.

    Each network link with `direction != 1` contributes a BA-direction edge
    and each link with `direction != -1` contributes an AB-direction edge
    (so `direction == 0` links yield both). Nodes are renumbered so that
    centroids come first, and a forward-star index is built over the sorted
    edge table.

    Args:
        network (:obj:`pd.DataFrame`): link table; assumed to carry columns
            `direction`, `a_node`, `b_node`, `link_id` and per-direction
            `*_ab` / `*_ba` data fields — TODO confirm against caller
        centroids (:obj:`np.ndarray`): centroid node IDs, placed first in
            the node numbering

    Returns:
        all_nodes, num_nodes, nodes_to_indices, fs, df — node array, node
        count, node-ID→index lookup, forward-star array, and the directed
        edge DataFrame
    """
    all_titles = list(network.columns)

    # Links that carry a BA direction (direction != 1) and an AB direction
    # (direction != -1), respectively
    not_pos = network.loc[network.direction != 1, :]
    not_negs = network.loc[network.direction != -1, :]

    names, types = self.__build_column_names(all_titles)

    # For the BA copy, pick the directionless column when present,
    # otherwise the *_ba variant, and rename everything to the base names
    neg_names = []
    for name in names:
        if name in not_pos.columns:
            neg_names.append(name)
        elif name + "_ba" in not_pos.columns:
            neg_names.append(name + "_ba")
    not_pos = pd.DataFrame(not_pos, copy=True)[neg_names]
    not_pos.columns = names
    not_pos.loc[:, "direction"] = -1
    # Swap a_node/b_node so the BA copy points the opposite way
    aux = np.array(not_pos.a_node.values, copy=True)
    not_pos.loc[:, "a_node"] = not_pos.loc[:, "b_node"]
    not_pos.loc[:, "b_node"] = aux[:]
    del aux

    # Same selection for the AB copy, using the *_ab variants
    pos_names = []
    for name in names:
        if name in not_negs.columns:
            pos_names.append(name)
        elif name + "_ab" in not_negs.columns:
            pos_names.append(name + "_ab")
    not_negs = pd.DataFrame(not_negs, copy=True)[pos_names]
    not_negs.columns = names
    not_negs.loc[:, "direction"] = 1

    df = pd.concat([not_negs, not_pos])

    # Now we take care of centroids
    # Renumber nodes so centroids occupy indices 0..len(centroids)-1
    nodes = np.unique(np.hstack((df.a_node.values, df.b_node.values))).astype(self.__integer_type)
    nodes = np.setdiff1d(nodes, centroids, assume_unique=True)
    all_nodes = np.hstack((centroids, nodes)).astype(self.__integer_type)

    num_nodes = all_nodes.shape[0]

    # Dense lookup from original node ID to new index; -1 marks IDs that
    # do not exist in the network
    nodes_to_indices = np.repeat(-1, int(all_nodes.max()) + 1)
    nlist = np.arange(num_nodes)
    nodes_to_indices[all_nodes] = nlist

    df.loc[:, "a_node"] = nodes_to_indices[df.a_node.values][:]
    df.loc[:, "b_node"] = nodes_to_indices[df.b_node.values][:]
    df = df.sort_values(by=["a_node", "b_node"])
    df.index = np.arange(df.shape[0])
    df.loc[:, "id"] = np.arange(df.shape[0])

    # Forward star: fs[i] is the first edge row whose a_node == i;
    # fs[-1] is the total edge count (sentinel)
    fs = np.empty(num_nodes + 1, dtype=self.__integer_type)
    fs.fill(-1)
    y, x, _ = np.intersect1d(df.a_node.values, nlist, assume_unique=False, return_indices=True)
    fs[y] = x[:]
    fs[-1] = df.shape[0]
    # Backward fill so nodes without outgoing edges point at the next
    # node's start. NOTE(review): the loop stops at index 1, so fs[0] is
    # never back-filled — presumably node 0 (a centroid) always has
    # outgoing links; confirm.
    for i in range(num_nodes, 1, -1):
        if fs[i - 1] == -1:
            fs[i - 1] = fs[i]

    nans = ", ".join([i for i in df.columns if df[i].isnull().any().any()])
    if nans:
        logger.warning(f"Field(s) {nans} has(ve) at least one NaN value. Check your computations")

    # Normalize dtypes of the structural columns
    df.loc[:, "b_node"] = df.b_node.values.astype(self.__integer_type)
    df.loc[:, "id"] = df.id.values.astype(self.__integer_type)
    df.loc[:, "link_id"] = df.link_id.values.astype(self.__integer_type)
    df.loc[:, "direction"] = df.direction.values.astype(np.int8)

    return all_nodes, num_nodes, nodes_to_indices, fs, df