def get_solution(self) -> Graph:
    """Run the CP-SAT solver on the prepared model and return the result.

    Returns an empty Graph when the input graph is empty or when the solver
    fails to find a feasible solution within the 60-second time budget.
    """
    if self._graph.is_empty():
        return Graph()
    cp_solver = cp_model.CpSolver()
    cp_solver.parameters.max_time_in_seconds = 60.0  # cap the solve time
    result_status = cp_solver.Solve(self._model)
    if result_status in (cp_model.OPTIMAL, cp_model.FEASIBLE):
        return self._solution_to_graph(cp_solver)
    return Graph()
def testExtendSubgraph1():
    """Smoke test: extend a partial subgraph task on a 5-cycle and print results."""
    graph = Graph(5, 5, [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
    extender = ExtendSubgraph(graph, 101, 3)
    task = Task([0, 1, 1, 0, 0], [0, 3], 0, 0)
    newTasks = extender.generateNewTasks(task)
    # Fixed: Python 2 print statement -> Python 3 print() call; also avoid
    # shadowing the outer `task` variable with the loop variable.
    for new_task in newTasks:
        print(new_task.vertices, new_task.edges)
def generate_unbalanced_graph(self, nodes_in_cluster):
    """Generate a random graph with clusters of the given (possibly unequal) sizes.

    nodes_in_cluster: per-cluster node counts; assumed to sum to self.n — TODO confirm.
    Each intra-cluster edge is present with probability self.p_in, each
    inter-cluster edge with probability self.p_out.
    Returns a Graph(self.n, nodes, edges) with a float adjacency matrix.
    """
    nodes = []
    for i in range(self.clusters):
        nodes.extend([i] * nodes_in_cluster[i])
    random.shuffle(nodes)
    shape = (self.n, self.n)
    # Draw candidate adjacency matrices for both regimes, then select per cell.
    random_pin = np.random.choice([0, 1], shape, p=[1 - self.p_in, self.p_in])
    random_pout = np.random.choice([0, 1], shape, p=[1 - self.p_out, self.p_out])
    labels = np.asarray(nodes)
    same_cluster = labels[:, None] == labels[None, :]
    # Vectorized replacement of the original O(n^2) Python loop; astype(float)
    # matches the dtype of the original np.zeros buffer.
    edges = np.where(same_cluster, random_pin, random_pout).astype(float)
    return Graph(self.n, nodes, edges)
def testExtendSubgraph3():
    """Smoke test: extend a fully-selected triangle task and print results."""
    graph = Graph(3, 3, [(0, 1), (1, 2), (0, 2)])
    extender = ExtendSubgraph(graph, 101, 3)
    task = Task([1, 1, 1], [1], 0, 0)
    newTasks = extender.generateNewTasks(task)
    # Fixed: Python 2 print statement -> Python 3 print() call; also avoid
    # shadowing the outer `task` variable with the loop variable.
    for new_task in newTasks:
        print(new_task.vertices, new_task.edges)
def __init__(self, view):
    """Wire up the controller's collaborators and load configuration.

    view: the UI view this controller drives.
    Reads 'src/config.json' for the pickle file path and MySQL database name.
    """
    self.__view = view
    self.__file_view = FileReader()
    self.__database_view = DatabaseView()
    self.__graph_view = Graph()
    self.__query_creator = QueryCreator()
    self.__logger = Logger()
    # Fixed: 'src\config.json' contained the invalid escape '\c' (a
    # SyntaxWarning on modern Python) and a Windows-only separator; forward
    # slashes work on every platform.
    with open('src/config.json') as json_data_file:
        data = json.load(json_data_file)
    self.__serial_file = data['pickle']['file']
    self.__database_name = data['mysql']['db']
def readInput():
    """Read a graph from stdin and print its string representation.

    First line contains two integers: n = number of vertices and
    m = number of edges. Each of the next m lines contains "a b",
    representing an edge (a, b).
    """
    # Fixed: Python 2 raw_input/print statement -> Python 3 input()/print().
    n, m = map(int, input().split(" "))
    edges = []
    for _ in range(m):
        a, b = map(int, input().split(" "))
        edges.append((a, b))
    graph = Graph(n, m, edges)
    print(graph.toString())
def testInitalTaskGeneration2():
    """Smoke test: generate initial tasks for a triangle and print them."""
    graph = Graph(3, 3, [(0, 1), (1, 2), (0, 2)])
    initalTasks = genInitalTasks(graph, 101, 5)
    # Fixed: Python 2 print statement -> Python 3 print() call.
    for task in initalTasks:
        print(task.vertices, task.edges)
def testInitalTaskGeneration1():
    """Smoke test: generate initial tasks for a 5-cycle and print them."""
    graph = Graph(5, 5, [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
    initalTasks = genInitalTasks(graph, 101, 5)
    # Fixed: Python 2 print statement -> Python 3 print() call.
    for task in initalTasks:
        print(task.vertices, task.edges)
from src.graph.graph import Graph
from src.graph.graph import stringToGraph

# Round-trip check: parsing the serialized triangle graph must reproduce
# the same string form as building the triangle directly.
expected = Graph(3, 3, [(0, 1), (1, 2), (0, 2)]).toString()
assert stringToGraph("3$011101110").toString() == expected
def frontend_clear_graph():
    """Reset the module-level graph to an empty one and acknowledge with 200."""
    global graph
    graph = Graph()
    response = Response(status=200)
    return response
def backend_clear_graph():
    """Reset the module-level graph, persist it, and re-render the editor page."""
    global graph, graph_file_path
    graph = Graph()
    graph.save_as_json(graph_file_path)
    page = render_template(
        "backend-graph-editor.html",
        last_updated=dir_last_updated('data'),
    )
    return page
# Fixed: `os` was used by dir_last_updated (os.path.getmtime, os.walk) but
# never imported; added it and grouped imports stdlib / third-party / local.
import os
import re
from pathlib import Path

from flask import Flask, render_template, send_from_directory, request, Response, flash, redirect, url_for
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.wsgi import WSGIContainer

from src.graph.graph import Graph
from src.graph.vmtl_problem import VmtlProblem

app = Flask(__name__)
app.secret_key = b'blablabla'
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

graph = Graph()
Path("data").mkdir(parents=True, exist_ok=True)
graph_file_path: str = 'data/graph.json'


def dir_last_updated(folder):
    """Return the most recent modification time (as a string) of any file under *folder*."""
    return str(max(os.path.getmtime(os.path.join(root_path, f))
                   for root_path, dirs, files in os.walk(folder)
                   for f in files))


@app.route("/")
def backend_index():
    global graph, graph_file_path
    graph.save_as_json(graph_file_path, id_as_label=True)