def create_new_experiment(link_id):
    """Create a Tenant and a fresh experiment substrate for *link_id*.

    For the sentinel id ``"dummy"`` a powerlaw topology is generated;
    otherwise the operator links file identified by *link_id* is used.
    The new tenant and substrate are added to a fresh DB session and
    flushed (not committed here).

    :param link_id: operator link-set identifier, or "dummy" for a
                    generated powerlaw topology.
    :return: (tenant, su, rs) — the new Tenant, the substrate object,
             and the experiment result/state from
             clean_and_create_experiment.
    """
    session = Session()
    # NOTE(review): these two log lines look like leftovers from an
    # except block — sys.exc_info() is (None, None, None) outside
    # exception handling, so the message is meaningless here.
    # Fixed the call itself: the original passed an extra argument with
    # no %s placeholder, which makes logging raise a string-formatting
    # error at emit time.
    logging.info("Unexpected error: %s", sys.exc_info()[0])
    logging.debug("Failed to read data_sum from DB, reloading from file")
    if link_id == "dummy":
        print("powerlaw graph selecteds")
        rs, su = clean_and_create_experiment(
            ("powerlaw", (50, 2, 0.3, 1, 500000000, 20, 200,)), seed=5)
    else:
        print("links %s graph selected" % link_id)
        # Pass the seed by keyword for consistency with the branch above
        # (the original passed it positionally).
        rs, su = clean_and_create_experiment(("links", (link_id,)), seed=5)
    tenant = Tenant(name=link_id)
    session.add(tenant)
    session.add(su)
    # Flush so generated ids are available; the caller owns commit/rollback.
    session.flush()
    return tenant, su, rs
# NOTE(review): this is a fragment of the script's argument-parsing /
# main section — it begins mid-call (the opening of the add_argument
# whose `type=float)` tail starts this line is outside this view) and
# relies on names defined elsewhere (parser, valid_topo, RESULTS_FOLDER,
# clean_and_create_experiment, plotsol_from_db). Left byte-identical.
# Typo "restults" in the --dest_folder help string is user-visible text;
# not changed here.
# presumably args.auto / args.vhg / args.vcdn are defined by add_argument
# calls above this fragment — verify against the full file.
type=float) parser.add_argument('--sourcebw', help="cumulated source bw from every source (default 100 bits) ", default=10000, type=float) parser.add_argument('--topo', help="specify topo to use", default=('grid', ["5", "5", "100000000", "10", "200"]), type=valid_topo) parser.add_argument('--plot', dest="plot", action="store_true") parser.add_argument('--disable-heuristic', dest="disable_heuristic", action="store_true") parser.add_argument('--dest_folder', help="destination folder for restults", default=RESULTS_FOLDER) parser.add_argument('--json', help='display json results in stdout', dest="json", action="store_true") parser.add_argument('--base64', help='display json results in base64', dest="b64", action="store_true") args = parser.parse_args() if args.disable_embedding: rs, su = clean_and_create_experiment(args.topo, 0) su.write(RESULTS_FOLDER) plotsol_from_db(service_link_linewidth=5, net=True, substrate=su) subprocess.Popen( ["neato", os.path.join(RESULTS_FOLDER, "./substrate.dot"), "-Tsvg", "-o", os.path.join(args.dest_folder, "topo.svg")]).wait() shutil.copy(os.path.join(RESULTS_FOLDER, "./substrate.dot"), os.path.join(args.dest_folder, "substrate.dot")) else: if args.auto is False and (args.vhg is None or args.vcdn is None): parser.error('please specify --vhg and --vcdn args if not automatic calculation') elif args.auto is True and (args.vhg is not None or args.vcdn is not None): parser.error("can't specify vhg count of vcdn count in --auto mode")
# NOTE(review): another fragment of the same (or a parallel) CLI main
# section — it duplicates the --dest_folder/--json/--base64 options seen
# elsewhere in this file and ENDS mid-call: the subprocess.Popen([...])
# argument list is never closed within this view. Left byte-identical.
# When --json is set, the substrate is serialized to stdout (optionally
# base64-encoded); NOTE(review): base64.b64encode on a str will raise on
# Python 3 (it expects bytes) — presumably this was written for
# Python 2; confirm target interpreter before changing.
parser.add_argument('--dest_folder', help="destination folder for restults", default=RESULTS_FOLDER) parser.add_argument('--json', help='display json results in stdout', dest="json", action="store_true") parser.add_argument('--base64', help='display json results in base64', dest="b64", action="store_true") args = parser.parse_args() if args.disable_embedding: rs, su = clean_and_create_experiment(args.topo, 0) su.write(RESULTS_FOLDER) if args.json: topo = su.get_json() if args.b64: sys.stdout.write(base64.b64encode(json.dumps(topo))) else: sys.stdout.write(json.dumps(topo)) sys.stdout.flush() if args.plot: plotsol_from_db(service_link_linewidth=5, net=True, substrate=su) subprocess.Popen([ "neato", os.path.join(RESULTS_FOLDER, "./substrate.dot"), "-Tsvg", "-o", os.path.join(args.dest_folder, "topo.svg")
# NOTE(review): fragment from inside a function whose def line is not
# visible here (`link_id` is used free). Builds a networkx Graph either
# from a generated powerlaw substrate ("dummy") or by parsing an
# operator-<link_id>.links file (each line: a root node followed by its
# neighbours, space-separated), then keeps only the largest connected
# component. Left byte-identical.
# NOTE(review): nx.connected_component_subgraphs was removed in
# networkx 2.4 — this code pins an older networkx; confirm before
# upgrading. The max(...) over {subgraph: node_count}.items() with
# itemgetter(1) selects the subgraph with the most nodes.
# `pandas` is imported but not used in this visible span — presumably
# used further down; verify before removing.
# print("loading db done") # generate SLAS # sla = create_sla(client_count, cdn_count, vcdn_count) import networkx as nx import operator import os import pandas as pd g = nx.Graph() # load all the links if link_id == "dummy": print("powerlaw graph selecteds") _, su = clean_and_create_experiment(("powerlaw", (2000, 3, 0.5, 1, 1000000000, 20, 200,)), seed=6) g = su.get_nxgraph() else: print("links %s graph selected" % link_id) with open(os.path.join("offline/data", "links", "operator-%s.links" % link_id)) as f: for line in f.read().split("\n"): nodes = line.strip().split(" ") while len(nodes) >= 2: root = nodes.pop(0) for node in nodes: g.add_edge(root, node) # take the biggest connected subgraph g = max(list({sg: len(sg.nodes()) for sg in nx.connected_component_subgraphs(g)}.items()), key=operator.itemgetter(1))[0]