def printConnectivity(baselineConnections, dir, scaler):
    """Run the optimizer over every trace log in *dir* and export .mat files.

    Prints the original connectivity matrix and all optimized connectivity
    matrices for the log files in *dir*; "prints" here means generating a
    .mat file in the matlab directory.

    baselineConnections -- baseline connections file name (under
        ./connectionFiles/).
    dir -- directory containing *.log trace files.
    scaler -- multiplier for BUs (larger scaler => finer granularity of
        provisioning).
    """
    entries = os.listdir(dir)
    # Per-file result arrays; dtype=object (np.object is deprecated) so the
    # name strings fit.
    fileList = np.zeros((len(entries),), dtype=object)
    percentageList = np.zeros((len(entries),))

    baseLineTopology = topology("./connectionFiles/" + baselineConnections)
    TM_original = None
    for i, filename in enumerate(entries):
        if filename.endswith(".log"):
            # str.strip(".log") removes *characters* from both ends (it would
            # mangle e.g. "log_a.log"); slice the suffix off instead.
            stem = filename[:-len(".log")]
            TM_original = trafficManager("./traceFiles/" + stem,
                                         baseLineTopology, 1)
            RTL_original = TM_original.returnRTL()
            TM_optimized = TM_original.genOptimizedTopology(
                "./connectionFiles/connectionsOptimized_scaler" + str(scaler)
                + "_" + stem, scaler)
            TM_optimized.generateConnectivityMat()
            percentageList[i] = ((RTL_original - TM_optimized.returnRTL())
                                 / RTL_original) * 100
            print(stem + ': ' + str(percentageList[i]) + '% Decrease')
            fileList[i] = stem

    # Generate mat for the baseline topology. Guard against a directory with
    # no .log files, which previously raised NameError here.
    if TM_original is not None:
        TM_original.generateConnectivityMat()

    # Generate mat for percentage-decrease data (renamed from `dict`, which
    # shadowed the builtin).
    mat_data = {'filename': fileList, 'percentDec': percentageList}
    scipy.io.savemat('/mnt/c/Users/bnsc5/Documents/MatlabProjects/OpticalInterconnects/Project/percentDec',
                     mdict=mat_data)
def convert(geojson, topojson, object_name=False, *args, **kwargs):
    """Convert GeoJSON to TopoJSON.

    geojson  -- input: a GeoJSON dict, a path string, or an open file object.
    topojson -- output: a path string, an open file object, or anything else
        to have the TopoJSON dict returned instead.
    object_name -- optional name under which a bare ('type'-keyed) GeoJSON
        object is nested before conversion.
    """
    if isinstance(geojson, dict):
        input_dict = geojson
    elif isinstance(geojson, (str, unicode)):
        # Path input: use a context manager so the handle is closed (the
        # original leaked it).
        with open(geojson) as inFile:
            input_dict = load(inFile)
            if (not object_name and 'type' in input_dict
                    and hasattr(inFile, 'name')
                    and inFile.name.lower().endswith('.geojson')):
                # Derive the object name from the file name, sans ".geojson".
                input_dict = {inFile.name[:-8].split('/')[-1]: input_dict}
    elif isinstance(geojson, file):
        input_dict = load(geojson)
    # NOTE(review): placement of this wrapping relative to the branches above
    # reconstructed from collapsed source — confirm against upstream history.
    if 'type' in input_dict:
        if object_name:
            input_dict = {object_name: input_dict}
        else:
            input_dict = {'name': input_dict}
    output_dict = topology(input_dict, *args, **kwargs)
    if isinstance(topojson, (str, unicode)):
        with open(topojson, 'w') as f:
            dump(output_dict, f)
    elif isinstance(topojson, file):
        dump(output_dict, topojson)
    else:
        return output_dict
def genOptimizedTopology_LIMIT(self,filename,scaler): ''' Generates the optimized topology filename => File that new connections file will be written to ONLY USE W/ SCALER EQUAL TO 1 ''' ##TO DO #1. Verify scaler functionality for recursive function #2. Optimize?? #initial recursive function call self.recursiveFunc(0,np.array([[0 for x in range(self.topology.dim)] for y in range(self.topology.dim)]),scaler) #print 'Total Matrices:\t\t' + str(len(globals.matrices)) #print 'Total Duplicate Matrices:\t' + str(len(globals.duplicates)) #Generate connections file from the optimizedMatrix member of class f = open(filename,'w') f.write("//Optimized Connections File\n//TraceFile: " + self.filename + '\n') for i in range(self.dim): f.write(str(i)) for j,element in enumerate(self.optimizedMatrix[:,i]): if element != 0: f.write(' ' + str(j) + '(' + str(element) + ')') f.write('\n') f.close() return trafficManager(self.filename,topology(filename),scaler)
def convert(geojson, topojson, object_name=False, *args, **kwargs):
    """Convert GeoJSON to TopoJSON.

    geojson  -- input: a GeoJSON dict, a path string, or an open file object.
    topojson -- output: a path string, an open file object, or anything else
        to have the TopoJSON dict returned instead.
    object_name -- optional name under which a bare ('type'-keyed) GeoJSON
        object is nested before conversion.
    """
    if isinstance(geojson, dict):
        input_dict = geojson
    elif isinstance(geojson, (str, unicode)):
        # Path input: close the handle when done (the original leaked it).
        with open(geojson) as inFile:
            input_dict = load(inFile)
            is_geojson_file = inFile.name.lower().endswith('.geojson')
            has_type = 'type' in input_dict
            has_name = hasattr(inFile, 'name')
            if not object_name and has_type and has_name and is_geojson_file:
                # Derive the object name from the file name, sans ".geojson".
                input_dict = {inFile.name[:-8].split('/')[-1]: input_dict}
    elif isinstance(geojson, file):
        input_dict = load(geojson)
    if 'type' in input_dict:
        if object_name:
            # BUG FIX: the original wrapped under the *literal* key
            # 'object_name' instead of the caller-supplied variable
            # (cf. the sibling variant of this function in this file).
            input_dict = {object_name: input_dict}
        else:
            input_dict = {'name': input_dict}
    output_dict = topology(input_dict, *args, **kwargs)
    if isinstance(topojson, (str, unicode)):
        with open(topojson, 'w') as f:
            dump(output_dict, f)
    elif isinstance(topojson, file):
        dump(output_dict, topojson)
    else:
        return output_dict
def __init__(self, molecule1, molecule2, covRadFactor=1.3):
    """Compare two molecules' internal coordinates (bonds, angles, dihedrals).

    molecule1, molecule2 -- molecule objects to compare; presumably validated
        by requirements_for_comparison — confirm its failure behaviour.
    covRadFactor -- covalent-radius multiplier used when detecting bonds.
    """
    # Validate the pair before doing any work. (The unused local
    # `errors = {}` from the original was removed.)
    requirements_for_comparison(molecule1, molecule2)
    self.molecule1 = molecule1
    self.molecule2 = molecule2
    self.topology1 = topo.topology(molecule1, covRadFactor)
    self.topology2 = topo.topology(molecule2, covRadFactor)
    # Ordered internal coordinates, so the two molecules can be compared
    # element-wise.
    self.orderedBonds1 = self.topology1.order_convalentBondDistances()
    self.orderedBonds2 = self.topology2.order_convalentBondDistances()
    #print "\n".join([str(elem) for elem in self.orderedBonds2])
    self.orderedAngles1 = self.topology1.order_angles()
    self.orderedAngles2 = self.topology2.order_angles()
    self.orderedDihedral1 = self.topology1.order_dihedralAngles()
    self.orderedDihedral2 = self.topology2.order_dihedralAngles()
    # Aggregate comparison errors per coordinate type.
    self.error_bonds = self.compare_bonds(percentLargest=1.5)
    self.error_angles = self.compare_angles()
    self.error_dihedrals = self.compare_dihedralAngles()
def seg_topology_list(self, re_prob, names):
    """Detect and list the topology relationships for every prediction in
    the target image, returning the combined segmentation results."""
    combined = []
    for entry in tqdm(re_prob):
        detected = topology(entry['pred_result'])
        print(detected.display_stage(names))
        combined.append(detected.combine_seg_result(entry['seg_re']))
    return combined
def printTraffic(baselineConnections, dir):
    """Print all traffic matrices as .mat files to the matlab directory.

    baselineConnections -- baseline connections file name (under
        ./connectionFiles/).
    dir -- directory containing *.log trace files.
    """
    # The baseline topology is loop-invariant; build it once instead of
    # re-reading the connections file for every log.
    baseLineTopology = topology("./connectionFiles/" + baselineConnections)
    for filename in os.listdir(dir):
        if filename.endswith(".log"):
            # str.strip(".log") strips *characters*, not the suffix; slice
            # the extension off instead.
            stem = filename[:-len(".log")]
            TM_original = trafficManager("./traceFiles/" + stem,
                                         baseLineTopology, 1)
            TM_optimized = TM_original.genOptimizedTopology(
                "./connectionFiles/connectionsOptimized_" + stem, 1)
            #TM_optimized.topology.printConnectivity()
            TM_optimized.generateTrafficMat()
def setUp(self):
    """Build a histidine topology and write its topology files for the tests."""
    # Reference XYZ geometry used by every test in this case.
    self.path_to_XYZ_format = "../../files/histidine.xyz"
    self.histidine_moleculeObject = xyz.parse_XYZ(self.path_to_XYZ_format)
    self.histidine_topologyObject = topology.topology(
        self.histidine_moleculeObject)
    #self.histidine_topologyObject.build_topology()
    # Write the topology files and remember their names so the tests can
    # compare them against the stored reference files below.
    [
        self.filename_config, self.filename_bonds, self.filename_angles,
        self.filename_dihedralAngles
    ] = self.histidine_topologyObject.write_topology_files()
    # Directory holding the expected ("golden") output files.
    self.path_to_stored_files = "./files/"
def convert(in_file, output=None, *args, **kwargs):
    """Read GeoJSON from *in_file*, convert it to TopoJSON, and either
    return the result (output is None) or write it to *output*."""
    with open(in_file) as src:
        geo = json.load(src)
    topo = topology(geo, *args, **kwargs)
    if output is not None:
        with open(output, 'w') as dst:
            json.dump(topo, dst)
        return None
    return topo
def test_sourceRoute(expectedSourceRoute):
    '''
    This tests the following topology

    MOTE_A <- MOTE_B <- MOTE_C <- MOTE_D
    '''
    sourceRoute = SourceRoute.SourceRoute()
    topo = topology.topology()

    # Register each mote's parent, building the chain shown above.
    sourceRoute.dispatch(
        signal='updateParents',
        data=(tuple(MOTE_B), [MOTE_A]),
    )
    sourceRoute.dispatch(
        signal='updateParents',
        data=(tuple(MOTE_C), [MOTE_B]),
    )
    sourceRoute.dispatch(
        signal='updateParents',
        data=(tuple(MOTE_D), [MOTE_C]),
    )

    # The fixture is a JSON pair: [destination, expected route].
    expectedDestination = json.loads(expectedSourceRoute)[0]
    expectedRoute = json.loads(expectedSourceRoute)[1]
    calculatedRoute = sourceRoute.getSourceRoute(expectedDestination)

    # log
    if log.isEnabledFor(logging.DEBUG):
        output = []
        output += ['\n']
        output += [
            'expectedDestination: {0}'.format(
                u.formatAddr(expectedDestination))
        ]
        output += ['expectedRoute:']
        for m in expectedRoute:
            output += ['- {0}'.format(u.formatAddr(m))]
        output += ['calculatedRoute:']
        for m in calculatedRoute:
            output += ['- {0}'.format(u.formatAddr(m))]
        output = '\n'.join(output)
        log.debug(output)

    assert calculatedRoute == expectedRoute
def test_sourceRoute(expectedSourceRoute):
    """Verify source-route computation over the chain
    MOTE_A <- MOTE_B <- MOTE_C <- MOTE_D."""
    sourceRoute = SourceRoute.SourceRoute()
    topo = topology.topology()

    # Register each mote's preferred parent, building the chain above.
    for mote, parent in ((MOTE_B, MOTE_A), (MOTE_C, MOTE_B), (MOTE_D, MOTE_C)):
        sourceRoute.dispatch(
            signal='updateParents',
            data=(tuple(mote), [parent]),
        )

    # The fixture is a JSON pair: [destination, expected route].
    expectedDestination = json.loads(expectedSourceRoute)[0]
    expectedRoute = json.loads(expectedSourceRoute)[1]
    calculatedRoute = sourceRoute.getSourceRoute(expectedDestination)

    # Emit a readable dump of both routes when debugging is enabled.
    if log.isEnabledFor(logging.DEBUG):
        lines = ['\n']
        lines.append('expectedDestination: {0}'.format(
            u.formatAddr(expectedDestination)))
        lines.append('expectedRoute:')
        lines.extend('- {0}'.format(u.formatAddr(m)) for m in expectedRoute)
        lines.append('calculatedRoute:')
        lines.extend('- {0}'.format(u.formatAddr(m)) for m in calculatedRoute)
        log.debug('\n'.join(lines))

    assert calculatedRoute == expectedRoute
def convert(inThing, outThing=None, options=None):
    """Convert GeoJSON (dict, path, or open file) to TopoJSON.

    inThing  -- a GeoJSON dict, a path string, or an open file object.
    outThing -- a path string, an open file object, or None to have the
        TopoJSON dict returned.
    options  -- options dict passed through to topology(); a 'name' key
        suppresses the automatic wrapping under the file name.
    """
    # Avoid a mutable default argument; None is the backward-compatible
    # stand-in for the old `options={}`.
    if options is None:
        options = {}
    if isinstance(inThing, dict):
        inpt = inThing
    elif isinstance(inThing, (str, unicode)):
        # Path input: close the handle when done (the original leaked it).
        # dict.has_key() was removed in Python 3; `in` works in both.
        with open(inThing) as inFile:
            inpt = load(inFile)
            if ('name' not in options and 'type' in inpt
                    and hasattr(inFile, 'name')
                    and inFile.name.lower().endswith('.geojson')):
                # Wrap under the file name, sans the ".geojson" suffix.
                inpt = {inFile.name[:-8].split('/')[-1]: inpt}
    elif isinstance(inThing, file):
        inpt = load(inThing)
        if ('name' not in options and 'type' in inpt
                and hasattr(inThing, 'name')
                and inThing.name.lower().endswith('.geojson')):
            inpt = {inThing.name[:-8].split('/')[-1]: inpt}
    out = topology(inpt, options)
    if isinstance(outThing, (str, unicode)):
        with open(outThing, 'w') as f:
            dump(out, f)
    elif isinstance(outThing, file):
        dump(out, outThing)
    else:
        return out
def genOptimizedTopology(self, filename, scaler):
    """Generate the optimized topology and write it as a connections file.

    filename -- file the new connections file will be written to.
    scaler -- multiplier applied to the baseline connectivity matrix before
        per-column optimization.

    Returns a trafficManager built over the newly written topology.
    """
    newCM = copy(self.topology.connectivityMatrix)
    newCM = np.multiply(newCM, scaler)
    # Iterate through sources (columns), optimizing each column and writing
    # it out as "<src> <dst>(<weight>) ..." with nonzero entries only.
    with open(filename, 'w') as f:
        f.write("//Optimized Connections File\n//TraceFile: " + self.filename + '\n')
        for i in range(self.dim):
            newCM[:, i] = self.optimizeConnectivity(newCM[:, i], i)
            f.write(str(i))
            for j, element in enumerate(newCM[:, i]):
                if element != 0:
                    f.write(' ' + str(j) + '(' + str(element) + ')')
            f.write('\n')
    # BUG FIX: this return was commented out, yet callers in this file
    # (printConnectivity/printTraffic) assign the result and call methods on
    # it — restoring it also matches genOptimizedTopology_LIMIT.
    return trafficManager(self.filename, topology(filename), scaler)
def convert(inThing, outThing=None, options=None):
    """Convert GeoJSON (dict, path, or open file) to TopoJSON.

    inThing  -- a GeoJSON dict, a path string, or an open file object.
    outThing -- a path string, an open file object, or None to have the
        TopoJSON dict returned.
    options  -- options dict passed through to topology(); a 'name' key
        suppresses the automatic wrapping under the file name.
    """
    # Avoid a mutable default argument; None is the backward-compatible
    # stand-in for the old `options={}`.
    if options is None:
        options = {}
    if isinstance(inThing, dict):
        inpt = inThing
    elif isinstance(inThing, (str, unicode)):
        # Path input: close the handle when done (the original leaked it).
        # dict.has_key() was removed in Python 3; `in` works in both.
        with open(inThing) as inFile:
            inpt = load(inFile)
            if ('name' not in options and 'type' in inpt
                    and hasattr(inFile, 'name')
                    and inFile.name.lower().endswith('.geojson')):
                # Wrap under the file name, sans the ".geojson" suffix.
                inpt = {inFile.name[:-8].split('/')[-1]: inpt}
    elif isinstance(inThing, file):
        inpt = load(inThing)
        if ('name' not in options and 'type' in inpt
                and hasattr(inThing, 'name')
                and inThing.name.lower().endswith('.geojson')):
            inpt = {inThing.name[:-8].split('/')[-1]: inpt}
    out = topology(inpt, options)
    if isinstance(outThing, (str, unicode)):
        with open(outThing, 'w') as f:
            dump(out, f)
    elif isinstance(outThing, file):
        dump(out, outThing)
    else:
        return out
def finish(p):
    """Run the program model, build and dump its code graph, then render the
    annotated listing to a /tmp file named after the input.

    p -- the project/program object (provides .run(), .c, .a attributes).
    """
    p.run()
    for i in p.c:
        p.c[i].to_tree()
    # Build basic blocks, segment them, and dump the graph as a dot file.
    ff = topology.topology(p)
    ff.build_bb()
    ff.segment()
    ff.dump_dot(
        digraph='size="7.00, 10.80"\nconcentrate=true\ncenter=true\n'
    )
    r = render.render(p)
    r.add_flows()
    # Output path: /tmp/_.<basename>[_<objname>].txt
    # (`is not None` replaces the non-idiomatic `!= None`).
    if p.a['objname'] is not None:
        fn = "/tmp/_." + p.a['basename'] + "_" + p.a['objname']
    else:
        fn = "/tmp/_." + p.a['basename']
    fn += ".txt"
    print("Output written to:", fn)
    r.render(fn)
help='Print version')
# NOTE(review): the line above is the tail of an add_argument() call whose
# start lies outside this chunk.
parser.add_argument('file', action='store', nargs=1, type=str, help='itp or rtp file')

args = parser.parse_args()

# Command-line flags.
bVersion = vars(args)['version']
bVerbose = vars(args)['verbose']

if bVersion:
    output('Version {:s}'.format(__version__))

# Build the topology object and unpack the remaining CLI arguments
# (args.ff/res/ro are nargs=1 lists; take the single element or None).
t = top.topology()
ff = args.ff[0]
if args.res:
    res = args.res[0]
else:
    res = None
topfile = args.file[0]
if args.ro:
    rofile = args.ro[0]
else:
    rofile = None
t.setFF(ff)
p.setlabel(0x7f4a, "TEST_LOOP") ####################################################################### for ax in (0x7e1c, 0x7e20): ins = cpu.ins[ax] const.seven_seg_lcmt(ins, p.m.rd(ins.lo + 1)) ####################################################################### # Move instructions to tree cpu.to_tree() ####################################################################### p.g = topology.topology(p) p.g.build_bb() if True: p.g.add_flow("IRQ", p.m.b16(0x7ff8)) p.g.add_flow("SWI", p.m.b16(0x7ffa)) p.g.add_flow("NMI", p.m.b16(0x7ffc)) p.g.add_flow("RST", p.m.b16(0x7ffe)) if True: p.g.segment() p.g.setlabels(p) p.g.dump_dot() p.g.xxx(p)
def simulate(MODEL, N, NI, S, M):
    """Generate time series of networks of dynamical systems for several
    different initial conditions.

    Parameters
    ----------
    MODEL : str
        Dynamical model on network units. Currently only 'kuramoto1',
        'kuramoto2', 'michaelis_menten' and 'roessler' are supported. For
        details see the methods section of the main manuscript.
    N : int
        Network size.
    NI : int
        Number of incoming connections per unit (NI < N).
    S : int
        Number of different time series.
    M : int
        Number of time points per time series.

    Output
    ------
    Data/data.dat     : all simulated time series, concatenated.
    Data/ts_param.dat : time-series parameters [S, M] for later splitting.

    Example
    -------
    simulate('kuramoto2', 25, 4, 30, 10) generates 30 time series of 10
    time points each for a network of 25 kuramoto2 oscillators with 4
    incoming connections per oscillator.

    Accompanying material to "Model-free inference of direct interactions
    from nonlinear collective dynamics". Author: Jose Casadiego, May 2017.
    """
    import os
    import shutil

    # Sampling rate of the time series.
    resolution = 1

    models = {'kuramoto1', 'kuramoto2', 'michaelis_menten', 'roessler'}
    if MODEL not in models:
        sys.exit('ERROR: MODEL must be a valid string:kuramoto1, kuramoto2, michaelis_menten, roessler')

    # Recreate the output directory. shutil/os replace the original
    # subprocess calls to `rm -r` / `mkdir`, which were Unix-only and
    # silently ignored failures.
    shutil.rmtree('Data/', ignore_errors=True)
    os.makedirs('Data/')

    print('Creating network structure...')
    topology(N, 'homogeneous', 'directed', NI)

    print('Simulating time series...')
    Y = np.array([])
    if MODEL == 'kuramoto1':
        # Natural frequencies drawn uniformly from [-2, 2).
        w = -2 + 4 * np.random.uniform(low=0., high=1., size=(N,))
        np.savetxt('Data/frequencies.dat', w, fmt='%.4f', delimiter='\t')
        for s in range(S):  # range works identically to the old xrange here
            init = -3.14 + (3.14 + 3.14) * np.random.uniform(0., 1., size=(N,))
            tspan = np.arange(0, M, resolution)
            y = odeint(kuramoto1, init, tspan)
            Y = np.vstack((Y, y)) if Y.size else y
    elif MODEL == 'kuramoto2':
        w = -2 + (4) * np.random.uniform(0., 1., size=(N,))
        np.savetxt('Data/frequencies.dat', w, fmt='%.4f', delimiter='\t')
        for s in range(S):
            init = -3.14 + (3.14 + 3.14) * np.random.uniform(0., 1., size=(N,))
            tspan = np.arange(0, M, resolution)
            y = odeint(kuramoto2, init, tspan)
            Y = np.vstack((Y, y)) if Y.size else y
    elif MODEL == 'michaelis_menten':
        for s in range(S):
            init = 1 + np.random.uniform(0., 1., size=(N,))
            tspan = np.arange(0, M, resolution)
            y = odeint(michaelis_menten, init, tspan)
            Y = np.vstack((Y, y)) if Y.size else y
    elif MODEL == 'roessler':
        for s in range(S):
            # Roessler units carry three state variables each, hence 3*N.
            init = -5 + (5 + 5) * np.random.uniform(0., 1., size=(3 * N,))
            tspan = np.arange(0, M, resolution)
            y = odeint(roessler, init, tspan)
            Y = np.vstack((Y, y)) if Y.size else y

    ts_param = [S, M]
    np.savetxt('Data/data.dat', Y, fmt='%.4f', delimiter='\t')
    np.savetxt('Data/ts_param.dat', ts_param, fmt='%i', delimiter='\t')
    print('Simulation finished!')
if len(message) == 0: sys.exit(0) # break if message[0] != "{": # and message[0] != '[' continue message = json.loads(message) print "2:" + str(message) if message["cmd"] == "start": ip = message["ip"] seed_ip = message["seed_ip"] community = message["com"] username = message["username"] password = message["password"] community, ipTraffic, collectionsName, config_name = topo.topology( ip, seed_ip, community, username, password ) print "ip traffic(start) : " + str(ipTraffic) print "indexTraffic(start) : " + str(indexTraffic) # config_name coll_config = connectDatabase(config_name) coll_config.update({"index": "0"}, {"$set": {"ip_traffic": str(ipTraffic)}}) coll_config.update({"index": "0"}, {"$set": {"community": str(community)}}) coll = connectDatabase(collectionsName) a = [] for x in coll.find(): a.append(x) for d in a: try: del d["_id"]
cpu.to_tree() ####################################################################### p.setlabel(0x0df, "rr(adr=@r1,wid=r4)") p.setlabel(0x16f, "r2:r3=sum(0x48:0x49,0x2f:0x30)") p.setlabel(0x1dd, "inc(adr=@R0,wid=R1)") p.setlabel(0x2f0, "toggle_P1.7()") p.setlabel(0x6a5, "delay(someN)") p.setlabel(0x6b2, "memcpy(0x1d,0x35,3)") p.setlabel(0x6b8, "memcpy(r0,r1,r6)") ####################################################################### # Build code graph if True: p.g = topology.topology(p) p.g.build_bb() p.g.segment() p.g.setlabels(p) p.g.dump_dot() p.g.xxx(p) ####################################################################### # Render output print("Render") r = render.render(p) r.add_flows() r.render("/tmp/_.cbm900_wdcd.txt")
# NOTE(review): this savemat call is the tail of a function whose start lies
# outside this chunk; the destination path is machine-specific.
scipy.io.savemat('/mnt/c/Users/bnsc5/Documents/MatlabProjects/OpticalInterconnects/Project/percentDec', mdict=dict)


if __name__ == "__main__":
    ################################################
    #Initialize global variables and timing module
    globals.init()
    timing.init()
    ################################################
    # baselineConnections = "p3_x16Large"
    # dir = './nvproflogs'
    # printTraffic(baselineConnections,dir)
    # #SCALER ONLY => (1,2,4,8)
    # printConnectivity(baselineConnections,dir,1)

    ##### TEST #####
    # Build the baseline topology and a traffic manager over one trace file.
    baseLineTopology = topology("./connectionFiles/p3_x16Large")
    TM_og = trafficManager('./traceFiles/cifar10_alexnet_parameterserver_6473', baseLineTopology, 1)
    # baselineTopology = topology("./connectionFiles/3x3")
    # traceGen('./traceFiles/3x3_trace',baselineTopology,100,None)
    # TM_og = trafficManager('./traceFiles/3x3_trace',baselineTopology,1)
    # Optimize with scaler 1 and compare RTL before/after — presumably RTL is
    # a latency metric; TODO confirm the expansion.
    TM_new1 = TM_og.genOptimizedTopology_LIMIT("./connectionFiles/testTestTest1", 1)
    # TM_new2 = TM_og.genOptimizedTopology_LIMIT("./connectionFiles/testTestTest2",2)
    #TM_new4 = TM_og.genOptimizedTopology_LIMIT("./connectionFiles/testTestTest4",4)
    print 'Original RTL:\t'+str(TM_og.returnRTL())
    print 'Optimized RTL:\t'+str(TM_new1.returnRTL()) + ' [Scaler: '+str(TM_new1.scaler)+']'
    # print 'Optimized RTL:\t'+str(TM_new2.returnRTL()) + ' [Scaler: '+str(TM_new2.scaler)+']'
    #print 'Optimized RTL:\t'+str(TM_new4.returnRTL()) + ' [Scaler: '+str(TM_new4.scaler)+']'
# encoding: utf-8
import topology
import json

# topology.topology() returns four parallel lists: IP addresses, ports,
# adjacent-node id lists, and data lists — one entry per node.
tp = topology.topology()
IP, PORT, adjID, datalist = tp[0], tp[1], tp[2], tp[3]

# Assemble one JSON-friendly record per node; IDs are 1-based.
js = [
    {
        "ID": idx + 1,
        "IP": IP[idx],
        "PORT": PORT[idx],
        "adjID": adjID[idx],
        "datalist": datalist[idx],
    }
    for idx in range(len(IP))
]

try:
    a = json.dumps(js)
except Exception:
    print("JSON格式错误")
else:
    print(a)
# CDP (Cisco Discovery Protocol) neighbour-cache OIDs.
CISCOCDPMIB_cdpCacheDeviceId = ".1.3.6.1.4.1.9.9.23.1.2.1.1.6"
CISCOCDPMIB_cdpCacheDevicePort = ".1.3.6.1.4.1.9.9.23.1.2.1.1.7"
###########
IFMIB_ifIndex = ".1.3.6.1.2.1.2.2.1.1"  # index
CISCOSMI_ciscoMgmt = ".1.3.6.1.4.1.9.9.68.1.2.2.1.2"  # vlan interface
########### forwarding blocking
# NOTE(review): this OID lacks the leading '.' that all the others carry —
# confirm whether that is intentional.
BRIDGEMIB_dot1dStpPort = "1.3.6.1.2.1.17.2.15.1.1"  #stp port
BRIDGEMIB_dot1dStpPortState = ".1.3.6.1.2.1.17.2.15.1.3"  #stp port state
#BRIDGEMIB_dot1dBasePort = ".1.3.6.1.2.1.17.1.4.1.1" #
BRIDGEMIB_dot1dBasePortIfIndex = ".1.3.6.1.2.1.17.1.4.1.2"  # port index
#IFMIB_ifIndex
#IFMIB_ifDescr

# Running counter of traffic snapshots collected so far.
indexTraffic = 0

# Discover the topology once, then poll traffic in an endless loop.
community, ipTraffic, collectionsName, config_name = topo.topology(
    your_ip, ip, community, username, password)
while (True):
    indexTraffic, traffic_datetime = traffic.traffic(community, ipTraffic,
                                                     collectionsName,
                                                     indexTraffic)
    #print "index traffic :" + str(indexTraffic) + "+++++++"
    collectionsNameTopo = collectionsName
    # Each poll stores its snapshot in a per-iteration collection suffixed
    # with the previous index.
    collectionsNameTraff = str(collectionsName) + "_traffic_" + str(
        indexTraffic - 1)
    #print collectionsNameTraff
    aaa, collectionsName_traffic_new = an.anaysit(collectionsNameTopo,
                                                  collectionsNameTraff,
                                                  indexTraffic)
    ##topo_config_name
    # NOTE(review): the update() call below is cut off at the end of this chunk.
    coll_config = router.connectDatabase(config_name)
    coll_config.update({"index": "0"},
while p.run(): pass ####################################################################### # import explore explore.brute_force(p, cpu, 0x000, 0x800) ####################################################################### # cpu.to_tree() ####################################################################### # Build code graph if True: p.g = topology.topology(p.t) p.g.segment() p.g.setlabels(p) p.g.dump_dot() p.g.xxx(p) ####################################################################### # Render output r = render.render(p) r.add_flows() r.render("/tmp/_.cdp1802.txt")
def setUp(self):
    """Parse the histidine XYZ fixture and build its topology object."""
    # Reference XYZ geometry used by every test in this case.
    self.path_to_XYZ_format = "../../files/histidine.xyz"
    # Show full diffs on assertion failures.
    self.maxDiff = None
    self.histidine_moleculeObject = xyz.parse_XYZ(self.path_to_XYZ_format)
    self.histidine_topologyObject = topology.topology(
        self.histidine_moleculeObject)
def test_vRouter(self, cfy):
    """Run every scenario from the test-scenario YAML against the vRouter.

    For each scenario: deploy the matching topology via the orchestrator
    *cfy*, execute its function or performance tests, clean up the VNFs,
    and undeploy. Returns a PASS result record when all tests passed, or a
    step-failure / FAIL result record on the first error.

    NOTE(review): indentation reconstructed from collapsed source — confirm
    nesting (especially where test_result.append sits) against upstream.
    """
    result = False
    test_result = []
    test_scenario_list = self.test_scenario_yaml["test_scenario_list"]
    for test_scenario in test_scenario_list:
        if test_scenario["test_type"] == "function_test":
            function_test_scenario = test_scenario
            # FUNCTION TEST TOPOLOGY INITIALISATION
            function_tplgy = topology(orchestrator=cfy, logger=self.logger)
            result_data = self.init_function_testToplogy(
                function_tplgy, function_test_scenario)
            if result_data["status"] == "FAIL":
                return result_data
            # FUNCTION TEST TOPOLOGY DEPLOYMENT
            blueprint_info = \
                {"url": self.FUNCTION_TEST_TPLGY_BLUEPRINT,
                 "blueprint_name": self.FUNCTION_TEST_TPLGY_BP_NAME,
                 "deployment_name": self.FUNCTION_TEST_TPLGY_DEPLOY_NAME}
            result_data = self.deploy_testTopology(function_tplgy,
                                                   blueprint_info)
            if result_data["status"] == "FAIL":
                return result_data
            # Give the deployed topology time to stabilise before testing.
            time.sleep(self.TPLGY_STABLE_WAIT)
            # FUNCTION TEST EXECUTION
            test_result_data_list = []
            function_test_list = function_test_scenario[
                "function_test_list"]
            for function_test in function_test_list:
                test_list = function_test["test_list"]
                target_vnf_name = function_test["target_vnf_name"]
                for test_info in test_list:
                    self.logger.info(test_info["protocol"] + " " +
                                     test_info["test_kind"] + " test.")
                    (result, test_result_data) = self.function_test_vRouter(
                        cfy, target_vnf_name, test_info)
                    test_result_data_list.append(test_result_data)
                    # Stop this test list at the first failing test.
                    if not result:
                        break
            test_result.append(
                self.util.convert_functional_test_result(
                    test_result_data_list))
            self.logger.debug("request vnf's delete.")
            self.util.request_vm_delete(self.vnf_info_list)
            # FUNCTION TEST TOPOLOGY UNDEPLOYMENT
            function_tplgy.undeploy_vnf(
                self.FUNCTION_TEST_TPLGY_DEPLOY_NAME)
        elif test_scenario["test_type"] == "performance_test":
            performance_test_scenario = test_scenario
            # PERFORMANCE_ TEST TOPOLOGY INITIALISATION
            performance_tplgy = topology(orchestrator=cfy,
                                         logger=self.logger)
            result_data = self.init_performance_testToplogy(
                performance_tplgy, performance_test_scenario)
            if result_data["status"] == "FAIL":
                return result_data
            # PERFORMANCE TEST TOPOLOGY DEPLOYMENT
            blueprint_info = \
                {"url": self.PERFORMANCE_TPLGY_BLUEPRINT,
                 "blueprint_name": self.PERFORMANCE_TPLGY_BP_NAME,
                 "deployment_name": self.PERFORMANCE_TPLGY_DEPLOY_NAME}
            result_data = self.deploy_testTopology(performance_tplgy,
                                                   blueprint_info)
            if result_data["status"] == "FAIL":
                return result_data
            time.sleep(self.TPLGY_STABLE_WAIT)
            # PERFORMANCE TEST EXECUTION
            performance_test_list = performance_test_scenario[
                "performance_test_list"]
            for performance_test_info in performance_test_list:
                result = self.performance_test_vRouter(
                    cfy, performance_test_scenario, performance_test_info)
            self.logger.debug("request vnf's delete.")
            self.util.request_vm_delete(self.vnf_info_list)
            # PERFORMANCE TEST TOPOLOGY UNDEPLOYMENT
            performance_tplgy.undeploy_vnf(
                self.PERFORMANCE_TPLGY_DEPLOY_NAME)
        else:
            return self.step_failure("testing_vRouter",
                                     "Error : Unknown topology type.")
    self.util.write_result_data(test_result)
    if result:
        return self.set_resultdata(self.testcase_start_time,
                                   self.end_time_ts, "PASS", self.results)
    return self.step_failure("testing_vRouter",
                             "Error : Faild to test execution.")
# NOTE(review): this append is the tail of a thread-spawning loop whose start
# lies outside this chunk.
threads.append(thread)

print "Flushing the route tables...."
# Wait for every flush worker to finish before rebuilding routes.
for thread in threads:
    thread.join()
print "Flushing Complete."

print "Preparing to Create a "+topolog+" topology"
# Grid topologies require a perfect-square node count.
if((topolog == 'Grid') and round(sqrt(Station))!=sqrt(Station)):
    print('For grid topology, the number of nodes has to be a square number! Choose a new n.')
adjMatrix=topology(topolog,Station)
(row,col) = adjMatrix.shape
print adjMatrix
for i in range(0,row):
    # Derive each node's data directory from the last two octets of its IP.
    ipsplit = ipAddress[i].split('.')
    j=ipsplit[2]
    k=ipsplit[3]
    routeFilename = './Data/n-'+j+'-'+k+'/routeTable'
    # NOTE(review): 'wa' is not a standard open() mode (ValueError on
    # Python 3) — confirm whether 'w' or 'a' was intended.
    routeFile = open(routeFilename,'wa')
    nonrouteFilename = './Data/n-'+j+'-'+k+'/nonrouteTable'
    nonrouteFile = open(nonrouteFilename,'wa')
import requests
import json
from topology import topology

# OpenDaylight controller endpoint and the RESTCONF URL of the BGP-LS
# example linkstate topology.
controller = '192.168.31.128:8181'
url_topology = 'http://{}/restconf/operational/network-topology:network-topology/topology/example-linkstate-topology'.format(
    controller)
# NOTE(review): defined but unused — the request below uses basic auth
# instead; confirm whether this header was meant to be passed.
headers = {'Authorization': 'admin:<admin>'}

r = requests.get(url_topology, auth=('admin', 'admin'))
msg = json.loads(r.text)

# Parse the RESTCONF payload and extract the JavaScript-ready topology dump.
topo = topology(msg)
d = topo.get_topology()
d_json = topo.topology_jscript

# Write the visualisation data file; `with` guarantees the handle is closed
# (the original left it open).
with open('data4.js', 'w+') as f:
    f.write(d_json)