def test_reversed_direction(equipment, setup, requests, services):
    """ checks that if spectrum is selected on one direction it is also selected on reversed direction """
    network, oms_list = setup
    disjunctions = deduplicate_disjunctions(disjunctions_from_json(services))
    paths = compute_path_dsjctn(network, equipment, requests, disjunctions)
    # one reversed path per forward path; empty placeholder when no path was found
    rev_pths = [find_reversed_path(pth) if pth else [] for pth in paths]
    # build the list of spectrum slots that will be used for each request. For this purpose
    # play the selection part of path_assign_spectrum
    spectrum_list = []
    for rqst, pth in zip(requests, paths):
        if not pth:
            spectrum_list.append([])
            continue
        nb_wl = ceil(rqst.path_bandwidth / rqst.bit_rate)
        requested_m = ceil(rqst.spacing / slot) * nb_wl
        (center_n, startn, stopn), _ = spectrum_selection(pth, oms_list, requested_m,
                                                          requested_n=None)
        spectrum_list.append([center_n, startn, stopn])
    pth_assign_spectrum(paths, requests, oms_list, rev_pths)
    # pth-assign concatenates path and reversed path
    for i, pth in enumerate(paths):
        # verifies that each element (not trx and not roadm) in the path has same
        # spectrum occupation
        if not pth:
            continue
        this_path = [elem for elem in pth
                     if not isinstance(elem, (Roadm, Transceiver))]
        print(f'path {[el.uid for el in this_path]}')
        this_revpath = [elem for elem in rev_pths[i]
                        if not isinstance(elem, (Roadm, Transceiver))]
        print(f'rev_path {[el.uid for el in this_revpath]}')
        print('')
        for j, rev_elem in enumerate(this_revpath):
            # elements are traversed in opposite orders on the two directions
            fwd_elem = this_path[len(this_path) - j - 1]
            imin = rev_elem.oms.spectrum_bitmap.geti(spectrum_list[i][1])
            imax = rev_elem.oms.spectrum_bitmap.geti(spectrum_list[i][2])
            print(f'rev_elem {rev_elem.uid}')
            print(f' elem {fwd_elem.uid}')
            print(f'\trev_spectrum: {rev_elem.oms.spectrum_bitmap.bitmap[imin:imax]}')
            print(f'\t spectrum: {fwd_elem.oms.spectrum_bitmap.bitmap[imin:imax]}')
            assert rev_elem.oms.spectrum_bitmap.bitmap[imin:imax] == \
                fwd_elem.oms.spectrum_bitmap.bitmap[imin:imax]
def test_n_m_requests(setup, equipment, n, m, final_n, final_m, blocking_reason, request_set):
    """ test that various N and M values for a request end up with the correct path assignment """
    network, oms_list = setup
    # add an occupation on one of the span of the expected path OMS list on both directions
    # as defined by its offsets within the OMS list: [17, 20, 13, 22] and reversed path [19, 16, 21, 26]
    expected_path = [17, 20, 13, 22]
    expected_oms = [13, 16, 17, 19, 20, 21, 22, 26]
    some_oms = oms_list[expected_oms[3]]
    some_oms.assign_spectrum(-30, 32)  # means that spectrum is occupied from indexes -62 to 1 on reversed path
    params = request_set
    params['effective_freq_slot'] = {'N': n, 'M': m}
    rqs = [PathRequest(**params)]
    paths = compute_path_dsjctn(network, equipment, rqs, [])
    # check that the computed path is the expected one (independant of blocking issues due to spectrum)
    # NOTE: use dict.fromkeys for an ORDER-PRESERVING de-duplication; the previous
    # list(set(...)) only matched the ordered expected list by accident of CPython's
    # set iteration order for these particular int values
    path_oms = list(dict.fromkeys(e.oms_id for e in paths[0]
                                  if not isinstance(e, (Transceiver, Roadm))))
    assert path_oms == expected_path
    # function to be tested:
    pth_assign_spectrum(paths, rqs, oms_list, [find_reversed_path(paths[0])])
    # check that spectrum is correctly assigned
    assert rqs[0].N == final_n
    assert rqs[0].M == final_m
    assert getattr(rqs[0], 'blocking_reason', None) == blocking_reason
def test_spectrum_assignment_on_path(equipment, setup, requests):
    """ test assignment functions on path and network """
    network, oms_list = setup
    req = [deepcopy(requests[1])]
    paths = compute_path_dsjctn(network, equipment, req, [])
    print(req)
    for attempt in range(100):
        req = [deepcopy(requests[1])]
        (center_n, startn, stopn), path_oms = spectrum_selection(paths[0], oms_list, 4)
        pth_assign_spectrum(paths, req, oms_list, [find_reversed_path(paths[0])])
        print(f'testing on following oms {path_oms}')
        # check that only 96 channels are feasible
        print(center_n, startn, stopn)
        if attempt >= 96:
            print('only 96 channels of 4 slots pass in this grid')
            # grid exhausted: selection must return the (None, None, None) triplet
            assert (center_n, startn, stopn) == (None, None, None)
        else:
            print('at least 96 channels of 4 slots should pass in this grid')
            assert all(v is not None for v in (center_n, startn, stopn))
    req = [requests[2]]
    paths = compute_path_dsjctn(network, equipment, req, [])
    # requested center index 478 lies outside the usable grid -> must fail
    (center_n, startn, stopn), path_oms = spectrum_selection(paths[0], oms_list, 4, 478)
    print(oms_list[0].spectrum_bitmap.freq_index_max)
    print(oms_list[0])
    print(center_n, startn, stopn)
    print('spectrum selection error: should be None')
    assert (center_n, startn, stopn) == (None, None, None)
    # center index 477 is still inside the grid -> must succeed
    (center_n, startn, stopn), path_oms = spectrum_selection(paths[0], oms_list, 4, 477)
    print(center_n, startn, stopn)
    print('spectrum selection error should not be None')
    assert all(v is not None for v in (center_n, startn, stopn))
def test_freq_slot_exist(setup, equipment, request_set):
    """ test that assignment works even if effective_freq_slot is not populated """
    network, oms_list = setup
    service_params = request_set
    service_params['effective_freq_slot'] = None
    request_list = [PathRequest(**service_params)]
    paths = compute_path_dsjctn(network, equipment, request_list, [])
    pth_assign_spectrum(paths, request_list, oms_list, [find_reversed_path(paths[0])])
    # with no requested slot, assignment must still pick one: N == -256, M == 32
    assert (request_list[0].N, request_list[0].M) == (-256, 32)
def test_inconsistant_freq_slot(setup, equipment, request_set):
    """ test that an inconsistant M correctly raises an error """
    network, oms_list = setup
    service_params = request_set
    # minimum required nb of slots is 32 (800Gbit/100Gbit/s channels each occupying 50GHz ie 4 slots)
    service_params['effective_freq_slot'] = {'N': 0, 'M': 4}
    with pytest.raises(ServiceError):
        _check_one_request(service_params, 196.05e12)
    # without an explicit transceiver mode the request construction is accepted,
    # but spectrum assignment must then block it
    service_params['trx_mode'] = None
    request_list = [PathRequest(**service_params)]
    paths = compute_path_dsjctn(network, equipment, request_list, [])
    pth_assign_spectrum(paths, request_list, oms_list, [find_reversed_path(paths[0])])
    assert request_list[0].blocking_reason == 'NOT_ENOUGH_RESERVED_SPECTRUM'
def test_json_response_generation(xls_input, expected_response_file):
    """ tests if json response is correctly generated for all combinations of requests """
    equipment = load_equipment(eqpt_filename)
    network = load_network(xls_input, equipment)
    # reference per-channel power and resulting total power used for autodesign
    p_db = equipment['SI']['default'].power_dbm
    p_total_db = p_db + lin2db(
        automatic_nch(equipment['SI']['default'].f_min,
                      equipment['SI']['default'].f_max,
                      equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)
    data = read_service_sheet(xls_input, equipment, network)
    # change one of the request with bidir option to cover bidir case as well
    data['path-request'][2]['bidirectional'] = True
    oms_list = build_oms_list(network, equipment)
    rqs = requests_from_json(data, equipment)
    dsjn = disjunctions_from_json(data)
    dsjn = deduplicate_disjunctions(dsjn)
    rqs, dsjn = requests_aggregation(rqs, dsjn)
    pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
    propagatedpths, reversed_pths, reversed_propagatedpths = \
        compute_path_with_disjunction(network, equipment, rqs, pths)
    pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)
    result = []
    for i, pth in enumerate(propagatedpths):
        # test ServiceError handling : when M is zero at this point, the
        # json result should not be created if there is no blocking reason
        if i == 1:
            my_rq = deepcopy(rqs[i])
            my_rq.M = 0
            with pytest.raises(ServiceError):
                ResultElement(my_rq, pth, reversed_propagatedpths[i]).json
            # once a blocking reason is set, accessing .json must no longer raise
            my_rq.blocking_reason = 'NO_SPECTRUM'
            ResultElement(my_rq, pth, reversed_propagatedpths[i]).json
        result.append(ResultElement(rqs[i], pth, reversed_propagatedpths[i]))
    temp = {'response': [n.json for n in result]}
    expected = load_json(expected_response_file)
    for i, response in enumerate(temp['response']):
        if i == 2:
            # compare response must be False because z-a metric is missing
            # (request with bidir option to cover bidir case)
            assert not compare_response(expected['response'][i], response)
            print(f'response {response["response-id"]} should not match')
            # inject the expected reverse-direction metrics so the response would
            # now match the expected file
            expected['response'][2]['path-properties']['z-a-path-metric'] = [{
                'metric-type': 'SNR-bandwidth',
                'accumulative-value': 22.809999999999999
            }, {
                'metric-type': 'SNR-0.1nm',
                'accumulative-value': 26.890000000000001
            }, {
                'metric-type': 'OSNR-bandwidth',
                'accumulative-value': 26.239999999999998
            }, {
                'metric-type': 'OSNR-0.1nm',
                'accumulative-value': 30.32
            }, {
                'metric-type': 'reference_power',
                'accumulative-value': 0.0012589254117941673
            }, {
                'metric-type': 'path_bandwidth',
                'accumulative-value': 60000000000.0
            }]
            # test should be OK now
        else:
            assert compare_response(expected['response'][i], response)
            # NOTE(review): this print only runs when the assert above PASSES,
            # so the 'is not correct' wording is misleading — confirm intent
            print(f'response {response["response-id"]} is not correct')
def path_requests_run(args=None):
    """CLI entry point: compute performance for a list of service requests.

    Loads equipment and topology, builds the network once at the SI default
    power, computes (disjunction-aware) paths for all requests, propagates,
    assigns spectrum, prints a per-request summary table and optionally saves
    the results to a JSON or CSV file.

    :param args: optional argv-style list of arguments (defaults to sys.argv[1:])
    """
    parser = argparse.ArgumentParser(
        description='Compute performance for a list of services provided in a json file or an excel sheet',
        epilog=_help_footer,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    _add_common_options(parser, network_default=_examples_dir / 'meshTopologyExampleV2.xls')
    parser.add_argument('service_filename', nargs='?', type=Path,
                        metavar='SERVICES-REQUESTS.(json|xls|xlsx)',
                        default=_examples_dir / 'meshTopologyExampleV2.xls',
                        help='Input service file')
    parser.add_argument('-bi', '--bidir', action='store_true',
                        help='considers that all demands are bidir')
    parser.add_argument('-o', '--output', type=Path, metavar=_help_fname_json_csv,
                        help='Store satisfied requests into a JSON or CSV file')
    args = parser.parse_args(args if args is not None else sys.argv[1:])
    _setup_logging(args)
    _logger.info(f'Computing path requests {args.service_filename} into JSON format')
    print(f'{ansi_escapes.blue}Computing path requests {os.path.relpath(args.service_filename)} into JSON format{ansi_escapes.reset}')

    (equipment, network) = load_common_data(args.equipment, args.topology, args.sim_params,
                                            args.save_network_before_autodesign)

    # Build the network once using the default power defined in SI in eqpt config
    # TODO power density: db2linp(ower_dbm": 0)/power_dbm": 0 * nb channels as defined by
    # spacing, f_min and f_max
    p_db = equipment['SI']['default'].power_dbm
    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,
                                             equipment['SI']['default'].f_max,
                                             equipment['SI']['default'].spacing))
    try:
        build_network(network, equipment, p_db, p_total_db)
    except exceptions.NetworkTopologyError as e:
        print(f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {e}')
        sys.exit(1)
    except exceptions.ConfigurationError as e:
        print(f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {e}')
        sys.exit(1)
    if args.save_network is not None:
        save_network(network, args.save_network)
        print(f'{ansi_escapes.blue}Network (after autodesign) saved to {args.save_network}{ansi_escapes.reset}')
    oms_list = build_oms_list(network, equipment)
    try:
        data = load_requests(args.service_filename, equipment, bidir=args.bidir,
                             network=network, network_filename=args.topology)
        rqs = requests_from_json(data, equipment)
    except exceptions.ServiceError as e:
        print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {e}')
        sys.exit(1)
    # check that request ids are unique. Non unique ids, may
    # mess the computation: better to stop the computation
    all_ids = [r.request_id for r in rqs]
    if len(all_ids) != len(set(all_ids)):
        # remove one occurrence of every id so only the duplicates remain for the message
        for item in list(set(all_ids)):
            all_ids.remove(item)
        msg = f'Requests id {all_ids} are not unique'
        _logger.critical(msg)
        # fixed: exit with a non-zero status — bare sys.exit() reports success (0)
        sys.exit(1)
    rqs = correct_json_route_list(network, rqs)

    # pths = compute_path(network, equipment, rqs)
    dsjn = disjunctions_from_json(data)
    print(f'{ansi_escapes.blue}List of disjunctions{ansi_escapes.reset}')
    print(dsjn)
    # need to warn or correct in case of wrong disjunction form
    # disjunction must not be repeated with same or different ids
    dsjn = deduplicate_disjunctions(dsjn)
    # Aggregate demands with same exact constraints
    print(f'{ansi_escapes.blue}Aggregating similar requests{ansi_escapes.reset}')
    rqs, dsjn = requests_aggregation(rqs, dsjn)
    # TODO export novel set of aggregated demands in a json file
    print(f'{ansi_escapes.blue}The following services have been requested:{ansi_escapes.reset}')
    print(rqs)
    print(f'{ansi_escapes.blue}Computing all paths with constraints{ansi_escapes.reset}')
    try:
        pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
    except exceptions.DisjunctionError as this_e:
        print(f'{ansi_escapes.red}Disjunction error:{ansi_escapes.reset} {this_e}')
        sys.exit(1)

    print(f'{ansi_escapes.blue}Propagating on selected path{ansi_escapes.reset}')
    propagatedpths, reversed_pths, reversed_propagatedpths = compute_path_with_disjunction(
        network, equipment, rqs, pths)
    # Note that deepcopy used in compute_path_with_disjunction returns
    # a list of nodes which are not belonging to network (they are copies of the node objects).
    # so there can not be propagation on these nodes.

    pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)

    print(f'{ansi_escapes.blue}Result summary{ansi_escapes.reset}')
    # fixed: the 0.1nm-SNR column title had been garbled ('[email protected]'-style
    # e-mail obfuscation artefact); restored to match the snr@bandwidth column
    header = ['req id', ' demand', ' snr@bandwidth A-Z (Z-A)', ' snr@0.1nm A-Z (Z-A)',
              ' Receiver minOSNR', ' mode', ' Gbit/s', ' nb of tsp pairs',
              'N,M or blocking reason']
    data = [header]
    for i, this_p in enumerate(propagatedpths):
        rev_pth = reversed_propagatedpths[i]
        # fixed: default to '-' so the AttributeError branch below never reads an
        # unbound psnrb/psnr when neither direction produced a propagated path
        psnrb = psnr = '-'
        if rev_pth and this_p:
            psnrb = f'{round(mean(this_p[-1].snr),2)} ({round(mean(rev_pth[-1].snr),2)})'
            psnr = f'{round(mean(this_p[-1].snr_01nm), 2)}' +\
                f' ({round(mean(rev_pth[-1].snr_01nm),2)})'
        elif this_p:
            psnrb = f'{round(mean(this_p[-1].snr),2)}'
            psnr = f'{round(mean(this_p[-1].snr_01nm),2)}'

        try:
            if rqs[i].blocking_reason in BLOCKING_NOPATH:
                # no path at all: no SNR figures to report
                line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} :',
                        f'-', f'-', f'-', f'{rqs[i].tsp_mode}',
                        f'{round(rqs[i].path_bandwidth * 1e-9,2)}', f'-',
                        f'{rqs[i].blocking_reason}']
            else:
                # path exists but the request is blocked (e.g. spectrum issues)
                line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ',
                        psnrb, psnr, f'-', f'{rqs[i].tsp_mode}',
                        f'{round(rqs[i].path_bandwidth * 1e-9, 2)}', f'-',
                        f'{rqs[i].blocking_reason}']
        except AttributeError:
            # no blocking_reason attribute: the request was successfully served
            line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ',
                    psnrb, psnr, f'{rqs[i].OSNR + equipment["SI"]["default"].sys_margins}',
                    f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',
                    f'{ceil(rqs[i].path_bandwidth / rqs[i].bit_rate)}',
                    f'({rqs[i].N},{rqs[i].M})']
        data.append(line)

    # column widths for the pretty-printed summary table
    col_width = max(len(word) for row in data for word in row[2:])   # padding
    firstcol_width = max(len(row[0]) for row in data)   # padding
    secondcol_width = max(len(row[1]) for row in data)   # padding
    for row in data:
        firstcol = row[0].ljust(firstcol_width)
        secondcol = row[1].ljust(secondcol_width)
        remainingcols = ''.join(word.center(col_width, ' ') for word in row[2:])
        print(f'{firstcol} {secondcol} {remainingcols}')
    print(f'{ansi_escapes.yellow}Result summary shows mean SNR and OSNR (average over all channels){ansi_escapes.reset}')

    if args.output:
        result = []
        # assumes that list of rqs and list of propgatedpths have same order
        for i, pth in enumerate(propagatedpths):
            result.append(ResultElement(rqs[i], pth, reversed_propagatedpths[i]))
        temp = _path_result_json(result)
        if args.output.suffix.lower() == '.json':
            save_json(temp, args.output)
            print(f'{ansi_escapes.blue}Saved JSON to {args.output}{ansi_escapes.reset}')
        elif args.output.suffix.lower() == '.csv':
            with open(args.output, "w", encoding='utf-8') as fcsv:
                jsontocsv(temp, equipment, fcsv)
            print(f'{ansi_escapes.blue}Saved CSV to {args.output}{ansi_escapes.reset}')
        else:
            print(f'{ansi_escapes.red}Cannot save output: neither JSON nor CSV file{ansi_escapes.reset}')
            sys.exit(1)