def test_csv_response_generation(json_input, csv_output):
    """Check that the generated CSV is consistent with the expected one.

    Same columns are required, order is not important. The generated file is
    removed after being read back so repeated runs start clean.
    """
    with open(json_input) as jsonfile:
        json_data = load(jsonfile)
    equipment = load_equipment(eqpt_filename)
    csv_filename = str(csv_output) + '.csv'
    with open(csv_filename, 'w', encoding='utf-8') as fcsv:
        jsontocsv(json_data, equipment, fcsv)
    expected_csv_filename = str(csv_output) + '_expected.csv'

    resp = read_csv(csv_filename)
    unlink(csv_filename)
    expected_resp = read_csv(expected_csv_filename)
    # check that headers are the same (column order is not significant)
    resp_header = sorted(resp.head(0))
    expected_resp_header = sorted(expected_resp.head(0))
    assert resp_header == expected_resp_header, \
        f'headers are different:\n{resp_header}\n{expected_resp_header}'
    # align rows before comparing column contents. sort_values returns a NEW
    # frame: the result must be assigned back (the original code discarded it,
    # so the frames were never actually sorted).
    resp = resp.sort_values(by=['response-id'])
    expected_resp = expected_resp.sort_values(by=['response-id'])
    # for each column check that the output is as expected; NaN cells are
    # normalised to '' so missing values compare equal on both sides
    for column in expected_resp:
        assert list(resp[column].fillna('')) == list(expected_resp[column].fillna('')), \
            (f'results are different in column {column}:\n'
             f'{list(resp[column])}\n{list(expected_resp[column])}\n'
             f'{type(list(resp[column])[-1])}')
def launch_cli(network, data, equipment):
    """ Compute requests using network, data and equipment with client line interface
    """
    propagatedpths, reversed_propagatedpths, rqs = compute_requests(
        network, data, equipment)
    # Generate the output files only when an output path was requested
    if ARGS.output:
        # request list and propagated path list are assumed to share the same order
        result = [Result_element(rqs[i], pth, reversed_propagatedpths[i])
                  for i, pth in enumerate(propagatedpths)]
        temp = path_result_json(result)
        # strip the user-supplied suffix once, then derive both output names
        stem = str(ARGS.output)[0:len(str(ARGS.output)) - len(str(ARGS.output.suffix))]
        fnamecsv = f'{stem}.csv'
        fnamejson = f'{stem}.json'
        with open(fnamejson, 'w', encoding='utf-8') as fjson:
            fjson.write(dumps(path_result_json(result), indent=2, ensure_ascii=False))
        with open(fnamecsv, "w", encoding='utf-8') as fcsv:
            jsontocsv(temp, equipment, fcsv)
        print('\x1b[1;34;40m' + f'saving in {ARGS.output} and {fnamecsv}' + '\x1b[0m')
Reads JSON path result file in accordance with the Yang model for requesting path computation and writes results to a CSV file. See: draft-ietf-teas-yang-path-computation-01.txt """ from argparse import ArgumentParser from pathlib import Path from json import loads from gnpy.core.equipment import load_equipment from gnpy.core.request import jsontocsv parser = ArgumentParser(description = 'A function that writes json path results in an excel sheet.') parser.add_argument('filename', nargs='?', type = Path) parser.add_argument('output_filename', nargs='?', type = Path) parser.add_argument('eqpt_filename', nargs='?', type = Path, default=Path(__file__).parent / 'eqpt_config.json') if __name__ == '__main__': args = parser.parse_args() with open(args.output_filename, 'w', encoding='utf-8') as file: with open(args.filename, encoding='utf-8') as f: print(f'Reading {args.filename}') json_data = loads(f.read()) equipment = load_equipment(args.eqpt_filename) print(f'Writing in {args.output_filename}') jsontocsv(json_data,equipment,file)
# NOTE(review): fragment — the `for` loop over requests and the `if` guarding
# this feasible branch start outside this chunk; indentation reconstructed.
            # feasible request: id, demand, SNR at bandwidth, SNR at 0.1nm,
            # receiver min OSNR, mode, Gbit/s and number of transponder pairs
            line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', f'{round(mean(p[-1].snr),2)}',
                    f'{round(mean(p[-1].snr+lin2db(rqs[i].baud_rate/(12.5e9))),2)}',
                    f'{rqs[i].OSNR}', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',
                    f'{ceil(rqs[i].path_bandwidth / rqs[i].bit_rate) }']
        else:
            # no feasible path found for this request
            line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : not feasible ']
        data.append(line)
    # column widths for the aligned plain-text table below
    col_width = max(len(word) for row in data for word in row[2:])   # padding
    firstcol_width = max(len(row[0]) for row in data)   # padding
    secondcol_width = max(len(row[1]) for row in data)   # padding
    for row in data:
        firstcol = ''.join(row[0].ljust(firstcol_width))
        secondcol = ''.join(row[1].ljust(secondcol_width))
        remainingcols = ''.join(word.center(col_width, ' ') for word in row[2:])
        print(f'{firstcol} {secondcol} {remainingcols}')
    if args.output:
        result = []
        # assumes that list of rqs and list of propgatedpths have same order
        for i, p in enumerate(propagatedpths):
            result.append(Result_element(rqs[i], p))
        temp = path_result_json(result)
        # strip the user-supplied suffix, then derive both output names
        fnamecsv = f'{str(args.output)[0:len(str(args.output))-len(str(args.output.suffix))]}.csv'
        fnamejson = f'{str(args.output)[0:len(str(args.output))-len(str(args.output.suffix))]}.json'
        with open(fnamejson, 'w', encoding='utf-8') as f:
            f.write(dumps(path_result_json(result), indent=2, ensure_ascii=False))
        with open(fnamecsv, "w", encoding='utf-8') as fcsv:
            jsontocsv(temp, equipment, fcsv)
        print('\x1b[1;34;40m' + f'saving in {args.output} and {fnamecsv}' + '\x1b[0m')
def main(args):
    """Entry point: load inputs, build the network, compute and propagate every
    path request, print a result summary and optionally save JSON/CSV output.

    :param args: parsed CLI namespace; reads service_filename, eqpt_filename,
        network_filename, bidir and output — presumably an argparse.Namespace,
        TODO confirm against the parser definition
    """
    LOGGER.info(
        f'Computing path requests {args.service_filename} into JSON format')
    print('\x1b[1;34;40m' +
          f'Computing path requests {args.service_filename} into JSON format' + '\x1b[0m')
    # for debug
    # print( args.eqpt_filename)
    try:
        # load every user input up front so configuration errors are reported
        # before any computation starts
        data = load_requests(args.service_filename, args.eqpt_filename, args.bidir)
        equipment = load_equipment(args.eqpt_filename)
        network = load_network(args.network_filename, equipment)
    except EquipmentConfigError as this_e:
        print(
            f'{ansi_escapes.red}Configuration error in the equipment library:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except NetworkTopologyError as this_e:
        print(
            f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except ConfigurationError as this_e:
        print(
            f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except ServiceError as this_e:
        print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
        exit(1)
    # Build the network once using the default power defined in SI in eqpt config
    # TODO power density: db2linp(ower_dbm": 0)/power_dbm": 0 * nb channels as defined by
    # spacing, f_min and f_max
    p_db = equipment['SI']['default'].power_dbm
    # total power = per-channel power + 10*log10(channel count)
    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,
                                             equipment['SI']['default'].f_max,
                                             equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)
    save_network(args.network_filename, network)
    oms_list = build_oms_list(network, equipment)
    try:
        rqs = requests_from_json(data, equipment)
    except ServiceError as this_e:
        print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
        exit(1)
    # check that request ids are unique. Non-unique ids may mess the
    # computation: better to stop the computation
    all_ids = [r.request_id for r in rqs]
    if len(all_ids) != len(set(all_ids)):
        # removing one occurrence of each distinct id leaves only the duplicates,
        # which are then reported in the critical log message
        for item in list(set(all_ids)):
            all_ids.remove(item)
        msg = f'Requests id {all_ids} are not unique'
        LOGGER.critical(msg)
        exit()
    try:
        rqs = correct_route_list(network, rqs)
    except ServiceError as this_e:
        print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
        exit(1)
    # pths = compute_path(network, equipment, rqs)
    dsjn = disjunctions_from_json(data)
    print('\x1b[1;34;40m' + f'List of disjunctions' + '\x1b[0m')
    print(dsjn)
    # need to warn or correct in case of wrong disjunction form
    # disjunction must not be repeated with same or different ids
    dsjn = correct_disjn(dsjn)
    # Aggregate demands with same exact constraints
    print('\x1b[1;34;40m' + f'Aggregating similar requests' + '\x1b[0m')
    rqs, dsjn = requests_aggregation(rqs, dsjn)
    # TODO export novel set of aggregated demands in a json file
    print('\x1b[1;34;40m' + 'The following services have been requested:' + '\x1b[0m')
    print(rqs)
    print('\x1b[1;34;40m' + f'Computing all paths with constraints' + '\x1b[0m')
    try:
        pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
    except DisjunctionError as this_e:
        print(
            f'{ansi_escapes.red}Disjunction error:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    print('\x1b[1;34;40m' + f'Propagating on selected path' + '\x1b[0m')
    propagatedpths, reversed_pths, reversed_propagatedpths = \
        compute_path_with_disjunction(network, equipment, rqs, pths)
    # Note that deepcopy used in compute_path_with_disjunction returns
    # a list of nodes which are not belonging to network (they are copies of the node objects).
    # so there can not be propagation on these nodes.
    pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)
    print('\x1b[1;34;40m' + f'Result summary' + '\x1b[0m')
    header = ['req id', ' demand', ' snr@bandwidth A-Z (Z-A)', ' [email protected] A-Z (Z-A)',
              ' Receiver minOSNR', ' mode', ' Gbit/s', ' nb of tsp pairs',
              'N,M or blocking reason']
    data = []
    data.append(header)
    for i, this_p in enumerate(propagatedpths):
        rev_pth = reversed_propagatedpths[i]
        # SNR figures for the forward (and, when available, reverse) path.
        # NOTE(review): if this_p is falsy and the request carries no
        # blocking_reason, psnrb/psnr would be unbound below — presumably
        # blocked requests always get blocking_reason set; confirm upstream.
        if rev_pth and this_p:
            psnrb = f'{round(mean(this_p[-1].snr),2)} ({round(mean(rev_pth[-1].snr),2)})'
            psnr = f'{round(mean(this_p[-1].snr_01nm), 2)}' + \
                f' ({round(mean(rev_pth[-1].snr_01nm),2)})'
        elif this_p:
            psnrb = f'{round(mean(this_p[-1].snr),2)}'
            psnr = f'{round(mean(this_p[-1].snr_01nm),2)}'
        try:
            # blocked request: the blocking_reason attribute exists
            if rqs[i].blocking_reason in BLOCKING_NOPATH:
                line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} :',
                        f'-', f'-', f'-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',
                        f'-', f'{rqs[i].blocking_reason}']
            else:
                line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', psnrb,
                        psnr, f'-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9, 2)}',
                        f'-', f'{rqs[i].blocking_reason}']
        except AttributeError:
            # no blocking_reason attribute: the request was successfully served
            line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', psnrb,
                    psnr, f'{rqs[i].OSNR}', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',
                    f'{ceil(rqs[i].path_bandwidth / rqs[i].bit_rate) }', f'({rqs[i].N},{rqs[i].M})']
        data.append(line)
    # column widths for the aligned plain-text table below
    col_width = max(len(word) for row in data for word in row[2:])  # padding
    firstcol_width = max(len(row[0]) for row in data)  # padding
    secondcol_width = max(len(row[1]) for row in data)  # padding
    for row in data:
        firstcol = ''.join(row[0].ljust(firstcol_width))
        secondcol = ''.join(row[1].ljust(secondcol_width))
        remainingcols = ''.join(
            word.center(col_width, ' ') for word in row[2:])
        print(f'{firstcol} {secondcol} {remainingcols}')
    print('\x1b[1;33;40m' + f'Result summary shows mean SNR and OSNR (average over all channels)' +
          '\x1b[0m')
    if args.output:
        result = []
        # assumes that list of rqs and list of propgatedpths have same order
        for i, pth in enumerate(propagatedpths):
            result.append(
                Result_element(rqs[i], pth, reversed_propagatedpths[i]))
        temp = path_result_json(result)
        # strip the user-supplied suffix, then derive both output names
        fnamecsv = f'{str(args.output)[0:len(str(args.output))-len(str(args.output.suffix))]}.csv'
        fnamejson = f'{str(args.output)[0:len(str(args.output))-len(str(args.output.suffix))]}.json'
        with open(fnamejson, 'w', encoding='utf-8') as fjson:
            fjson.write(
                dumps(path_result_json(result), indent=2, ensure_ascii=False))
        with open(fnamecsv, "w", encoding='utf-8') as fcsv:
            jsontocsv(temp, equipment, fcsv)
        print('\x1b[1;34;40m' + f'saving in {args.output} and {fnamecsv}' + '\x1b[0m')
# (script tail) print the raw requests, then compute each path and report
# feasibility in an aligned plain-text table
print(pths)
test = compute_path(network, equipment, pths)
#TODO write results
header = ['demand', 'snr@bandwidth', '[email protected]', 'Receiver minOSNR']
data = []
data.append(header)
for i, p in enumerate(test):
    if p:
        # feasible: mean SNR in signal bandwidth, SNR rescaled to a 0.1nm
        # reference bandwidth, and the receiver minimum OSNR requirement
        line = [f'{pths[i].source} to {pths[i].destination} : ', f'{round(mean(p[-1].snr),2)}',
                f'{round(mean(p[-1].snr+lin2db(pths[i].baud_rate/(12.5e9))),2)}',
                f'{pths[i].OSNR}']
    else:
        line = [f'no path from {pths[i].source} to {pths[i].destination} ']
    data.append(line)
col_width = max(len(word) for row in data for word in row)  # padding
for row in data:
    print(''.join(word.ljust(col_width) for word in row))
if args.output:
    result = []
    for p in test:
        # NOTE(review): test.index(p) returns the FIRST matching entry — the
        # pairing is wrong if two computed paths compare equal; enumerate
        # would be safer. Left unchanged here.
        result.append(Result_element(pths[test.index(p)], p))
    with open(args.output, 'w') as f:
        f.write(dumps(path_result_json(result), indent=2))
    # keep only the part of the output name before the first dot
    fnamecsv = next(s for s in args.output.split('.')) + '.csv'
    with open(fnamecsv, "w") as fcsv:
        jsontocsv(path_result_json(result), equipment, fcsv)