Example #1
def test_json_response_generation(xls_input, expected_response_file):
    """ tests if json response is correctly generated for all combinations of requests
    """
    data = convert_service_sheet(xls_input, eqpt_filename)
    equipment = load_equipment(eqpt_filename)
    network = load_network(xls_input, equipment)
    p_db = equipment['SI']['default'].power_dbm

    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
        equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)
    rqs = requests_from_json(data, equipment)
    rqs = correct_route_list(network, rqs)
    dsjn = disjunctions_from_json(data)
    dsjn = correct_disjn(dsjn)
    rqs, dsjn = requests_aggregation(rqs, dsjn)
    pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
    propagatedpths = compute_path_with_disjunction(network, equipment, rqs, pths)
    result = []
    for i, pth in enumerate(propagatedpths):
        result.append(Result_element(rqs[i], pth))
    temp = {
        'response': [n.json for n in result]
    }
    # load expected result and compare keys
    # (not values at this stage)
    with open(expected_response_file) as jsonfile:
        expected = load(jsonfile)

    for i, response in enumerate(temp['response']):
        assert compare_response(expected['response'][i], response)
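Nearly every example here computes the total launch power the same way: p_total_db is the per-channel power p_db plus 10*log10 of the number of channels that fit between f_min and f_max at the given spacing. A rough, self-contained sketch of that arithmetic with plain-Python stand-ins for gnpy's lin2db and automatic_nch (shown for illustration only, not the library's actual implementations):

from math import floor, log10

def lin2db(value):
    # assumed equivalent of gnpy's lin2db: linear ratio to decibels
    return 10 * log10(value)

def automatic_nch(f_min, f_max, spacing):
    # assumed equivalent of gnpy's automatic_nch: channels fitting in the band
    return int(floor((f_max - f_min) / spacing))

p_db = 0                                       # per-channel power, dBm
nch = automatic_nch(191.35e12, 196.1e12, 50e9) # illustrative band limits and grid
p_total_db = p_db + lin2db(nch)                # total power over all channels, dBm
print(nch, round(p_total_db, 2))               # 95 channels -> ~19.78 dBm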
Example #2
def test_auto_design_generation_fromxlsgainmode(xls_input,
                                                expected_json_output):
    equipment = load_equipment(eqpt_filename)
    network = load_network(xls_input, equipment)
    # in order to test the Eqpt sheet and load gain target, change the power-mode to False (to be in gain mode)
    equipment['Span']['default'].power_mode = False
    # Build the network once using the default power defined in SI in eqpt config

    p_db = equipment['SI']['default'].power_dbm

    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
        equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)
    save_network(xls_input, network)

    actual_json_output = f'{str(xls_input)[:-4]}_auto_design.json'

    with open(actual_json_output, encoding='utf-8') as f:
        actual = load(f)
    unlink(actual_json_output)

    with open(expected_json_output, encoding='utf-8') as f:
        expected = load(f)

    results = compare_networks(expected, actual)
    assert not results.elements.missing
    assert not results.elements.extra
    assert not results.elements.different
    assert not results.connections.missing
    assert not results.connections.extra
    assert not results.connections.different
Example #3
def propagation(input_power, con_in, con_out, dest):
    equipment = load_equipment(eqpt_library_name)
    network = load_network(network_file_name, equipment)
    build_network(network, equipment, 0, 20)

    # parametrize the network elements with the con losses and adapt gain
    # (assumes all spans are identical)
    for e in network.nodes():
        if isinstance(e, Fiber):
            loss = e.loss_coef * e.length
            e.con_in = con_in
            e.con_out = con_out
        if isinstance(e, Edfa):
            e.operational.gain_target = loss + con_in + con_out

    transceivers = {n.uid: n for n in network.nodes() if isinstance(n, Transceiver)}

    p = input_power
    p = db2lin(p) * 1e-3
    spacing = 50e9  # Hz (50 GHz grid)
    si = create_input_spectral_information(191.3e12, 191.3e12+79*spacing, 0.15, 32e9, p, spacing)
    source = next(transceivers[uid] for uid in transceivers if uid == 'trx A')
    sink = next(transceivers[uid] for uid in transceivers if uid == dest)
    path = dijkstra_path(network, source, sink)
    for el in path:
        si = el(si)
        print(el) # remove this line when sweeping across several powers
    edfa_sample = next(el for el in path if isinstance(el, Edfa))
    nf = mean(edfa_sample.nf)

    print(f'pw: {input_power} con in: {con_in} con out: {con_out}',
          f'OSNR@0.1nm: {round(mean(sink.osnr_ase_01nm), 2)}',
          f'SNR@bandwidth: {round(mean(sink.snr), 2)}')
    return sink, nf
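A possible usage sketch for the helper above: the print inside the loop is meant to be dropped when sweeping over several launch powers, so a sweep could look like this (the destination 'trx B' and the power/connector-loss values are assumptions, not taken from the example):

for p_dbm in range(-2, 3):
    # propagate with 0.5 dB connector loss at both fibre ends
    sink, nf = propagation(p_dbm, con_in=0.5, con_out=0.5, dest='trx B')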
Example #4
def test_disjunction(net, eqpt, serv):
    data = load_requests(serv, eqpt, bidir=False)
    equipment = load_equipment(eqpt)
    network = load_network(net, equipment)
    # Build the network once using the default power defined in SI in eqpt config
    # total power = power_dbm + lin2db(nb channels), with the number of channels
    # derived from spacing, f_min and f_max
    p_db = equipment['SI']['default'].power_dbm

    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
        equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)
    build_oms_list(network, equipment)

    rqs = requests_from_json(data, equipment)
    rqs = correct_route_list(network, rqs)
    dsjn = disjunctions_from_json(data)
    pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
    print(dsjn)

    dsjn_list = [d.disjunctions_req for d in dsjn]

    # assumes only pairs in dsjn list
    test = True
    for e in dsjn_list:
        rqs_id_list = [r.request_id for r in rqs]
        p1 = pths[rqs_id_list.index(e[0])][1:-1]
        p2 = pths[rqs_id_list.index(e[1])][1:-1]
        if isdisjoint(p1, p2) + isdisjoint(p1, find_reversed_path(p2)) > 0:
            test = False
            print(f'Computed path (roadms): {[n.uid for n in p1 if isinstance(n, Roadm)]}\n')
            print(f'Computed path (roadms): {[n.uid for n in p2 if isinstance(n, Roadm)]}\n')
            break
    print(dsjn_list)
    assert test
Example #5
def setup_trx():
    """init transceiver class to access snr and osnr calculations"""
    equipment = load_equipment(eqpt_library)
    network = load_network(test_network, equipment)
    build_network(network, equipment, 0, 20)
    trx = [n for n in network.nodes() if isinstance(n, Transceiver)][0]
    return trx
Example #6
def setup_edfa_fixed_gain():
    """init edfa class by reading the 2nd edfa in test_network.json file"""
    equipment = load_equipment(eqpt_library)
    network = load_network(test_network, equipment)
    build_network(network, equipment, 0, 20)
    edfa = [n for n in network.nodes() if isinstance(n, Edfa)][1]
    yield edfa
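The two setup helpers above look like pytest fixtures (the @pytest.fixture decorators are not visible in this excerpt); setup_trx returns its object, while setup_edfa_fixed_gain yields it, which leaves room for teardown code after the yield. A generic, self-contained sketch of that yield-fixture pattern:

import pytest

@pytest.fixture
def toy_resource():
    resource = {'ready': True}   # setup runs before the test
    yield resource               # the test receives this object
    resource.clear()             # teardown runs after the test finishes

def test_uses_toy_resource(toy_resource):
    assert toy_resource['ready']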
Example #7
def test_does_not_loop_back(net, eqpt, serv):
    data = load_requests(serv, eqpt)
    equipment = load_equipment(eqpt)
    network = load_network(net, equipment)

    # Build the network once using the default power defined in SI in eqpt config
    # total power = power_dbm + lin2db(nb channels), with the number of channels
    # derived from spacing, f_min and f_max
    p_db = equipment['SI']['default'].power_dbm

    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
        equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)

    rqs = requests_from_json(data, equipment)
    rqs = correct_route_list(network, rqs)
    dsjn = disjunctions_from_json(data)
    pths = compute_path_dsjctn(network, equipment, rqs, dsjn)

    # check that computed paths do not loop back, i.e. each element appears only once
    test = True
    for p in pths:
        uids = [el.uid for el in p]
        # a repeated uid means the path visits the same element twice (a loop)
        if len(uids) != len(set(uids)):
            test = False
            break

    assert test
Example #8
def test_csv_response_generation(json_input, csv_output):
    """ tests if generated csv is consistant with expected generation
        same columns (order not important)
    """
    with open(json_input) as jsonfile:
        json_data = load(jsonfile)
    equipment = load_equipment(eqpt_filename)
    csv_filename = str(csv_output)+'.csv'
    with open(csv_filename, 'w', encoding='utf-8') as fcsv:
        jsontocsv(json_data, equipment, fcsv)

    expected_csv_filename = str(csv_output)+'_expected.csv'

    # expected header
    # csv_header = \
    # [
    #  'response-id',
    #  'source',
    #  'destination',
    #  'path_bandwidth',
    #  'Pass?',
    #  'nb of tsp pairs',
    #  'total cost',
    #  'transponder-type',
    #  'transponder-mode',
    #  'OSNR-0.1nm',
    #  'SNR-0.1nm',
    #  'SNR-bandwidth',
    #  'baud rate (Gbaud)',
    #  'input power (dBm)',
    #  'path'
    # ]

    resp = read_csv(csv_filename)
    unlink(csv_filename)
    expected_resp = read_csv(expected_csv_filename)
    resp_header = list(resp.columns)
    expected_resp_header = list(expected_resp.columns)
    # check that headers are the same (order not important);
    # the prints are shown by pytest only if the assertion fails
    resp_header.sort()
    expected_resp_header.sort()
    print('headers are different')
    print(resp_header)
    print(expected_resp_header)
    assert resp_header == expected_resp_header

    # for each column, check that the output values are as expected
    # (debug prints are shown by pytest only when an assertion fails)
    resp = resp.sort_values(by=['response-id'])
    expected_resp = expected_resp.sort_values(by=['response-id'])

    for column in expected_resp:
        print(list(resp[column]))
        print(list(expected_resp[column]))
        print(type(list(resp[column])[-1]))
        assert list(resp[column].fillna('')) == list(expected_resp[column].fillna(''))
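A small self-contained illustration of the comparison technique used above, on toy CSV data rather than GNPy output: headers are compared after sorting so that column order does not matter, and values are compared column by column with NaN filled so that empty cells match empty cells.

from io import StringIO
from pandas import read_csv

# toy data only: same columns in a different order, one empty cell per frame
actual = read_csv(StringIO('response-id,source,destination\n1,A,\n2,C,D\n'))
expected = read_csv(StringIO('destination,response-id,source\n,1,A\nD,2,C\n'))

assert sorted(actual.columns) == sorted(expected.columns)
for column in expected:
    assert list(actual[column].fillna('')) == list(expected[column].fillna(''))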
Example #9
def setup_edfa_variable_gain():
    """init edfa class by reading test_network.json file
    remove all gain and nf ripple"""
    equipment = load_equipment(eqpt_library)
    network = load_network(test_network, equipment)
    build_network(network, equipment, 0, 20)
    edfa = [n for n in network.nodes() if isinstance(n, Edfa)][0]
    edfa.gain_ripple = zeros(96)
    edfa.interpol_nf_ripple = zeros(96)
    yield edfa
Example #10
def test_automaticmodefeature(net, eqpt, serv, expected_mode):
    data = load_requests(serv, eqpt, bidir=False)
    equipment = load_equipment(eqpt)
    network = load_network(net, equipment)

    # Build the network once using the default power defined in SI in eqpt config
    # total power = power_dbm + lin2db(nb channels), with the number of channels
    # derived from spacing, f_min and f_max
    p_db = equipment['SI']['default'].power_dbm

    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
        equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)

    rqs = requests_from_json(data, equipment)
    rqs = correct_route_list(network, rqs)
    dsjn = []
    pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
    path_res_list = []

    for i, pathreq in enumerate(rqs):

        # use the power specified in the request, which may differ from the design power;
        # power is an optional parameter of a request definition and, if omitted,
        # defaults to the one defined in eqpt_config.json
        p_db = lin2db(pathreq.power * 1e3)
        p_total_db = p_db + lin2db(pathreq.nb_channel)
        print(f'request {pathreq.request_id}')
        print(f'Computing path from {pathreq.source} to {pathreq.destination}')
        # adding the first node to make the output clearer
        print(f'with path constraint: {[pathreq.source] + pathreq.nodes_list}')

        total_path = pths[i]
        print(f'Computed path (roadms): {[e.uid for e in total_path if isinstance(e, Roadm)]}\n')
        # for debug
        # print(f'{pathreq.baud_rate}   {pathreq.power}   {pathreq.spacing}   {pathreq.nb_channel}')
        if pathreq.baud_rate is not None:
            print(pathreq.format)
            path_res_list.append(pathreq.format)
            total_path = propagate(total_path, pathreq, equipment)
        else:
            total_path, mode = propagate_and_optimize_mode(
                total_path, pathreq, equipment)
            # if no baud rate satisfies the spacing, no mode is returned and the path is empty;
            # a warning is issued by propagate_and_optimize_mode
            if mode is not None:
                print(mode['format'])
                path_res_list.append(mode['format'])
            else:
                print('nok')
                path_res_list.append('nok')
    print(path_res_list)
    assert path_res_list == expected_mode
Example #11
def equipment():
    """init transceiver class to access snr and osnr calculations"""
    equipment = load_equipment(EQPT_LIBRARY_NAME)
    # define some booster and preamps
    restrictions_list = [{
        'type_variety': 'booster_medium_gain',
        'type_def': 'variable_gain',
        'gain_flatmax': 25,
        'gain_min': 15,
        'p_max': 21,
        'nf_min': 5.8,
        'nf_max': 10,
        'out_voa_auto': False,
        'allowed_for_design': False
    }, {
        'type_variety': 'preamp_medium_gain',
        'type_def': 'variable_gain',
        'gain_flatmax': 26,
        'gain_min': 15,
        'p_max': 23,
        'nf_min': 6,
        'nf_max': 10,
        'out_voa_auto': False,
        'allowed_for_design': False
    }, {
        'type_variety': 'preamp_high_gain',
        'type_def': 'variable_gain',
        'gain_flatmax': 35,
        'gain_min': 25,
        'p_max': 21,
        'nf_min': 5.5,
        'nf_max': 7,
        'out_voa_auto': False,
        'allowed_for_design': False
    }, {
        'type_variety': 'preamp_low_gain',
        'type_def': 'variable_gain',
        'gain_flatmax': 16,
        'gain_min': 8,
        'p_max': 23,
        'nf_min': 6.5,
        'nf_max': 11,
        'out_voa_auto': False,
        'allowed_for_design': False
    }]
    # add them to the library
    for entry in restrictions_list:
        equipment['Edfa'][entry['type_variety']] = Amp.from_json(
            EQPT_LIBRARY_NAME, **entry)
    return equipment
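The fixture above registers extra amplifier variants with allowed_for_design set to False, so they can be referenced explicitly (for instance as ROADM booster/preamp restrictions) without being picked up by auto-design. A toy sketch of that idea with plain dicts, no GNPy types assumed and all names made up:

library = {'Edfa': {'std_medium_gain': {'allowed_for_design': True}}}
extra = {'type_variety': 'preamp_high_gain', 'allowed_for_design': False}
library['Edfa'][extra['type_variety']] = extra

# amps flagged allowed_for_design=False stay available only as explicit restrictions
design_candidates = [name for name, amp in library['Edfa'].items()
                     if amp['allowed_for_design']]
assert design_candidates == ['std_medium_gain']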
Example #12
def test_no_amp_feature(node_uid):
    ''' Check that a booster is not placed on a roadm if a fused element is specified.
        test_parser partly covers this behaviour; this test should guarantee that the
        feature is preserved even if convert is changed
    '''
    equipment = load_equipment(EQPT_LIBRARY_NAME)
    json_network = load_json(NETWORK_FILE_NAME)

    for elem in json_network['elements']:
        if elem['uid'] == node_uid:
            #replace edfa node by a fused node in the topology
            elem['type'] = 'Fused'
            elem.pop('type_variety')
            elem.pop('operational')
            elem['params'] = {'loss': 0}

            next_node_uid = next(conn['to_node'] for conn in json_network['connections'] \
                                 if conn['from_node'] == node_uid)
            previous_node_uid = next(conn['from_node'] for conn in json_network['connections'] \
                                 if conn['to_node'] == node_uid)

    network = network_from_json(json_network, equipment)
    # Build the network once using the default power defined in SI in eqpt config
    # total power = power_dbm + lin2db(nb channels), with the number of channels
    # derived from spacing, f_min and f_max
    p_db = equipment['SI']['default'].power_dbm
    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
        equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))

    build_network(network, equipment, p_db, p_total_db)

    node = next(nd for nd in network.nodes() if nd.uid == node_uid)
    next_node = next(network.successors(node))
    previous_node = next(network.predecessors(node))

    if not isinstance(node, Fused):
        raise AssertionError()
    if not node.params.loss == 0.0:
        raise AssertionError()
    if not next_node_uid == next_node.uid:
        raise AssertionError()
    if not previous_node_uid == previous_node.uid:
        raise AssertionError()
Example #13
def main(args):
    """ main function that calls all functions
    """
    LOGGER.info(
        f'Computing path requests {args.service_filename} into JSON format')
    print('\x1b[1;34;40m' +\
          f'Computing path requests {args.service_filename} into JSON format'+ '\x1b[0m')
    # for debug
    # print( args.eqpt_filename)

    try:
        data = load_requests(args.service_filename, args.eqpt_filename,
                             args.bidir)
        equipment = load_equipment(args.eqpt_filename)
        network = load_network(args.network_filename, equipment)
    except EquipmentConfigError as this_e:
        print(
            f'{ansi_escapes.red}Configuration error in the equipment library:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except NetworkTopologyError as this_e:
        print(
            f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except ConfigurationError as this_e:
        print(
            f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except ServiceError as this_e:
        print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
        exit(1)
    # input_str = raw_input("How will you use your program: c:[cli] , a:[api] ?")
    # print(input_str)
    #
    if args.rest == 1 and args.output is None:
        print('you have chosen the rest mode')
        APP.run(host='0.0.0.0', port=5000, debug=True)
    elif args.rest > 1 or (args.rest == 1 and args.output is not None):
        print('command is not well formulated')
    else:
        launch_cli(network, data, equipment)
Example #14
    def post(self):
        data = request.get_json()
        equipment = load_equipment('examples/2019-demo-equipment.json')
        topo_json = load_json('examples/2019-demo-topology.json')
        network = network_from_json(topo_json, equipment)
        try:
            propagatedpths, reversed_propagatedpths, rqs = compute_requests(
                network, data, equipment)
            # Generate the output
            result = []
            # assumes that the list of rqs and the list of propagatedpths have the same order
            for i, pth in enumerate(propagatedpths):
                result.append(
                    Result_element(rqs[i], pth, reversed_propagatedpths[i]))

            return {"result": path_result_json(result)}, 201
        except ServiceError as this_e:
            msg = f'Service error: {this_e}'
            return {"result": msg}, 400
Example #15
def main(args):
    """ main function that calls all functions
    """
    LOGGER.info(
        f'Computing path requests {args.service_filename} into JSON format')
    print('\x1b[1;34;40m' +\
          f'Computing path requests {args.service_filename} into JSON format'+ '\x1b[0m')
    # for debug
    # print( args.eqpt_filename)

    try:
        data = load_requests(args.service_filename, args.eqpt_filename,
                             args.bidir)
        equipment = load_equipment(args.eqpt_filename)
        network = load_network(args.network_filename, equipment)
    except EquipmentConfigError as this_e:
        print(
            f'{ansi_escapes.red}Configuration error in the equipment library:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except NetworkTopologyError as this_e:
        print(
            f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except ConfigurationError as this_e:
        print(
            f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {this_e}'
        )
        exit(1)
    except ServiceError as this_e:
        print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
        exit(1)

    # Build the network once using the default power defined in SI in eqpt config
    # TODO: total power = power_dbm + lin2db(nb channels), with the number of channels
    # derived from spacing, f_min and f_max
    p_db = equipment['SI']['default'].power_dbm

    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
        equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)
    save_network(args.network_filename, network)

    oms_list = build_oms_list(network, equipment)

    try:
        rqs = requests_from_json(data, equipment)
    except ServiceError as this_e:
        print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
        exit(1)
    # check that request ids are unique: non-unique ids may
    # corrupt the computation, so it is better to stop here
    all_ids = [r.request_id for r in rqs]
    if len(all_ids) != len(set(all_ids)):
        for item in list(set(all_ids)):
            all_ids.remove(item)
        msg = f'Requests id {all_ids} are not unique'
        LOGGER.critical(msg)
        exit()
    try:
        rqs = correct_route_list(network, rqs)
    except ServiceError as this_e:
        print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
        exit(1)
    # pths = compute_path(network, equipment, rqs)
    dsjn = disjunctions_from_json(data)

    print('\x1b[1;34;40m' + f'List of disjunctions' + '\x1b[0m')
    print(dsjn)
    # need to warn or correct in case of wrong disjunction form
    # disjunction must not be repeated with same or different ids
    dsjn = correct_disjn(dsjn)

    # Aggregate demands with same exact constraints
    print('\x1b[1;34;40m' + f'Aggregating similar requests' + '\x1b[0m')

    rqs, dsjn = requests_aggregation(rqs, dsjn)
    # TODO export novel set of aggregated demands in a json file

    print('\x1b[1;34;40m' + 'The following services have been requested:' +
          '\x1b[0m')
    print(rqs)

    print('\x1b[1;34;40m' + f'Computing all paths with constraints' +
          '\x1b[0m')
    try:
        pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
    except DisjunctionError as this_e:
        print(
            f'{ansi_escapes.red}Disjunction error:{ansi_escapes.reset} {this_e}'
        )
        exit(1)

    print('\x1b[1;34;40m' + f'Propagating on selected path' + '\x1b[0m')
    propagatedpths, reversed_pths, reversed_propagatedpths = \
        compute_path_with_disjunction(network, equipment, rqs, pths)
    # Note that the deepcopy used in compute_path_with_disjunction returns
    # a list of nodes that do not belong to the network (they are copies of the node objects),
    # so propagation cannot be performed on these nodes.

    pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)

    print('\x1b[1;34;40m' + f'Result summary' + '\x1b[0m')
    header = ['req id', '  demand', '  snr@bandwidth A-Z (Z-A)', '  snr@0.1nm A-Z (Z-A)',\
              '  Receiver minOSNR', '  mode', '  Gbit/s', '  nb of tsp pairs',\
              'N,M or blocking reason']
    data = []
    data.append(header)
    for i, this_p in enumerate(propagatedpths):
        rev_pth = reversed_propagatedpths[i]
        if rev_pth and this_p:
            psnrb = f'{round(mean(this_p[-1].snr),2)} ({round(mean(rev_pth[-1].snr),2)})'
            psnr = f'{round(mean(this_p[-1].snr_01nm), 2)}' +\
                   f' ({round(mean(rev_pth[-1].snr_01nm),2)})'
        elif this_p:
            psnrb = f'{round(mean(this_p[-1].snr),2)}'
            psnr = f'{round(mean(this_p[-1].snr_01nm),2)}'

        try:
            if rqs[i].blocking_reason in BLOCKING_NOPATH:
                line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} :',\
                        f'-', f'-', f'-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',\
                        f'-', f'{rqs[i].blocking_reason}']
            else:
                line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', psnrb,\
                        psnr, f'-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9, 2)}',\
                        f'-', f'{rqs[i].blocking_reason}']
        except AttributeError:
            line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', psnrb,\
                    psnr, f'{rqs[i].OSNR}', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',\
                    f'{ceil(rqs[i].path_bandwidth / rqs[i].bit_rate) }', f'({rqs[i].N},{rqs[i].M})']
        data.append(line)

    col_width = max(len(word) for row in data for word in row[2:])  # padding
    firstcol_width = max(len(row[0]) for row in data)  # padding
    secondcol_width = max(len(row[1]) for row in data)  # padding
    for row in data:
        firstcol = ''.join(row[0].ljust(firstcol_width))
        secondcol = ''.join(row[1].ljust(secondcol_width))
        remainingcols = ''.join(
            word.center(col_width, ' ') for word in row[2:])
        print(f'{firstcol} {secondcol} {remainingcols}')
    print('\x1b[1;33;40m'+f'Result summary shows mean SNR and OSNR (average over all channels)' +\
          '\x1b[0m')

    if args.output:
        result = []
        # assumes that the list of rqs and the list of propagatedpths have the same order
        for i, pth in enumerate(propagatedpths):
            result.append(
                Result_element(rqs[i], pth, reversed_propagatedpths[i]))
        temp = path_result_json(result)
        fnamecsv = f'{str(args.output)[0:len(str(args.output))-len(str(args.output.suffix))]}.csv'
        fnamejson = f'{str(args.output)[0:len(str(args.output))-len(str(args.output.suffix))]}.json'
        with open(fnamejson, 'w', encoding='utf-8') as fjson:
            fjson.write(dumps(temp, indent=2, ensure_ascii=False))
        with open(fnamecsv, 'w', encoding='utf-8') as fcsv:
            jsontocsv(temp, equipment, fcsv)
        print('\x1b[1;34;40m' + f'saving in {args.output} and {fnamecsv}' + '\x1b[0m')
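The result-summary printing above left-pads the first two columns to their own widths and centres the remaining cells on a shared width. A standalone sketch of the same layout logic on made-up rows:

# toy rows only; header and values are illustrative, not real results
rows = [
    ['req id', '  demand', 'snr@bandwidth', 'mode'],
    ['1', ' A to B :', '22.81', 'mode 1'],
    ['27', ' C to D :', '18.5', 'mode 2'],
]
col_width = max(len(word) for row in rows for word in row[2:])
firstcol_width = max(len(row[0]) for row in rows)
secondcol_width = max(len(row[1]) for row in rows)
for row in rows:
    firstcol = row[0].ljust(firstcol_width)
    secondcol = row[1].ljust(secondcol_width)
    remainingcols = ''.join(word.center(col_width, ' ') for word in row[2:])
    print(f'{firstcol} {secondcol} {remainingcols}')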
Example #16
def test_json_response_generation(xls_input, expected_response_file):
    """ tests if json response is correctly generated for all combinations of requests
    """
    data = convert_service_sheet(xls_input, eqpt_filename)
    # change one of the requests to bidirectional to cover the bidir case as well
    data['path-request'][2]['bidirectional'] = True

    equipment = load_equipment(eqpt_filename)
    network = load_network(xls_input, equipment)
    p_db = equipment['SI']['default'].power_dbm

    p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
        equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
    build_network(network, equipment, p_db, p_total_db)
    oms_list = build_oms_list(network, equipment)
    rqs = requests_from_json(data, equipment)
    rqs = correct_route_list(network, rqs)
    dsjn = disjunctions_from_json(data)
    dsjn = correct_disjn(dsjn)
    rqs, dsjn = requests_aggregation(rqs, dsjn)
    pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
    propagatedpths, reversed_pths, reversed_propagatedpths = \
        compute_path_with_disjunction(network, equipment, rqs, pths)
    pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)

    result = []
    for i, pth in enumerate(propagatedpths):
        # test ServiceError handling : when M is zero at this point, the
        # json result should not be created if there is no blocking reason
        if i == 1:
            my_rq = deepcopy(rqs[i])
            my_rq.M = 0
            with pytest.raises(ServiceError):
                Result_element(my_rq, pth, reversed_propagatedpths[i]).json

            my_rq.blocking_reason = 'NO_SPECTRUM'
            Result_element(my_rq, pth, reversed_propagatedpths[i]).json

        result.append(Result_element(rqs[i], pth, reversed_propagatedpths[i]))

    temp = {'response': [n.json for n in result]}
    # load expected result and compare keys and values

    with open(expected_response_file) as jsonfile:
        expected = load(jsonfile)
        # since we changed the bidir attribute of request #2, we need to add the
        # corresponding z-a metrics to the expected response

    for i, response in enumerate(temp['response']):
        if i == 2:
            # compare_response must be False because the z-a metric is missing
            # (this request was switched to bidir to cover the bidirectional case);
            # the print is shown by pytest only if the assertion fails
            print(f'response {response["response-id"]} should not match')
            assert not compare_response(expected['response'][i], response)
            expected['response'][2]['path-properties']['z-a-path-metric'] = [
                {'metric-type': 'SNR-bandwidth', 'accumulative-value': 22.809999999999999},
                {'metric-type': 'SNR-0.1nm', 'accumulative-value': 26.890000000000001},
                {'metric-type': 'OSNR-bandwidth', 'accumulative-value': 26.239999999999998},
                {'metric-type': 'OSNR-0.1nm', 'accumulative-value': 30.32},
                {'metric-type': 'reference_power', 'accumulative-value': 0.0012589254117941673},
                {'metric-type': 'path_bandwidth', 'accumulative-value': 60000000000.0},
            ]
            # test should be OK now
        else:
            # the print is shown by pytest only if the assertion fails
            print(f'response {response["response-id"]} is not correct')
            assert compare_response(expected['response'][i], response)
Example #17
"""
Reads a JSON path result file in accordance with the YANG model for requesting
path computation and writes the results to a CSV file.

See: draft-ietf-teas-yang-path-computation-01.txt
"""

from argparse import ArgumentParser
from pathlib import Path
from json import loads
from gnpy.core.equipment import load_equipment
from gnpy.core.request import jsontocsv


parser = ArgumentParser(description='A function that writes JSON path results into a CSV file.')
parser.add_argument('filename', nargs='?', type=Path)
parser.add_argument('output_filename', nargs='?', type=Path)
parser.add_argument('eqpt_filename', nargs='?', type=Path, default=Path(__file__).parent / 'eqpt_config.json')

if __name__ == '__main__':
    args = parser.parse_args()

    with open(args.output_filename, 'w', encoding='utf-8') as file:
        with open(args.filename, encoding='utf-8') as f:
            print(f'Reading {args.filename}')
            json_data = loads(f.read())
            equipment = load_equipment(args.eqpt_filename)
            print(f'Writing in {args.output_filename}')
            jsontocsv(json_data, equipment, file)

Example #18
parser.add_argument('-v', '--verbose', action='count', default=0, help='increases verbosity for each occurrence')
parser.add_argument('-l', '--list-nodes', action='store_true', help='list all transceiver nodes')
parser.add_argument('-po', '--power', default=0, help='channel ref power in dBm')
parser.add_argument('-names', '--names-matching', action='store_true', help='display network names that are closed matches')
parser.add_argument('filename', nargs='?', type=Path,
                    default=Path(__file__).parent / 'edfa_example_network.json')
parser.add_argument('source', nargs='?', help='source node')
parser.add_argument('destination', nargs='?', help='destination node')


if __name__ == '__main__':
    args = parser.parse_args()
    basicConfig(level={0: ERROR, 1: INFO, 2: DEBUG}.get(args.verbose, DEBUG))

    try:
        equipment = load_equipment(args.equipment)
        network = load_network(args.filename, equipment, args.names_matching)
        sim_params = load_sim_params(args.sim_params) if args.sim_params is not None else None
    except EquipmentConfigError as e:
        print(f'{ansi_escapes.red}Configuration error in the equipment library:{ansi_escapes.reset} {e}')
        exit(1)
    except NetworkTopologyError as e:
        print(f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {e}')
        exit(1)
    except ConfigurationError as e:
        print(f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {e}')
        exit(1)

    if args.plot:
        plot_baseline(network)
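The -v handling above maps the number of repeated flags to a logging level through a dict lookup, with DEBUG as the fallback for anything higher. A self-contained sketch of the same pattern:

from argparse import ArgumentParser
from logging import basicConfig, getLogger, ERROR, INFO, DEBUG

cli = ArgumentParser()
cli.add_argument('-v', '--verbose', action='count', default=0)
args = cli.parse_args(['-vv'])   # simulate passing -vv on the command line

# 0 occurrences -> ERROR, 1 -> INFO, 2 or more -> DEBUG
basicConfig(level={0: ERROR, 1: INFO, 2: DEBUG}.get(args.verbose, DEBUG))
getLogger(__name__).debug('verbosity %d enabled', args.verbose)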
Example #19
    def __init__(self, Request, eqpt_filename):
        # request_id is a str
        # Excel applies automatic number formatting that adds .0 to integer values;
        # the next lines recover the pure int value, assuming this .0 is unwanted
        if not isinstance(Request.request_id, str):
            value = str(int(Request.request_id))
            if value.endswith('.0'):
                value = value[:-2]
            self.request_id = value
        else:
            self.request_id = Request.request_id
        self.source = Request.source
        self.destination = Request.destination
        self.srctpid = f'trx {Request.source}'
        self.dsttpid = f'trx {Request.destination}'
        # test that trx_type belongs to eqpt_config.json
        # if not replace it with a default
        equipment = load_equipment(eqpt_filename)
        try:
            if equipment['Transceiver'][Request.trx_type]:
                self.trx_type = Request.trx_type
            if [mode for mode in equipment['Transceiver'][Request.trx_type].mode]:
                self.mode = Request.mode
        except KeyError:
            msg = f'could not find tsp : {Request.trx_type} with mode: {Request.mode} in eqpt library \nComputation stopped.'
            #print(msg)
            logger.critical(msg)
            exit()
        # excel inputs are in GHz and dBm
        self.spacing = Request.spacing * 1e9
        self.power = db2lin(Request.power) * 1e-3
        self.nb_channel = int(Request.nb_channel)
        if not isinstance(Request.disjoint_from, str):
            value = str(int(Request.disjoint_from))
            if value.endswith('.0'):
                value = value[:-2]
        else:
            value = Request.disjoint_from
        self.disjoint_from = [n for n in value.split()]
        self.nodes_list = []
        if Request.nodes_list:
            self.nodes_list = Request.nodes_list.split(' | ')
        try:
            self.nodes_list.remove(self.source)
            msg = f'{self.source} removed from explicit path node-list'
            logger.info(msg)
            # print(msg)
        except ValueError:
            msg = f'{self.source} already removed from explicit path node-list'
            logger.info(msg)
            # print(msg)
        try:
            self.nodes_list.remove(self.destination)
            msg = f'{self.destination} removed from explicit path node-list'
            logger.info(msg)
            # print(msg)
        except ValueError:
            msg = f'{self.destination} already removed from explicit path node-list'
            logger.info(msg)
            # print(msg)

        self.loose = 'loose'
        if Request.is_loose == 'no':
            self.loose = 'strict'

    def __init__(self, Request, eqpt_filename):
        # request_id is a str
        # Excel applies automatic number formatting that adds .0 to integer values;
        # the next lines recover the pure int value, assuming this .0 is unwanted
        self.request_id = correct_xlrd_int_to_str_reading(Request.request_id)
        self.source = Request.source
        self.destination = Request.destination
        # TODO: the automatic naming generated by the excel parser requires that source and dest
        # names be strings starting with 'trx'; this is manually added here.
        self.srctpid = f'trx {Request.source}'
        self.dsttpid = f'trx {Request.destination}'
        # test that trx_type belongs to eqpt_config.json
        # if not replace it with a default
        equipment = load_equipment(eqpt_filename)
        try:
            if equipment['Transceiver'][Request.trx_type]:
                self.trx_type = correct_xlrd_int_to_str_reading(
                    Request.trx_type)
            if Request.mode is not None:
                Requestmode = correct_xlrd_int_to_str_reading(Request.mode)
                if [mode for mode in equipment['Transceiver'][Request.trx_type].mode
                        if mode['format'] == Requestmode]:
                    self.mode = Requestmode
                else:
                    msg = f'Request Id: {self.request_id} - could not find tsp : \'{Request.trx_type}\' with mode: \'{Requestmode}\' in eqpt library \nComputation stopped.'
                    #print(msg)
                    logger.critical(msg)
                    exit(1)
            else:
                Requestmode = None
                self.mode = Request.mode
        except KeyError:
            msg = f'Request Id: {self.request_id} - could not find tsp : \'{Request.trx_type}\' with mode: \'{Requestmode}\' in eqpt library \nComputation stopped.'
            #print(msg)
            logger.critical(msg)
            exit()
        # excel inputs are in GHz and dBm
        if Request.spacing is not None:
            self.spacing = Request.spacing * 1e9
        else:
            msg = f'Request {self.request_id} missing spacing: spacing is mandatory.\ncomputation stopped'
            logger.critical(msg)
            exit()
        if Request.power is not None:
            self.power = db2lin(Request.power) * 1e-3
        else:
            self.power = None
        if Request.nb_channel is not None:
            self.nb_channel = int(Request.nb_channel)
        else:
            self.nb_channel = None

        value = correct_xlrd_int_to_str_reading(Request.disjoint_from)
        self.disjoint_from = [n for n in value.split(' | ') if value]
        self.nodes_list = []
        if Request.nodes_list:
            self.nodes_list = Request.nodes_list.split(' | ')

        # cleaning the list of nodes to remove source and destination
        # (because the rest of the program assumes that the node list only contains
        # intermediate nodes on the path, excluding source and destination)
        try:
            self.nodes_list.remove(self.source)
            msg = f'{self.source} removed from explicit path node-list'
            logger.info(msg)
        except ValueError:
            msg = f'{self.source} already removed from explicit path node-list'
            logger.info(msg)

        try:
            self.nodes_list.remove(self.destination)
            msg = f'{self.destination} removed from explicit path node-list'
            logger.info(msg)
        except ValueError:
            msg = f'{self.destination} already removed from explicit path node-list'
            logger.info(msg)

        # the excel parser applies the same hop-type to all nodes in the route nodes_list.
        # user can change this per node in the generated json
        self.loose = 'loose'
        if Request.is_loose == 'no':
            self.loose = 'strict'
        if Request.path_bandwidth is not None:
            self.path_bandwidth = Request.path_bandwidth * 1e9
        else:
            self.path_bandwidth = 0
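correct_xlrd_int_to_str_reading itself is not shown in the excerpt; judging from the surrounding comments, its role is to undo the float formatting that xlrd applies to numeric Excel cells. A plausible standalone sketch of that behaviour (an assumption, not the library's actual implementation):

# illustrative only: xlrd hands numeric cells back as floats, so an id typed as 3
# arrives as 3.0; this helper turns it back into the string '3'
def normalize_xlrd_cell(value):
    if isinstance(value, float) and value.is_integer():
        return str(int(value))      # 3.0 -> '3'
    return str(value)               # strings and non-integer numbers pass through

assert normalize_xlrd_cell(3.0) == '3'
assert normalize_xlrd_cell('route-7') == 'route-7'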