Example No. 1
def get_field_test_ecl_info():
    # important cell info: msg1 rep, msg3 rep, threshold, ecl
    ddp = DeploymentDataProcessor('ls')
    rootpath = 'F:/field_test_output_copy/decoded_pkl_1/'
    flist = []
    for root, dirs, files in os.walk(rootpath):
        if len(files) != 0:
            for f in files:
                flist.append(root + '/' + f)
    print(len(flist))
    df = pd.DataFrame(columns=[
        'node_id', 'test_id', 'pack_id', 'ra_id', 'ecl', 'selected_by',
        'threshold'
    ])
    i = 0
    for f in flist:
        fn = f.split('/')[-1].split('.')[0].split('_')
        node_id, test_id, pack_id, ra_id = fn[2], fn[4], fn[5], 1

        msg_list = ddp.load_pickle_file(f)
        for m in msg_list:
            if m[3] == 'LL1_LOG_ECL_INFO':
                if i != 0 and (node_id, pack_id,
                               test_id) == (df.iloc[i - 1]['node_id'],
                                            df.iloc[i - 1]['pack_id'],
                                            df.iloc[i - 1]['test_id']):
                    ra_id += 1
                ecl = m[7]['current_ecl']
                ecl_by = m[7]['ecl_selected_by'].split('(')[0]
                threshold = m[7]['threshold']
                df.loc[i] = [
                    node_id, test_id, pack_id, ra_id, ecl, ecl_by, threshold
                ]
                i += 1
    df.to_csv('D:/temp/field_test_ecl.csv', index=False)
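The indexing fn[2], fn[4], fn[5] assumes a fixed, underscore-separated layout for the decoded-pkl filenames. A minimal parsing sketch, using a hypothetical filename (the real token layout is not shown in these examples):

# Hypothetical decoded-pkl filename; only the underscore positions matter here.
sample = 'F:/field_test_output_copy/decoded_pkl_1/sub/log_decoded_n07_ft_12_3.pkl'

fn = sample.split('/')[-1].split('.')[0].split('_')
# fn -> ['log', 'decoded', 'n07', 'ft', '12', '3']
node_id, test_id, pack_id = fn[2], fn[4], fn[5]
print(node_id, test_id, pack_id)  # -> n07 12 3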
Example No. 2
def get_msg3_rtx_field_test():
    ddp = DeploymentDataProcessor('ls')
    rootpath = 'F:/field_test_output_copy/decoded_pkl_1/'
    flist = []
    for root, dirs, files in os.walk(rootpath):
        if len(files) != 0:
            for f in files:
                flist.append(root + '/' + f)
    print(len(flist))
    df = pd.DataFrame(columns=[
        'node_id', 'test_id', 'pack_id', 'msg1_tx', 'msg2_rx', 'msg4_rx'
    ])
    i = 0
    for f in flist:
        fn = f.split('/')[-1].split('.')[0].split('_')
        node_id, test_id, pack_id = fn[2], fn[4], fn[5]
        count_msg1 = 0
        count_msg2 = 0
        count_msg4 = 0
        msg_list = ddp.load_pickle_file(f)
        for m in msg_list:
            if m[3] == 'LL1_RAR_UL_GRANT':
                count_msg2 += 1
            elif m[3] == 'LL1_NPRACH_START_TIME':
                count_msg1 += 1
            elif m[3] == 'LL1_RACH_CONTENTION_RESOLUTION_SUCCESS_IND':
                count_msg4 += 1
        df.loc[i] = [
            node_id, test_id, pack_id, count_msg1, count_msg2, count_msg4
        ]
        i += 1
    df.to_csv('D:/temp/field_test_msg3.csv', index=False)
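The per-file msg1/msg2/msg4 tallies above are a simple count over the log name at m[3]. A collections.Counter gives the same three counts without the explicit counter variables; a sketch, assuming the same tuple layout (log name at index 3):

from collections import Counter

def count_ra_messages(msg_list):
    # Tally every log name once, then read out the three RA-related ones.
    counts = Counter(m[3] for m in msg_list)
    return (counts['LL1_NPRACH_START_TIME'],                        # msg1 tx
            counts['LL1_RAR_UL_GRANT'],                             # msg2 rx
            counts['LL1_RACH_CONTENTION_RESOLUTION_SUCCESS_IND'])   # msg4 rx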
Example No. 3
def stage_3(flag):
    print('<Stage 3> Aggregate Extracted Info')
    ddp = DeploymentDataProcessor(whoami)
    if flag == 'ecl':
        csv_dir = ddp.rse_csv_output_dir
    elif flag == 'rar':
        csv_dir = ddp.rar_csv_dir
    elif flag == 'dci':
        csv_dir = ddp.dci_csv_dir
    elif flag == 'nrs':
        csv_dir = ddp.rse_csv_output_dir
    else:
        raise ValueError('unknown flag: ' + str(flag))
    ddp.aggregate_extracted_info_to_csv(flag, csv_dir)
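The flag-to-directory mapping in stage_3 ('ecl' and 'nrs' both aggregate from the RSE CSV directory) can also be written as a lookup table. A table-driven sketch of the same dispatch, assuming the same module context (DeploymentDataProcessor, whoami); the name stage_3_table is hypothetical:

def stage_3_table(flag):
    print('<Stage 3> Aggregate Extracted Info')
    ddp = DeploymentDataProcessor(whoami)
    csv_dirs = {
        'ecl': ddp.rse_csv_output_dir,
        'nrs': ddp.rse_csv_output_dir,  # shares the RSE CSV directory with 'ecl'
        'rar': ddp.rar_csv_dir,
        'dci': ddp.dci_csv_dir,
    }
    if flag not in csv_dirs:
        raise ValueError('unknown flag: ' + str(flag))
    ddp.aggregate_extracted_info_to_csv(flag, csv_dirs[flag])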
Example No. 4
def get_msg3_rtx_1():
    # Repetition setting for this run; earlier runs used rep = 8 / 16 / 32.
    rep = 16  # matches the 'rep=16.csv' output file below
    # dbg_log = 'D:/Nutstore/我的坚果云/Field Test/Amarisoft_SDR_Optimization/Msg3_Rep/DBG_Log/'
    # get_pkl(dbg_log + 'Msg3_Rep=' + str(rep) + '/')
    # dbg_log = 'D:/Nutstore/我的坚果云/Field Test/Amarisoft_SDR_Optimization/Inactivetimer/DBG/Timer=20s/'
    # get_pkl(dbg_log)

    ddp = DeploymentDataProcessor('ls')
    rootpath = 'D:/Nutstore/我的坚果云/Field Test/Deployment Experiment Output/decoded_pkl/'
    flist = []
    for root, dirs, files in os.walk(rootpath):
        if len(files) != 0:
            for f in files:
                flist.append(root + '/' + f)
    df = pd.DataFrame(columns=[
        'node_id', 'test_id', 'pack_id', 'msg3_rep', 'msg1_tx', 'msg2_rx',
        'msg4_rx'
    ])
    i = 0
    for f in flist:
        fn = f.split('/')[-1].split('.')[0].split('_')
        node_id, test_id, pack_id = fn[0], fn[2], fn[3]
        count_msg1 = 0
        count_msg2 = 0
        count_msg4 = 0
        msg_list = ddp.load_pickle_file(f)
        for m in msg_list:
            if m[3] == 'LL1_RAR_UL_GRANT':
                count_msg2 += 1
            elif m[3] == 'LL1_NPRACH_START_TIME':
                count_msg1 += 1
            elif m[3] == 'LL1_RACH_CONTENTION_RESOLUTION_SUCCESS_IND':
                count_msg4 += 1
        df.loc[i] = [
            node_id, test_id, pack_id, rep, count_msg1, count_msg2, count_msg4
        ]
        i += 1
    df.to_csv('D:/temp/rep=16.csv', index=False)
Example No. 5
def get_field_test_msg3_info():
    # important cell info: msg1 rep, msg3 rep, threshold, ecl
    ddp = DeploymentDataProcessor('ls')
    rootpath = 'F:/field_test_output_copy/decoded_pkl_1/'
    flist = []
    for root, dirs, files in os.walk(rootpath):
        if len(files) != 0:
            for f in files:
                flist.append(root + '/' + f)
    print(len(flist))
    df = pd.DataFrame(columns=['node_id', 'test_id', 'pack_id', 'ra_id'] + [
        'subcarrier_ind_nsc', 'pusch_start_subframe',
        'modulation_coding_scheme_tbs', 'repetition_number',
        'subcarrier_space', 'modulation_coding_scheme_imcs',
        'modulation_coding_scheme_itbs', 'modulation_coding_scheme_qm',
        'subcarrier_ind_isc', 'subcarrier_ind_len', 'resource_unit_number',
        'result', 'expected_rapid'
    ])
    i = 0
    for f in flist:
        fn = f.split('/')[-1].split('.')[0].split('_')
        node_id, test_id, pack_id, ra_id = fn[2], fn[4], fn[5], 1

        msg_list = ddp.load_pickle_file(f)
        for m in msg_list:
            if m[3] == 'LL1_RAR_UL_GRANT':
                if i != 0 and (node_id, pack_id,
                               test_id) == (df.iloc[i - 1]['node_id'],
                                            df.iloc[i - 1]['pack_id'],
                                            df.iloc[i - 1]['test_id']):
                    ra_id += 1

                df.loc[i] = [node_id, test_id, pack_id, ra_id] + list(
                    m[7]['rar_pdu'].values()) + [
                        m[7]['result'], m[7]['expected_rapid']
                    ]
                i += 1
    df.to_csv('D:/temp/field_test_msg3.csv', index=False)
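Appending list(m[7]['rar_pdu'].values()) next to a hand-written column list relies on the rar_pdu dict iterating in exactly that order. If the rar_pdu keys match the column names used above (an assumption, not confirmed by the snippet), pulling values by key keeps column order and value order from drifting apart:

RAR_PDU_COLS = [
    'subcarrier_ind_nsc', 'pusch_start_subframe',
    'modulation_coding_scheme_tbs', 'repetition_number',
    'subcarrier_space', 'modulation_coding_scheme_imcs',
    'modulation_coding_scheme_itbs', 'modulation_coding_scheme_qm',
    'subcarrier_ind_isc', 'subcarrier_ind_len', 'resource_unit_number',
]

def rar_grant_row(payload):
    # payload is m[7] from an LL1_RAR_UL_GRANT entry; keys assumed to match the columns.
    return [payload['rar_pdu'][k] for k in RAR_PDU_COLS] + [
        payload['result'], payload['expected_rapid']
    ]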
Example No. 6
def get_field_test_msg1_info():
    # important cell info: msg1 rep, msg3 rep, threshold, ecl
    ddp = DeploymentDataProcessor('ls')
    rootpath = 'F:/field_test_output_copy/decoded_pkl_1/'
    flist = []
    for root, dirs, files in os.walk(rootpath):
        if len(files) != 0:
            for f in files:
                flist.append(root + '/' + f)
    print(len(flist))
    df = pd.DataFrame(columns=[
        'node_id', 'test_id', 'pack_id', 'ra_id', 'hfn1', 'sfn1', 'sf1',
        'hfn2', 'sfn2', 'sf2'
    ])
    i = 0
    for f in flist:
        fn = f.split('/')[-1].split('.')[0].split('_')
        node_id, test_id, pack_id, ra_id = fn[2], fn[4], fn[5], 1

        msg_list = ddp.load_pickle_file(f)
        for m in msg_list:
            if m[3] == 'LL1_NPRACH_START_TIME':
                if i != 0 and (node_id, pack_id,
                               test_id) == (df.iloc[i - 1]['node_id'],
                                            df.iloc[i - 1]['pack_id'],
                                            df.iloc[i - 1]['test_id']):
                    ra_id += 1
                hfn1, sfn1, sf1 = m[7]['hfn'], m[7]['sfn'], m[7]['sf']
            elif m[3] == 'LL1_NPRACH_END_TIME':
                hfn2, sfn2, sf2 = m[7]['hfn'], m[7]['sfn'], m[7]['sf']

                df.loc[i] = [
                    node_id, test_id, pack_id, ra_id, hfn1, sfn1, sf1, hfn2,
                    sfn2, sf2
                ]
                i += 1
    df.to_csv('D:/temp/field_test_msg1.csv', index=False)
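Each row pairs an LL1_NPRACH_START_TIME and an LL1_NPRACH_END_TIME timestamp as (hfn, sfn, sf) triples. Assuming the usual LTE/NB-IoT framing (1 subframe = 1 ms, 10 subframes per radio frame, 1024 radio frames per hyperframe), the msg1 (NPRACH) duration can be recovered from the exported CSV; a sketch with those constants stated as assumptions:

# Frame-timing constants assumed from LTE/NB-IoT numerology (not given in the snippet).
SF_PER_FRAME = 10             # 1 radio frame = 10 subframes = 10 ms
FRAMES_PER_HYPERFRAME = 1024  # 1 hyperframe = 1024 radio frames = 10.24 s

def nprach_duration_ms(hfn1, sfn1, sf1, hfn2, sfn2, sf2):
    # Elapsed ms between the NPRACH start and end timestamps; assumes no hfn wrap-around.
    start = (hfn1 * FRAMES_PER_HYPERFRAME + sfn1) * SF_PER_FRAME + sf1
    end = (hfn2 * FRAMES_PER_HYPERFRAME + sfn2) * SF_PER_FRAME + sf2
    return end - start

print(nprach_duration_ms(0, 100, 2, 0, 103, 8))  # -> 36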
Example No. 7
def get_ra_info():
    ddp = DeploymentDataProcessor('ls')
    rootpath = 'F:/field_test_output_copy/decoded_pkl_1/'
    flist = []
    for root, dirs, files in os.walk(rootpath):
        if len(files) != 0:
            for f in files:
                flist.append(root + '/' + f)
    print(len(flist))
    df = pd.DataFrame(columns=['node_id', 'test_id', 'pack_id', 'ra_id'] + [
        'ecl', 'selected_by', 'threshold', 'hfn1', 'sfn1', 'sf1', 'hfn2',
        'sfn2', 'sf2', 'subcarrier_ind_nsc', 'pusch_start_subframe',
        'modulation_coding_scheme_tbs', 'repetition_number',
        'subcarrier_space', 'modulation_coding_scheme_imcs',
        'modulation_coding_scheme_itbs', 'modulation_coding_scheme_qm',
        'subcarrier_ind_isc', 'subcarrier_ind_len', 'resource_unit_number',
        'result', 'expected_rapid'
    ])
    i = 0
    states = {
        'START': 0,
        'ECL': 1,
        'MSG1_START': 2,
        'MSG1_END': 3,
        'RAR': 4,
        'FINISHED': 5
    }
    ecl_list = 3 * [None]
    msg1_start_list = 3 * [None]
    msg1_end_list = 3 * [None]
    rar_list = 13 * [None]
    for j in range(0, len(flist)):
        f = flist[j]
        if j % 10 == 0:
            print('DEBUG: ', j, ' / ', len(flist))
        fn = f.split('/')[-1].split('.')[0].split('_')
        node_id, test_id, pack_id, ra_id = fn[2], fn[4], fn[5], 1
        msg_list = ddp.load_pickle_file(f)
        st = states['START']

        for m in msg_list:
            if st == states['START']:
                if m[3] == 'LL1_LOG_ECL_INFO':
                    ecl_list = [
                        m[7]['current_ecl'],
                        m[7]['ecl_selected_by'].split('(')[0],
                        m[7]['threshold']
                    ]
                    st = states['ECL']
                elif m[3] == 'LL1_NPRACH_START_TIME':
                    msg1_start_list = [m[7]['hfn'], m[7]['sfn'], m[7]['sf']]
                    st = states['MSG1_START']
                elif m[3] == 'LL1_NPRACH_END_TIME':
                    msg1_end_list = [m[7]['hfn'], m[7]['sfn'], m[7]['sf']]
                    st = states['MSG1_END']
                elif m[3] == 'LL1_RAR_UL_GRANT':
                    rar_list = list(m[7]['rar_pdu'].values()) + [
                        m[7]['result'], m[7]['expected_rapid']
                    ]
                    st = states['RAR']
            elif st == states['ECL']:
                if m[3] == 'LL1_NPRACH_START_TIME':
                    msg1_start_list = [m[7]['hfn'], m[7]['sfn'], m[7]['sf']]
                    st = states['MSG1_START']
            elif st == states['MSG1_START']:
                if m[3] == 'LL1_NPRACH_END_TIME':
                    msg1_end_list = [m[7]['hfn'], m[7]['sfn'], m[7]['sf']]
                    st = states['MSG1_END']
            elif st == states['MSG1_END']:
                if m[3] == 'LL1_RAR_UL_GRANT':
                    rar_list = list(m[7]['rar_pdu'].values()) + [
                        m[7]['result'], m[7]['expected_rapid']
                    ]
                    st = states['RAR']
            elif st == states['RAR']:
                if m[3] == 'LL1_LOG_ECL_INFO':
                    # last RA finished and start new RA
                    df.loc[i] = [
                        node_id, test_id, pack_id, ra_id
                    ] + ecl_list + msg1_start_list + msg1_end_list + rar_list
                    i += 1
                    ecl_list = 3 * [None]
                    msg1_start_list = 3 * [None]
                    msg1_end_list = 3 * [None]
                    rar_list = 13 * [None]

                    ra_id += 1
                    ecl_list = [
                        m[7]['current_ecl'],
                        m[7]['ecl_selected_by'].split('(')[0],
                        m[7]['threshold']
                    ]
                    st = states['ECL']
        df.loc[i] = [node_id, test_id, pack_id, ra_id
                     ] + ecl_list + msg1_start_list + msg1_end_list + rar_list
        i += 1
        ecl_list = 3 * [None]
        msg1_start_list = 3 * [None]
        msg1_end_list = 3 * [None]
        rar_list = 13 * [None]

    df.to_csv('D:/temp/field_test_ra_info.csv', index=False)
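get_ra_info() walks each file with a small state machine: START, ECL, MSG1_START, MSG1_END, RAR, in that order; a new LL1_LOG_ECL_INFO arriving while in RAR flushes the completed random-access attempt as one row and starts the next one, and the last (possibly partial) attempt is flushed after the loop (the declared 'FINISHED' state is never reached). The flush-and-reset pattern, reduced to a self-contained sketch over a synthetic message list with the same indices (log name at m[3], payload at m[7]) and without the strict in-order filtering of the original:

# Synthetic messages: index 3 is the log name, index 7 the decoded payload.
msgs = [
    (0, 0, 0, 'LL1_LOG_ECL_INFO',      0, 0, 0, {'current_ecl': 1}),
    (0, 0, 0, 'LL1_NPRACH_START_TIME', 0, 0, 0, {'sfn': 100}),
    (0, 0, 0, 'LL1_NPRACH_END_TIME',   0, 0, 0, {'sfn': 103}),
    (0, 0, 0, 'LL1_RAR_UL_GRANT',      0, 0, 0, {'result': 'ok'}),
    (0, 0, 0, 'LL1_LOG_ECL_INFO',      0, 0, 0, {'current_ecl': 2}),  # starts RA attempt #2
]

RA_LOGS = {'LL1_LOG_ECL_INFO', 'LL1_NPRACH_START_TIME',
           'LL1_NPRACH_END_TIME', 'LL1_RAR_UL_GRANT'}

rows, current = [], {}
for m in msgs:
    name = m[3]
    if name not in RA_LOGS:
        continue
    if name == 'LL1_LOG_ECL_INFO' and 'LL1_RAR_UL_GRANT' in current:
        rows.append(current)   # previous attempt completed: flush it as one row
        current = {}
    current[name] = m[7]
rows.append(current)           # flush the last, possibly partial, attempt
print(len(rows))               # -> 2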
Example No. 8
def stage_4(flag):
    # merge the sum output data with meta csv
    print('<Stage 4> Merge with meta data to post_ft_processing')
    ddp = DeploymentDataProcessor(whoami)
    if flag == 1:
        ddp.merge_msg3_data_rep_with_meta()
Example No. 9
def stage_2(cmd, one_file_name):
    # Input from PKL, process and output the results.
    print('<Stage 2> Message list => Extracted info')
    ddp = DeploymentDataProcessor(whoami)
    if cmd == 'print':
        print(ddp.get_decoded_log_names())
    if cmd == 'one':
        ml = ddp.load_pickle_file(ddp.pkl_dir+one_file_name)
        print(ml[0])
        # ddp.extract_rsrp_snr_ecl(ddp.pkl_dir + one_file_name)
        # ddp.get_rsrp_snr_ecl_list(ml)
    if cmd == 'test':
        ml = ddp.load_pickle_file(ddp.pkl_dir+one_file_name)
        # Alternative log names for ad-hoc inspection; only the last assignment is active.
        # log_name = 'LL1_RAR_UL_GRANT'
        # log_name = 'LL1_PUSCH_CALC_TX_POWER'
        # log_name = 'LL1_LOG_ECL_INFO'
        log_name = 'EMM_PSM_STATUS_IND'
        ddp.take_out_specific_logs_from_msg_list(ml, log_name, 'visualize_seq_num/实验楼2F_消防栓/')

    if cmd == 'ecl':
        ddp.extract_rsrp_snr_ecl(ddp.pkl_dir+one_file_name)
    if cmd == 'stat':
        ddp.msg_list_stats(ddp.pkl_dir+one_file_name)
    if cmd == 'dci':
        ddp.extract_dci(ddp.pkl_dir + one_file_name)
    if cmd == 'rar':
        ddp.extract_msg3_repetition(ddp.pkl_dir + one_file_name)
    if cmd == 'all':
        '''
        Note1: remember to change the function in pipeline_process_all()
        Note2: select between the two file lists below. They are different.
        '''
        file_list = ddp.get_decoded_log_names()
        # file_list = ddp.get_rse_csv_names()
        # file_list = ddp.get_bler_report_csv_names()
        ddp.pipeline_process_all(file_list)
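stage_2(), stage_3() and stage_4() together form a staged pipeline selected by a command string and a flag. A minimal argparse driver for wiring them to the command line could look like the sketch below; it assumes the same module context, and the argument names and defaults are assumptions, not part of the original scripts:

import argparse

def main():
    parser = argparse.ArgumentParser(description='Field-test log processing pipeline')
    parser.add_argument('stage', choices=['2', '3', '4'], help='pipeline stage to run')
    parser.add_argument('--cmd', default='print', help="stage 2 command, e.g. 'one', 'ecl', 'all'")
    parser.add_argument('--file', default='', help='decoded pkl file name for stage 2')
    parser.add_argument('--flag', default='ecl', help="stage 3 flag: 'ecl', 'rar', 'dci' or 'nrs'")
    args = parser.parse_args()

    if args.stage == '2':
        stage_2(args.cmd, args.file)
    elif args.stage == '3':
        stage_3(args.flag)
    else:
        stage_4(1)

if __name__ == '__main__':
    main()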