Example #1
def processLogDirectory(logDirRoot, resultCSVPath):
    '''Iterate over a log directory, process each LISP trace log and write the results to a CSV file.'''

    # Only process actual log files; skip everything else in the directory.
    import csv
    import os
    with open(resultCSVPath, 'w', newline='') as csvfile:
        spamwriter = csv.writer(csvfile, dialect='excel', delimiter=';')
        spamwriter.writerow([
            'Log File Name', 'Locator Count Coherence', 'Round Type Set',
            'Different Locator Count', 'Different Locator',
            'Locator count flap'
        ])
        for log_file in os.listdir(logDirRoot):
            # Verify that the current entry really is a log file,
            # otherwise the program may crash.
            if log_file.endswith(".log"):
                file_path = os.path.join(logDirRoot, log_file)
                R = RoundInstanceFactory(file_path)
                csv_row = [file_path]
                csv_row.append(R.RLOCSetCoherent)
                csv_row.append(R.round_type_list)
                csv_row.append(R.getLocatorCountSet())
                csv_row.append(R.getLocatorSet())
                # csv_row.append(R.isLocatorCountFlap())
                # csv_row.append(R.isLocatorsFlap())
                # Output case
                csv_row.append(R.jugeLogCase())
                csv_row.extend(R.locator_addr_list)
                spamwriter.writerow(csv_row)
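A minimal call sketch, assuming the surrounding module also defines RoundInstanceFactory; the directory and output paths below are hypothetical placeholders, not part of the original code:

if __name__ == '__main__':
    # Hypothetical paths; point these at the real trace-log directory and output file.
    processLogDirectory('/path/to/lisp-trace-logs', 'lisp_trace_summary.csv')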
Example #2
def worker(vantage, log_file, q):
    '''Process a single LISP trace log and put the resulting CSV row onto the queue q.'''
    R = RoundInstanceFactory(log_file)
    #csv_row = [arg, R.isRLOCSetCoherent(), R.getRoundTypeSet()]
    csv_row = [vantage, log_file, R.EID, R.resolver]
    csv_row.append(",".join(R.MAPPING_ENTRY))  # Yue added MAPPING_ENTRY into CSV files
    csv_row.append(R.coherent)
    csv_row.append(R.RLOCSetCoherent)  # 'Locator Count Consistence'
    csv_row.append(R.TECoherent)       # 'TE coherent'

    csv_row.append(R.case)  # Judged log-file case

    csv_row.append(",".join(R.round_type_list))  # 'Round Type List'

    # Add two columns: locator_count_list and locator_list
    csv_row.append(len(R.locator_count_list))       # 'Different Locators Count'
    csv_row.append(",".join(R.locator_count_list))  # 'Locators Count Set'

    csv_row.append(len(R.locator_list))  # 'Different locators'
    csv_row.append(R.getLocatorSet())    # 'Locators set'

    #csv_row.append(R.isLocatorCountFlap())
    #csv_row.append(R.isLocatorsFlap())

    # Detailed change statistics for Case 1, 3 and 4
    # Case 1 details: nd_number, change_time, pattern = R.statistics_new_deployment()
    csv_row.extend(R.statistics_new_deployment())

    # Case 3 & Case 4 details
    csv_row.extend(R.statistics_Case3_Case4())

    # The purpose of this column was forgotten, so it is left commented out for now
    # csv_row.append(sys.getsizeof(R))

    # One RLOC per column
    csv_row.extend(R.locator_addr_list)

    q.put(csv_row)
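The function above only builds the row and puts it on q; the driver is not shown on this page. A rough sketch of one plausible driver, starting one process per log file with multiprocessing and draining the queue into a CSV; collect_results, the vantage argument and all paths are hypothetical names introduced here, not part of the original code:

import csv
import multiprocessing
import os

def collect_results(log_dir, vantage, result_csv):
    # Hypothetical driver: one worker process per .log file,
    # each worker puts exactly one csv_row onto the shared queue.
    q = multiprocessing.Queue()
    jobs = []
    for name in os.listdir(log_dir):
        if name.endswith('.log'):
            p = multiprocessing.Process(
                target=worker, args=(vantage, os.path.join(log_dir, name), q))
            p.start()
            jobs.append(p)
    # Drain the queue before joining so the workers never block on a full queue.
    with open(result_csv, 'w', newline='') as f:
        writer = csv.writer(f, dialect='excel', delimiter=';')
        for _ in jobs:
            writer.writerow(q.get())
    for p in jobs:
        p.join()

Spawning one process per file keeps the sketch short; for large directories a process pool would be the more economical choice.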