def processLogDirectory(logDirRoot, resultCSVPath):
    """Iterate over a log directory, process each LISP trace log and
    write one summary row per log file into a CSV result file.

    :param logDirRoot: directory containing the ``.log`` trace files.
    :param resultCSVPath: path of the CSV file to create (overwritten).
    """
    import csv
    # 'wb' mode: this codebase targets Python 2, where the csv module
    # expects a binary-mode file object.
    with open(resultCSVPath, 'wb') as csvfile:
        spamwriter = csv.writer(csvfile, dialect='excel', delimiter=';')
        spamwriter.writerow([
            'Log File Name', 'Locator Count Coherence', 'Round Type Set',
            'Different Locator Count', 'Different Locator',
            'Locator count flap'
        ])
        for log_file in os.listdir(logDirRoot):
            # Only process genuine trace logs; other files in the
            # directory would make RoundInstanceFactory fail.
            if not log_file.endswith(".log"):
                continue
            file_path = os.path.join(logDirRoot, log_file)
            R = RoundInstanceFactory(file_path)
            csv_row = [file_path]
            csv_row.append(R.RLOCSetCoherent)
            csv_row.append(R.round_type_list)
            csv_row.append(R.getLocatorCountSet())
            csv_row.append(R.getLocatorSet())
            # Classification of the log file (output case).
            csv_row.append(R.jugeLogCase())
            # One RLOC address per trailing column.
            csv_row.extend(R.locator_addr_list)
            spamwriter.writerow(csv_row)
def generateMultiCSVRows(logDirRoot):
    """Build in-memory CSV rows for every LISP trace log in *logDirRoot*.

    Returns a list of rows: a header row followed by one row per
    ``.log`` file, each holding the file path plus the results of the
    round instance's basic checks.
    """
    rows = [['Log File Name', 'Locator Count Coherence', 'Round Type Set']]
    for entry in os.listdir(logDirRoot):
        if not entry.endswith(".log"):
            continue
        full_path = os.path.join(logDirRoot, entry)
        row = [full_path]
        row.extend(RoundInstanceFactory(full_path).basicCheck())
        rows.append(row)
    return rows
def worker(vantage, log_file, q):
    """Process one LISP trace log and push the resulting CSV row onto *q*.

    Intended to run as a long-running worker process: the result is
    communicated exclusively through the queue, nothing is returned.

    :param vantage: identifier of the vantage point the log came from.
    :param log_file: path of the trace log to analyse.
    :param q: queue (e.g. multiprocessing.Queue) receiving the row list.
    """
    R = RoundInstanceFactory(log_file)
    csv_row = [vantage, log_file, R.EID, R.resolver]
    # Mapping entries joined into a single CSV cell.
    csv_row.append(",".join(R.MAPPING_ENTRY))
    csv_row.append(R.coherent)
    csv_row.append(R.RLOCSetCoherent)  # locator-count consistency
    csv_row.append(R.TECoherent)  # traffic-engineering coherence
    csv_row.append(R.case)  # judged log-file case
    csv_row.append(",".join(R.round_type_list))  # round type list
    # Locator-count statistics: number of distinct counts, then the set.
    csv_row.append(len(R.locator_count_list))
    csv_row.append(",".join(R.locator_count_list))
    # Locator statistics: number of distinct locators, then the set.
    csv_row.append(len(R.locator_list))
    csv_row.append(R.getLocatorSet())
    # Detailed change statistics for Case 1 (new deployment) and for
    # Cases 3 & 4. Each statistics method is invoked exactly once: the
    # original code called statistics_new_deployment() three times and
    # discarded the unpacked (nd_number, change_time, pattern) results.
    csv_row.extend(R.statistics_new_deployment())
    csv_row.extend(R.statistics_Case3_Case4())
    # One RLOC address per trailing column.
    csv_row.extend(R.locator_addr_list)
    q.put(csv_row)
# Command-line entry point: convert a single LISP trace log into a CSV file.
import argparse
parser = argparse.ArgumentParser(
    description=
    'This utility is devoted to convert an input LISP trace log file into a formatted CSV file.'
)
# Positional argument: the trace log file to convert.
parser.add_argument('log', action="store",
                    help='indicate the LISP trace log file path.')
# Optional output directory; defaults to "log/".
parser.add_argument(
    '--dst',
    action="store",
    dest='destination',
    default="log/",
    help='indicate which directory stores the generated files.')
args = parser.parse_args()
# Echo the parsed arguments (Python 2 print statements).
print '**************************The summary of provided arguments for this utility shown as following:**********************'
print 'logfile =', args.log
print 'output generated csv directory =', args.destination
# The generated CSV keeps the input log's base name with ".csv" appended.
default_csv_name = os.path.basename(args.log) + '.csv'
# The following variable stores the full path for generated CSV file.
csv_file_path = os.path.join(args.destination, default_csv_name)
print 'generated csv file\'s full path =', csv_file_path
RoundInstanceFactory(args.log).write2csv(csv_file_path)  # exercise the conversion (translated from Chinese comment)
def worker(csv_traces_dir, log_file_full_path):
    """Convert one trace log into a CSV file stored in *csv_traces_dir*.

    The output file reuses the log's base name with a ".csv" suffix.
    """
    base_name = os.path.basename(log_file_full_path)
    target_csv = csv_traces_dir + "/" + base_name + ".csv"
    RoundInstanceFactory(log_file_full_path).write2csv(target_csv)