def main():
    """CLI entry point.

    Expected argv: <input file> <output file> <profiler> <start end> [<start end> ...]
    where profiler is 'CPU' or 'FILE_IO' (case-insensitive) and each
    start/end pair defines a time section in seconds.

    Raises:
        ValueError: on a malformed argument list, a non-numeric or invalid
            section boundary, a missing CSV inside a .zip input, or an
            unknown profiler name.
    """
    args = sys.argv[1:]
    # Need input, output, profiler plus at least one (start, end) pair,
    # so the total must be odd and >= 5.
    if len(args) < 5 or (len(args) & 1) != 1:
        raise ValueError(f'Invalid number of arguments: {len(args)}')
    f_in = args[0]
    f_out = args[1]
    prof = args[2].upper()

    sections = []
    for i in range(3, len(args), 2):
        try:
            start = float(args[i])
            end = float(args[i + 1])
        except ValueError:
            # BUG FIX: the original re-raised with f'[{start}, {end}]', but
            # those locals are unbound when float() is what raised, turning
            # the intended message into a NameError. Report the raw tokens.
            raise ValueError(f'Incorrect input: [{args[i]}, {args[i + 1]}]')
        # Section must be non-negative and non-inverted.
        if start > end or start < 0:
            raise ValueError(f'Incorrect section: [{start}, {end}]')
        # Skip duplicated sections.
        sec = Section(start, end)
        if sec not in sections:
            sections.append(sec)

    # A .zip input must contain a CSV with the same base name.
    if f_in.endswith('.zip'):
        with zipfile.ZipFile(f_in, 'r') as zp:
            f_in = f_in[:-4] + '.csv'
            if f_in in zp.namelist():
                zp.extract(f_in)
            else:
                raise ValueError(f'File {f_in} was not found in archive')

    if prof == 'CPU':
        cpu = CPU(f_in, sections)
        cpu.filter()
        top_processes = cpu.get_top_processes()
        top_modules = cpu.get_top_modules()
        # Write report to the output file.
        log_cpu(f_out, top_processes, top_modules)
    elif prof == 'FILE_IO':
        file_io = FileIO(f_in, sections)
        file_io.filter()
        top_durations = file_io.get_top_durations()
        top_sizes = file_io.get_top_sizes()
        # Write report to the output file.
        log_file_io(f_out, top_durations, top_sizes)
    else:
        # BUG FIX: an unrecognized profiler name used to be silently ignored.
        raise ValueError(f'Unknown profiler: {prof}')
def test_FileIO_methods(self):
    """FileIO.filter/get_top_durations/get_top_sizes over two sections.

    Builds a small CSV fixture with identical event rows for three
    processes (p1, p2, p3) and checks the per-section top lists.
    """
    f_in = 'file_io_test.csv'
    sections = [Section(11, 15), Section(15, 17)]
    # Every process gets the same set of events; only the process name
    # differs, so write one row template per event and loop.
    rows = (
        'Create,,,"11,1","11,2","100000",,,0,C:\\Program Files...,0,...,...,1',
        'Create,,,"11,1","12,1","1000000",,,0,C:\\Program Files...,0,...,...,1',
        'Read,,,"13,1","14,2","1100000",,,1,C:\\ProgramData...,0,...,...,1',
        'Read,,,"13,1","14,2","1100000",,,2,C:\\Program Files...,0,...,...,1',
        'Read,,,"15,1","16,2","1100000",,,3,C:\\ProgramData...,0,...,...,1',
        'Read,,,"15,1","16,2","1100000",,,4,C:\\ProgramData...,0,...,...,1',
        'Write,,,"12,1","12,4","300000",,,5,C:\\ProgramData...,0,...,...,1',
        'Write,,,"15,1","15,4","300000",,,6,C:\\Program Files...,0,...,...,1',
    )
    with open(f_in, 'w') as f:
        f.write(
            'Process,Event Type,Event SubType,Thread,Start (s),End (s),'
            'Duration (µs),IRP,File Object,Size (B),File Path,Offset,'
            'Flags,Result,Count\n'
        )
        for proc in ('p1', 'p2', 'p3'):
            for row in rows:
                f.write(f'{proc},{row}\n')
    # Remove the fixture even if an assertion fails.
    import os
    self.addCleanup(os.remove, f_in)

    file_io = FileIO(f_in, sections)
    file_io.filter()
    top_durations = file_io.get_top_durations()
    top_sizes = file_io.get_top_sizes()

    expected_top_durations = {
        Section(11, 15): [('p1', 300000), ('p2', 300000), ('p3', 300000)],
        Section(15, 17): [('p1', 300000), ('p2', 300000), ('p3', 300000)],
    }
    expected_top_sizes = {
        Section(11, 15): [('p1', 1), ('p2', 1), ('p3', 1)],
        Section(15, 17): [('p1', 7), ('p2', 7), ('p3', 7)],
    }
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(top_durations, expected_top_durations)
    self.assertEqual(top_sizes, expected_top_sizes)