def _parse_file_expect_result(expect_result: ExpectResult):
    """Load per-column group expect values from the files listed in
    ``expect_result.groupFileValues`` and transpose them into the row-wise
    ``expect_result.groupValues`` matrix.

    Side effects on ``expect_result``: sets ``groupValues`` and
    ``matchGroupMainColumnNum``, deletes the consumed ``groupFileValues``.
    ``.sql`` files are read via ``FileUtil.get_sql_file``; everything else
    line-by-line via ``FileUtil.get_file_lines``.
    """
    group_file_values = expect_result.groupFileValues
    main_property_index = expect_result.mainPropertyIndex
    column_count = len(expect_result.groupColumns)
    # One independent list per column (``[[]] * n`` would alias a single list;
    # the original got away with it only because it reassigned each slot).
    group_values = [[] for _ in range(column_count)]
    for column_index in range(column_count):
        for file_value in group_file_values[column_index]:
            group_value_file = FileUtil.get_file_path(file_value)
            assert os.path.exists(group_value_file), \
                "'groupFileValue' is not exist, group_value_file:" + str(
                    group_value_file)
            if group_value_file.endswith(".sql"):
                __group_value = FileUtil.get_sql_file(group_value_file)
            else:
                __group_value = FileUtil.get_file_lines(group_value_file)
            if __group_value is not None and len(__group_value) != 0:
                group_values[column_index].extend(__group_value)
    # The "main" column decides how many result rows there are.
    row_count = len(group_values[main_property_index])
    # BUG FIX: the original built this as ``[[''] * n] * row_count``, which
    # makes every row the SAME list object — after the fill loop all rows
    # equalled the last row written.  Build an independent row per index.
    expect_group_values = [[''] * column_count for _ in range(row_count)]
    for row_index in range(row_count):
        for cell_index in range(column_count):
            expect_group_values[row_index][cell_index] = \
                group_values[cell_index][row_index]
    expect_result.groupValues = expect_group_values
    expect_result.matchGroupMainColumnNum = [0] * len(
        expect_result.groupValues)
    delattr(expect_result, "groupFileValues")
def _parse_file_expect_result(expect_result: ExpectResult):
    """Load expected values and expected match counts from the paired files in
    ``expect_result.expectValueFiles`` / ``expect_result.expectNumFiles``.

    Side effects on ``expect_result``: sets ``expectValues`` and
    ``expectNums``, deletes the two consumed ``*Files`` attributes, then
    hands off to ``_parse_str_expect_result`` for the string-level parsing.
    """
    expect_value_files = expect_result.expectValueFiles
    expect_num_files = expect_result.expectNumFiles
    expect_values = []
    expect_nums = []
    # Files are paired by position: value file i goes with num file i.
    for value_entry, num_entry in zip(expect_value_files, expect_num_files):
        expect_value_file = FileUtil.get_file_path(value_entry)
        expect_num_file = FileUtil.get_file_path(num_entry)
        # BUG FIX: the original message had no space between the two string
        # fragments ("expect_num_filesis not exist").
        assert os.path.exists(expect_value_file) and os.path.exists(
            expect_num_file), "expect_value_files or expect_num_files " \
            "is not exist, expect_value_files:" + str(
                expect_value_files) + "expect_num_files:" + str(expect_num_files)
        # The two branches only differed in the reader call; collapsed.
        if expect_value_file.endswith(".sql"):
            __sqls = FileUtil.get_sql_file(expect_value_file)
        else:
            __sqls = FileUtil.get_file_lines(expect_value_file)
        if __sqls:
            expect_values.extend(__sqls)
        __nums = FileUtil.get_file_lines(expect_num_file)
        if __nums:
            expect_nums.extend(__nums)
    expect_result.expectValues = expect_values
    expect_result.expectNums = expect_nums
    delattr(expect_result, "expectValueFiles")
    delattr(expect_result, "expectNumFiles")
    SingleExpectResultParse._parse_str_expect_result(expect_result)
def test_read_file(self):
    """Reading the same .sql file in raw mode vs normalized mode."""
    sql_path = FileUtil.get_file_path("classpath:/test/sql/sql/test2.sql")
    raw_sqls = ReaderFileHandler.read_file("原始SQL", sql_path)
    assert raw_sqls == ['select *\nfrom test1',
                       'SELECT * from\n\ntest.TESTTABLE']
    normalized_sqls = ReaderFileHandler.read_file("标准化SQL", sql_path)
    assert str(normalized_sqls) == \
        "['select * from test1', 'SELECT * from test.TESTTABLE']"
def test_get_files(self):
    """
    Test fetching files under a directory by suffix.
    :return:
    """
    target_dir = FileUtil.get_project_path() + "test"
    for sql_file in FileUtil.get_files(target_dir, ".sql"):
        print(sql_file)
def __statistic_analysis_data(filter_content: str):
    """
    Statistically analyse the exported audit excel files in the configured
    output directory, feeding every data row through the excel-reader
    delegate and the per-audit-type strategy, then run each strategy's
    analysis step.

    :param filter_content: filter expression forwarded to the strategies
    :return: None
    """
    __output_dir = MessageConfig.output_dir
    # Guard clause: nothing to analyse if the output dir was never created.
    if not os.path.exists(__output_dir):
        return
    post_data_excel_reader_delegate = PostDataExcelReaderDelegate()
    reader = SingleExpectResultParse()
    access_properties = reader.access_properties()
    logon_properties = reader.logon_properties()
    reader = GroupExpectResultParse()
    access_properties.extend(reader.access_properties())
    logon_properties.extend(reader.logon_properties())

    def __process(properties, audit_type):
        # Shared per-audit-type routine (the original duplicated this whole
        # loop for ACCESS and LOGON).  Returns the strategy so the caller
        # can run analysis after both passes, preserving the original order.
        strategy = StrategyDelegate(properties, audit_type, filter_content)
        excel_files = FileUtil.get_files_prefix(
            __output_dir, audit_type.analysis_pre_file_name)
        for excel_file in excel_files:
            book = xlrd.open_workbook(excel_file, 'r+b')
            for sheet in book.sheets():
                header = sheet.row_values(0)  # first row = column names
                for index in range(1, sheet.nrows):
                    data = dict(zip(header, sheet.row_values(index)))
                    post_data_excel_reader_delegate.merge_data(
                        data, audit_type)
                    strategy.statistic_data(data)
        return strategy

    access_strategy = __process(access_properties, AuditType.ACCESS)
    logon_strategy = __process(logon_properties, AuditType.LOGON)
    access_strategy.analysis_data()
    logon_strategy.analysis_data()
def test_get_project_path(self):
    """
    Verify the resolved project root ends with the project name.
    :return:
    """
    project_root = FileUtil.get_project_path()
    self.assertTrue(project_root.endswith("auditMessageTest"))
def test_read_excel(self):
    """
    Exercise reading an excel workbook: sheet/row/column counts, a row as a
    header->value dict, a column, and a single cell.
    :return:
    """
    excel_path = FileUtil.get_project_path() + self.excel_url
    # NOTE(review): xlrd's second positional argument is ``logfile``, not an
    # open mode; passing 'w+b' matches the rest of this file but looks
    # suspicious — confirm against the xlrd version in use.
    workbook = xlrd.open_workbook(excel_path, 'w+b')
    worksheets = workbook.sheets()
    first_sheet = worksheets[0]
    print('表格总页数', len(worksheets))
    total_rows = first_sheet.nrows
    print('表格总行数', total_rows)
    total_cols = first_sheet.ncols
    print('表格总列数', total_cols)
    third_row = first_sheet.row_values(2)
    print('第3行值', third_row)
    header_row = first_sheet.row_values(0)
    second_row = first_sheet.row_values(1)
    # Zip the header row with a data row into a dict, as the readers do.
    data = dict(zip(header_row, second_row))
    print(data)
    third_col = first_sheet.col_values(2)
    print('第3列值', third_col)
    cell_value = first_sheet.cell(2, 2).value
    print('第3行第3列的单元格的值:', cell_value)
def _parse_dir_expect_result(expect_result: ExpectResult):
    """Expand a directory-based expect config into explicit file lists.

    Scans ``dataDir`` for value files by ``expectValueSuffix``, pairs each
    with its num file (same stem, ``expectNumSuffix``) when that exists,
    stores both lists on ``expect_result``, removes the consumed dir/suffix
    attributes, and delegates to ``_parse_file_expect_result``.
    """
    data_dir = expect_result.dataDir
    value_suffix = expect_result.expectValueSuffix
    expect_num_suffix = expect_result.expectNumSuffix
    assert data_dir is not None and data_dir.strip(
    ) != '', "'dataDir' is required"
    assert value_suffix is not None and value_suffix.strip(
    ) != '', "'valueSuffix' is required"
    assert expect_num_suffix is not None and expect_num_suffix.strip(
    ) != '', "'expectNumSuffix' is required"
    expect_value_files = FileUtil.get_files(data_dir, value_suffix)
    # Num file = value file with the value suffix swapped for the num suffix;
    # only keep the ones that actually exist on disk.
    candidates = (
        value_file[:value_file.find(value_suffix)] + expect_num_suffix
        for value_file in expect_value_files)
    expect_num_files = [path for path in candidates if os.path.exists(path)]
    expect_result.expectValueFiles = expect_value_files
    expect_result.expectNumFiles = expect_num_files
    delattr(expect_result, "dataDir")
    delattr(expect_result, "expectValueSuffix")
    delattr(expect_result, "expectNumSuffix")
    SingleExpectResultParse._parse_file_expect_result(expect_result)
def __shutdown():
    """
    Stop the receiver process by running the platform-specific shutdown
    script, after waiting the configured grace period.
    :return:
    """
    time.sleep(MessageConfig.shutdown_wait_time)
    operating_system = SystemUtil.get_operating_system()
    if operating_system == 'Windows':
        command = FileUtil.get_project_path() + "\\bin\\shutdownJar.bat"
    else:
        # BUG FIX: the original built this path with backslashes too, which
        # is not a path separator on Linux/macOS, so the .sh was never found.
        command = FileUtil.get_project_path() + "/bin/shutdownJar.sh"
    # Context manager closes the log file handle (the original leaked it).
    with open(MessageConfig.log_dir + "shutdown.log", "w") as log_file:
        p = subprocess.Popen(command,
                             stdout=log_file,
                             stderr=subprocess.STDOUT)
        # communicate() already waits for process exit; the original's extra
        # wait() call was redundant.
        p.communicate()
def __start_receive(need_clean: bool = False, filter_content: str = ''):
    """
    Launch the message-receiving jar, wait for the configured analysis
    period, then shut it down again.

    :param need_clean: when True, delete previously generated ACCESS/LOGON
        analysis files from the output directory first
    :param filter_content: forwarded to the jar as --message.filters
    :return: None
    """
    __shutdown()
    if need_clean:
        __files = []
        __files += FileUtil.get_files_prefix(
            MessageConfig.output_dir, AuditType.ACCESS.analysis_pre_file_name)
        __files += FileUtil.get_files_prefix(
            MessageConfig.output_dir, AuditType.LOGON.analysis_pre_file_name)
        for __file in __files:
            os.remove(__file)
    # BUG FIX: the original hard-coded "\\config\\..." which only works on
    # Windows; os.path.join uses the platform separator.
    file_path = os.path.join(FileUtil.get_project_path(), "config",
                             "receive-0.0.1-SNAPSHOT.jar")
    operating_system = SystemUtil.get_operating_system()
    if operating_system == 'Windows':
        command = "start javaw -jar " + file_path
    else:
        command = "nohup java -jar " + file_path
    arg0 = "--spring.activemq.broker-url=tcp://" + MessageConfig.host + ":" + str(
        MessageConfig.port)
    # NOTE(review): the credential arguments were scrubbed to ``******`` in
    # the source under review (which also left ``arg2``/``arg3`` undefined).
    # Reconstructed from MessageConfig — confirm the attribute names match
    # the config class.
    arg1 = "--spring.activemq.user=" + MessageConfig.user
    arg2 = "--spring.activemq.password=" + MessageConfig.password
    arg3 = "--message.output.path=" + MessageConfig.output_dir
    arg4 = "--message.filters=" + filter_content
    command = " ".join([command, arg0, arg1, arg2, arg3, arg4])
    # Context manager closes the log handle (the original leaked it);
    # communicate() already waits, so the extra wait() was redundant.
    with open(MessageConfig.log_dir + "receive.log", "w") as log_file:
        p = subprocess.Popen(command,
                             shell=True,
                             stdout=log_file,
                             stderr=subprocess.STDOUT)
        p.communicate()
    time.sleep(MessageConfig.analysis_wait_time)
    __shutdown()
def _parse_dir_expect_result(expect_result: ExpectResult):
    """Expand a directory-based group expect config into per-column file lists.

    For each group column, collects the files under ``groupDataDir`` matching
    that column's suffix, stores the lists as ``groupFileValues``, removes the
    consumed attributes, and delegates to ``_parse_file_expect_result``.
    """
    group_data_dir = expect_result.groupDataDir
    group_suffix_values = expect_result.groupSuffixValues
    assert group_data_dir is not None and group_data_dir.strip(
    ) != '', "'groupDataDir' is required"
    assert group_suffix_values is not None and len(group_suffix_values) != 0, \
        "'groupSuffixValues' is required"
    # Column i's files are selected by suffix i.
    expect_result.groupFileValues = [
        FileUtil.get_files(group_data_dir, group_suffix_values[index])
        for index in range(len(expect_result.groupColumns))
    ]
    delattr(expect_result, "groupDataDir")
    delattr(expect_result, "groupSuffixValues")
    GroupExpectResultParse._parse_file_expect_result(expect_result)
def test_append_write_excel(self):
    """
    Test appending a row to an existing excel file: copy the workbook with
    xlutils, write ten cells onto the first new row, and save it back.
    :return:
    """
    # Removed the local ``excel_url = '\\test\\LogonAudit_...'`` the original
    # assigned but never read — the path below uses ``self.excel_url``.
    # NOTE(review): if the local was meant to override the fixture, restore
    # it and use it here instead.
    excel_path = FileUtil.get_project_path() + self.excel_url
    book = xlrd.open_workbook(excel_path, 'w+b')
    nrows = book.sheets()[0].nrows
    new_workbook = copy(book)
    new_worksheet = new_workbook.get_sheet(0)
    # Append one row: column i gets value i.
    for i in range(0, 10):
        new_worksheet.write(nrows, i, i)
    new_workbook.save(excel_path)
def test_read_json(self):
    """Exploratory end-to-end test: parse the single- and group-expect json
    configs, replay two checked-in audit .xls exports through the strategy
    delegates, and print outputs from the config/file helpers.

    NOTE(review): this test prints rather than asserts, so it only verifies
    that nothing raises.
    """
    reader = SingleExpectResultParse(
        MessageConfig.single_expect_result_file)
    logon_strategy = StrategyDelegate(reader.logon_properties(),
                                      AuditType.LOGON)
    access_strategy = StrategyDelegate(reader.access_properties(),
                                       AuditType.ACCESS)
    # Replay the LOGON fixture workbook row by row through the strategy.
    logon_excel_path = FileUtil.get_project_path(
    ) + '\\test\\LogonAudit_1586413078263.xls'
    book = xlrd.open_workbook(logon_excel_path, 'w+b')
    sheets = book.sheets()
    for sheet in sheets:
        header = sheet.row_values(0)  # first row holds the column names
        for index in range(1, sheet.nrows):
            data = dict(zip(header, sheet.row_values(index)))
            logon_strategy.main_statistic_data(data)
    logon_strategy.analysis_data()
    for expectResult in reader.logon_properties():
        # print(expectResult)
        pass
    # Same replay for the ACCESS fixture workbook.
    access_excel_path = FileUtil.get_project_path(
    ) + '\\test\\AccessAudit_1586413078448.xls'
    book = xlrd.open_workbook(access_excel_path, 'w+b')
    sheets = book.sheets()
    for sheet in sheets:
        header = sheet.row_values(0)
        for index in range(1, sheet.nrows):
            data = dict(zip(header, sheet.row_values(index)))
            access_strategy.main_statistic_data(data)
    access_strategy.analysis_data()
    for expectResult in reader.access_properties():
        # print(expectResult)
        pass
    # Poke the config/file helpers and print what they return.
    file = FileUtil.get_project_path() + "/config/columnDesc.conf"
    cp = configparser.ConfigParser()
    cp.read(file, encoding="utf-8")
    print([item[1] for item in cp.items("logOff")])
    print(HeadersConfig.get_section_columns("logOff"))
    print('消息类型' in HeadersConfig.get_section_columns("logOff"))
    print('消息类型1' in HeadersConfig.get_section_columns("logOff"))
    print(FileUtil.get_file_lines(file))
    sql_file = FileUtil.get_project_path() + "\\test\\sql\\sql\\test2.sql"
    print(FileUtil.get_sql_file(sql_file))
    # Re-run the ACCESS sheets through a strategy built from the
    # group-expect config.  NOTE(review): ``sheets`` still refers to the
    # ACCESS workbook opened above — presumably intentional; confirm.
    reader = GroupExpectResultParse(MessageConfig.group_expect_result_file,
                                    StrategyType.MULTIPLE_FIELDS_MATCH)
    for expectResult in reader.access_properties():
        print(expectResult)
        pass
    access_strategy = StrategyDelegate(reader.access_properties(),
                                       AuditType.ACCESS)
    for sheet in sheets:
        header = sheet.row_values(0)
        for index in range(1, sheet.nrows):
            data = dict(zip(header, sheet.row_values(index)))
            access_strategy.main_statistic_data(data)
    access_strategy.analysis_data()