def convert_json_to_existing_txt_policies(json_file, output_file_name):
    """Write a tab-separated report of already-existing policies to a text file.

    Reads policy records from ``json_file`` (via the project's
    ``read_json_file``), sorts them by 'Input-Row-ID', writes a header plus
    one line per record, and logs the number of distinct input rows covered.

    Args:
        json_file: path to the JSON file produced earlier in the pipeline.
        output_file_name: path of the text report to create (overwritten).

    Returns:
        1 (legacy success flag used throughout this module).
    """
    existing_policies = read_json_file(json_file)
    print('Existing policies generated in:', output_file_name)
    # BUG FIX: the file handle was previously never closed (leaked on both
    # the early-return and the normal path); 'with' guarantees closure.
    with open(output_file_name, 'w') as output_file:
        if not existing_policies:
            output_file.write('No existing policies')
            return 1
        existing_policies = sorted(existing_policies,
                                   key=lambda k: k['Input-Row-ID'])
        output_file.write('-' * 190)
        output_file.write('\n')
        output_file.write('{}\t{}\t\t{}\t{}\t\t{}\t\t\t\t\t\t\t{}\n'.format(
            'Record Number', 'Source Address', 'Destination Address',
            'Port', 'Policy Name', 'Firewall'))
        output_file.write('-' * 190)
        output_file.write('\n')
        existing_pol = []
        for policy in existing_policies:
            row_id = policy['Input-Row-ID']
            existing_pol.append(row_id)
            output_file.write('{}\t\t{}\t\t{}\t\t{}\t\t{}\t\t\t\t{}\n'.format(
                row_id, policy['Source-IP'], policy['Destination-IP'],
                policy['Port'], policy['Policy-Name'], policy['Firewall']))
    # A row may produce several policy lines; count distinct input rows.
    total_existing_pol_cnt = len(set(existing_pol))
    logger.info('Total existing policies: {}'.format(total_existing_pol_cnt))
    return 1
def convert_json_to_txt_issues(json_file, output_file_name):
    """Write the 'red flag' (routing-issue) report to a text file.

    Reads routing-issue records from ``json_file``, sorts them by
    'Input-Row-ID' and writes a tab-separated row per issue.  FRF-1003 /
    FRF-1004 codes blank out the source / destination firewall column,
    matching the codes' meaning (firewall could not be determined).

    Args:
        json_file: path to the JSON file holding the routing issues.
        output_file_name: path of the text report to create (overwritten).

    Returns:
        1 (legacy success flag used throughout this module).
    """
    route_issues = read_json_file(json_file)
    print("Red Flags generated in:", output_file_name)
    # BUG FIX: the file handle was previously never closed; 'with'
    # guarantees closure on both the early-return and the normal path.
    with open(output_file_name, 'w') as output_file:
        if not route_issues:
            output_file.write("No red flags")
            return 1
        output_file.write("\n")
        output_file.write(
            "FiRMS Red Flag (FRF) Code details at: http://goto/FiRMS-RF-Codes")
        output_file.write("\n\n")
        output_file.write("-" * 190)
        output_file.write("\n")
        output_file.write("{}\t{}\t{}\t{}\t\t{}\t\t{}\t{}\t\t{}\t{}\t{}\n".format(
            'Rec#', 'Src_Addr', 'Dst_Addr', 'Src_Fw', 'SF_Ing_Zone',
            'SF_Egr_Zone', 'Dst_Fw', 'DF_Ing_Zone', 'DF_Egr_Zone',
            'Red_Flag_Code'))
        output_file.write("-" * 190)
        output_file.write("\n")
        route_issues = sorted(route_issues, key=lambda k: k['Input-Row-ID'])
        # NOTE: the 'Port' field exists on each record but is not part of
        # this report's columns, so it is intentionally not read here.
        for issue in route_issues:
            src_firewall = issue['Source-Firewall']
            dst_firewall = issue['Destination-Firewall']
            exc_message = issue['Routing-Issue-Reason']
            if 'FRF-1003' in exc_message:
                src_firewall = ""
            if 'FRF-1004' in exc_message:
                dst_firewall = ""
            output_file.write(
                "{}\t{}\t{}\t{}\t{}\t\t{}\t\t{}\t{}\t\t{}\n".format(
                    issue['Input-Row-ID'], issue['Source-IP'],
                    issue['Destination-IP'], src_firewall,
                    issue['Source-Zone'], issue['Dest-Zone'],
                    dst_firewall, issue['Source-Zone-2'],
                    issue['Dest-Zone-2'], exc_message))
    return 1
def test3_ReadJsonFile(self):
    """Parse the converted JSON file and stash the result on the fixture."""
    parsed = read_json_file(self.json_file)
    self.update('json_data', parsed)
    self.messager("Reading json passed")
def setUpClass(cls):
    """One-time fixture: load config, convert the Excel input to JSON, parse it."""
    config = cls.getConfig()
    cls.srno, cls.username, cls.pid, cls.input_file, cls.logfile = config
    cls.json_file = excel_to_json_convert(cls.input_file, cls.logfile)
    cls.json_data = read_json_file(cls.json_file)
def generate_sep_fw_pol(json_file, output_dir):
    """Write one policy-command text file per firewall into ``output_dir``.

    Each firewall gets '<output_dir><firewall>.txt' containing any shared
    'new_*_cmd' object definitions first, then one command group per sorted
    policy key.  The trailing ``compare_cmd`` is appended to the last
    firewall's file (preserved legacy behavior).

    Args:
        json_file: path to the JSON file of per-firewall policy commands.
        output_dir: directory prefix for the generated '.txt' files.

    Returns:
        1 if the input carries the 'no_policies' marker; otherwise None
        (preserved legacy behavior).
    """
    policies = read_json_file(json_file)
    if 'no_policies' in policies:
        return 1
    compare_cmd = policies.pop('compare_cmd')
    is_policies = 0
    last_file_name = None
    for each_fw in policies:
        output_file_name = output_dir + each_fw + '.txt'
        last_file_name = output_file_name
        # BUG FIX: a handle was opened per firewall but only the last one
        # was ever closed; 'with' closes every per-firewall file.
        with open(output_file_name, 'w') as output_file:
            fw_cmds = policies[each_fw]
            if len(fw_cmds) == 0:
                # File is still created/truncated, matching the original.
                continue
            is_policies = 1
            output_file.write('\n')
            # Shared object-definition commands are emitted once, first.
            for bucket in ('new_src_cmd', 'new_dst_cmd', 'new_app_cmd'):
                if bucket in fw_cmds:
                    for cmd in fw_cmds[bucket].values():
                        output_file.write(cmd)
                    fw_cmds.pop(bucket)
            for each_cmd in sorted(fw_cmds.keys()):
                group = fw_cmds[each_cmd]
                # NOTE(review): if a group is missing one of these keys,
                # the value silently carries over from the previous group
                # (preserved from the original); confirm every group is
                # complete.
                if 'src_cmd' in group:
                    src_cmds = group['src_cmd']
                if 'dst_cmd' in group:
                    dst_cmds = group['dst_cmd']
                if 'app_cmd' in group:
                    app_cmds = group['app_cmd']
                if 'permit_cmd' in group:
                    permit_cmds = group['permit_cmd']
                if 'row_num' in group:
                    row_num = group['row_num']
                # Join multiple record numbers as "a,b,c"; a single record
                # is used as-is.
                row_num_final = None
                for row in row_num:
                    if len(row_num) > 1:
                        if row_num_final is not None:
                            row_num_final = str(row_num_final) + ',' + str(row)
                        else:
                            row_num_final = row
                    else:
                        row_num_final = row
                row_num_message = ('Policies recommended for record '
                                   'number(s): %s' % row_num_final)
                output_file.write('\n')
                output_file.write(row_num_message)
                output_file.write('\n')
                for src in src_cmds:
                    output_file.write(src)
                output_file.write('\n')
                for dst in dst_cmds:
                    output_file.write(dst)
                output_file.write('\n')
                for app in app_cmds:
                    output_file.write(app)
                output_file.write('\n')
                for permit in permit_cmds:
                    output_file.write(permit)
                output_file.write('\n')
    # Preserved quirk: compare_cmd goes to the last file opened (even one
    # skipped as empty); reopened in append mode so the handle is closed.
    if is_policies == 1 and last_file_name is not None:
        with open(last_file_name, 'a') as output_file:
            output_file.write('\n')
            output_file.write(compare_cmd)
    return
def convert_json_to_txt_policies(json_file, output_file_name):
    """Write all recommended firewall policies into a single text report.

    For each firewall with pending commands, writes a '***<fw>' banner,
    any shared 'new_*_cmd' object definitions, then one command group per
    sorted policy key.  ``compare_cmd`` is appended once at the end.

    Args:
        json_file: path to the JSON file of per-firewall policy commands.
        output_file_name: path of the text report to create (overwritten).

    Returns:
        1 (legacy success flag used throughout this module).
    """
    policies = read_json_file(json_file)
    # BUG FIX: the report file was previously never closed; 'with'
    # guarantees closure on both the early-return and the normal path.
    with open(output_file_name, 'w') as output_file:
        if 'no_policies' in policies:
            output_file.write(policies['no_policies'])
            print('Policies recommended in:', output_file_name)
            return 1
        compare_cmd = policies.pop('compare_cmd')
        is_policies = 0
        for each_fw in policies:
            fw_cmds = policies[each_fw]
            if len(fw_cmds) == 0:
                continue
            is_policies = 1
            print_fw = (u'***%s \n '
                        u'=============================================== \n'
                        % each_fw)
            output_file.write('\n')
            output_file.write(print_fw)
            output_file.write('\n')
            # Shared object-definition commands are emitted once, first.
            for bucket in ('new_src_cmd', 'new_dst_cmd', 'new_app_cmd'):
                if bucket in fw_cmds:
                    for cmd in fw_cmds[bucket].values():
                        output_file.write(cmd)
                    fw_cmds.pop(bucket)
            for each_cmd in sorted(fw_cmds.keys()):
                group = fw_cmds[each_cmd]
                # NOTE(review): missing keys silently carry a value over
                # from the previous group (preserved from the original);
                # confirm every group is complete.
                if 'src_cmd' in group:
                    src_cmds = group['src_cmd']
                if 'dst_cmd' in group:
                    dst_cmds = group['dst_cmd']
                if 'app_cmd' in group:
                    app_cmds = group['app_cmd']
                if 'permit_cmd' in group:
                    permit_cmds = group['permit_cmd']
                if 'row_num' in group:
                    row_num = group['row_num']
                # Join multiple record numbers as "a,b,c"; a single record
                # is used as-is.
                row_num_final = None
                for row in row_num:
                    if len(row_num) > 1:
                        if row_num_final is not None:
                            row_num_final = str(row_num_final) + ',' + str(row)
                        else:
                            row_num_final = row
                    else:
                        row_num_final = row
                row_num_message = ('Policies recommended for record '
                                   'number(s) %s' % row_num_final)
                output_file.write('\n')
                output_file.write(row_num_message)
                output_file.write('\n')
                for src in src_cmds:
                    output_file.write(src)
                output_file.write('\n')
                for dst in dst_cmds:
                    output_file.write(dst)
                output_file.write('\n')
                for app in app_cmds:
                    output_file.write(app)
                output_file.write('\n')
                for permit in permit_cmds:
                    output_file.write(permit)
                output_file.write('\n')
        if is_policies == 1:
            output_file.write('\n')
            output_file.write(compare_cmd)
    print('Policies recommended in:', output_file_name)
    return 1
def pre_process(input_file, sr_number, username, unix_proc_no, logfile):
    """Convert the Excel input to JSON, validate it, and load it into the DB.

    Pipeline: excel -> JSON -> validation -> parse -> prepare batches ->
    insert into the ``incoming_request`` table.  Sets the module-level
    ``logger`` as a side effect.

    Args:
        input_file: path to the Excel request sheet.
        sr_number: service-request number stamped onto every row.
        username: requesting user, stamped onto every row.
        unix_proc_no: unix process number, stamped onto every row.
        logfile: log file path handed to the project Logger.

    Returns:
        no_of_batches produced by ``prepare_data``.

    Exits the process with status 1 when the Excel sheet fails validation.
    """
    global logger
    log_json_path = LOG_CFG_PATH + '/logging.json'
    logger = Logger(name='firms_fpb', config_path=log_json_path,
                    log_file=logfile)
    start_excel_time = time.time()
    logger.info('Pre Process Start Time: {}'.format(start_excel_time))
    # Convert the excel sheet to json.
    logger.info('Convert excel to JSON')
    json_file = excel_to_json_convert(input_file, logfile)
    end_excel_time = time.time() - start_excel_time
    # Validate the json; abort the whole run on an invalid sheet.
    logger.info('Validating JSON')
    is_valid_json = validate_json(json_file, logfile)
    if not is_valid_json:
        logger.error('Invalid excel sheet provided')
        print("Invalid excel sheet provided")
        exit(1)
    start_read_json_time = time.time()
    # Read the JSON data.
    json_data = read_json_file(json_file)
    # BUG FIX: elapsed time was measured from start_excel_time, inflating
    # the reported read duration.
    end_read_json_time = time.time() - start_read_json_time
    print("Total Time Taken for reading json: ",
          time.strftime("%H:%M:%S", time.gmtime(end_read_json_time)))
    # BUG FIX: removed a stray debug exit(1) here that made everything
    # below (data preparation and the DB load) unreachable.
    start_prep_time = time.time()
    # Build the row tuples to insert, batched for the loader.
    (data_to_load, no_of_batches) = prepare_data(json_data, username,
                                                 sr_number, unix_proc_no)
    end_prep_time = time.time() - start_prep_time
    print("Total Time Taken for preparing data: ",
          time.strftime("%H:%M:%S", time.gmtime(end_prep_time)))
    # Parameterized insert (Oracle :N placeholders) — values are bound,
    # not string-interpolated.
    insert_query = ("insert into incoming_request (sr_number,username,"
                    "src_ip,dst_ip,port,protocol,id,src_ip_int,dst_ip_int,"
                    "process_no,unix_process_no,inc_req_row_id,request_date)"
                    " values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13)")
    start_load_time = time.time()
    logger.info('Loading input data into incoming_request table')
    logger.info('Load start time: {}'.format(start_load_time))
    try:
        is_loaded = load_data(data_to_load, insert_query)
    except Exception as e:
        # BUG FIX: the exception was passed as a stray positional arg to
        # logger.error; fold it into the message instead.
        logger.error('Failed to insert data for pre-processing: %s' % e)
        print(e)
    end_load_time = time.time()
    total_load_time = end_load_time - start_load_time
    logger.info('Load end time: {}'.format(end_load_time))
    #print("Total Time Taken for loading the data: ", time.strftime("%H:%M:%S", time.gmtime(total_load_time)))
    return no_of_batches