def main(): #read decryption password from file. password = decrypt_password_file("password") #default password is 123. #counter for wrong times input. wrong_t = 0 #ask for user password. user_pass = raw_input("Please provide your password:\n") #maximum tries equals 3. while wrong_t < 2: if user_pass == str(password).translate(None, ' \n'): # successful login. # show main menu and get user's choice. choice = show_menu() # perform appropriate action according to the user's choice. perform_action(choice, password) sys.exit(0) #wrong password given by the user if this point is reached. print "WRONG PASSWORD ENTERED! PLEASE GIVE YOUR PASSWORD AGAIN:\n" user_pass = raw_input() #read new user input. wrong_t = wrong_t + 1 #increase number of tries. #password given wrong 3 times if this point is reached so exit for safety. print "WRONG PASSWORD ENTERED TOO MANY TIMES, THE SYSTEM WILL EXIT FOR SAFETY!\n" print "ALL FILES WILL BE DELETED!!!\n" delete_files() sys.exit(-1)
def external_orca(g: nx.Graph, gname: str):
    """Run the external ORCA graphlet-counting binary on ``g``.

    The graph is simplified (undirected, no self-loops, largest connected
    component only), written to ``src/scratch/<gname>.in``, counted with the
    platform-appropriate ORCA executable, and the per-node orbit counts are
    returned as a DataFrame.  Both scratch files are deleted afterwards.

    :param g: input graph (directed/multi graphs are converted)
    :param gname: base name for the scratch input/output files
    :return: pandas DataFrame of ORCA orbit counts (one row per node)
    """
    # BUG FIX: nx.DiGraph subclasses nx.Graph, so an isinstance() check alone
    # let directed graphs through and they crashed later in
    # number_connected_components; the old `if nx.is_directed(g)` branch was
    # dead code and used g.selfloop_edges(), removed in networkx >= 2.4.
    if not isinstance(g, nx.Graph) or g.is_directed():
        g = nx.Graph(g)  # convert it into a simple undirected graph

    # ORCA does not accept self-loops.
    self_loop_edges = list(nx.selfloop_edges(g))
    if len(self_loop_edges) > 0:
        g.remove_edges_from(self_loop_edges)

    # Keep only the largest connected component.
    if nx.number_connected_components(g) > 1:
        g = g.subgraph(max(nx.connected_components(g), key=len))

    # ORCA's input format requires nodes labelled 0..n-1.
    g = nx.convert_node_labels_to_integers(g, first_label=0)

    file_dir = 'src/scratch'
    input_path = f'./{file_dir}/{gname}.in'
    with open(input_path, 'w') as f:
        f.write(f'{g.order()} {g.size()}\n')  # header: node count, edge count
        for u, v in g.edges():
            f.write(f'{u} {v}\n')

    # argv[0] is filled in below with the platform-specific executable.
    args = ['', '4', f'./{file_dir}/{gname}.in', f'./{file_dir}/{gname}.out']
    if 'Windows' in platform.platform():
        args[0] = './src/orca/orca.exe'
    elif 'Linux' in platform.platform():
        args[0] = './src/orca/orca_linux'
    else:
        args[0] = './src/orca/orca_mac'
    process = subprocess.run(' '.join(args), shell=True,
                             stdout=subprocess.DEVNULL)
    if process.returncode != 0:
        print('Error in ORCA')

    output_path = f'./{file_dir}/{gname}.out'
    assert check_file_exists(
        output_path), f'output file @ {output_path} not found in GCD'
    df = pd.read_csv(output_path, sep=' ', header=None)
    # delete both the input and output files
    delete_files(input_path, output_path)
    return df
def generate(self, num_graphs: int, gen_id: int) -> List[nx.Graph]:
    """Run the NetGAN generation script and collect the resulting graphs.

    :param num_graphs: number of graphs the external script should produce
    :param gen_id: generation id stamped onto every returned graph
    :return: list of generated graphs named '<input>_<trial>_<k>'
    """
    dump = './src/netgan/dumps'
    gname = f'{self.input_graph.name}_{self.trial}'
    pickle_path = f'{dump}/{gname}.pkl.gz'

    # The generator lives in its own conda environment, so run it via a shell.
    cmd = (f'conda init bash; . ~/.bashrc; conda activate netgan; '
           f'python src/netgan/gen.py {gname} {pickle_path} {num_graphs}')
    proc = sub.run(cmd, shell=True)  # , stdout=sub.DEVNULL)
    assert proc.returncode == 0, 'error in NetGAN generate'

    output_pickle_path = f'{dump}/{gname}_graphs.pkl.gz'
    graphs = []
    for idx, graph in enumerate(load_pickle(output_pickle_path), start=1):
        # number each generated graph, starting at 1
        graph.name = f'{self.input_graph.name}_{self.trial}_{idx}'
        graph.gen_id = gen_id
        graphs.append(graph)
    delete_files(output_pickle_path)
    return graphs
def _gen(self, gname: str, gen_id: int) -> nx.Graph:
    """Call the external KronGen binary and read back the generated graph.

    :param gname: name assigned to the generated graph
    :param gen_id: generation id stamped onto the graph
    :raises Exception: if the initiator matrix is empty or KronGen fails
    """
    n = self.input_graph.order()
    # Choose the Kronecker iteration count whose power of two is closest
    # to the original node count; floor(log2 n) bounds it from below.
    num_iters = int(math.log2(n))
    if math.fabs(2 ** num_iters - n) > math.fabs(2 ** (num_iters + 1) - n):
        num_iters += 1

    assert 'initiator_matrix' in self.params, 'Initiator matrix not found'
    init_matrix = self.params['initiator_matrix']
    output_file = f'./src/kronecker/{self.initial_gname}_{self.trial}_kron.txt'

    # An empty initiator matrix means KronFit failed upstream.
    if len(init_matrix) == 0:
        CP.print_blue(f'Error in KronGen: "{self.input_graph.name}"')
        raise Exception('Generation failed!')

    bash_code = f'cd src/kronecker; ./{self.krongen_exec} -o:{self.initial_gname}_{self.trial}_kron.txt -m:"{init_matrix}" -i:{num_iters}'
    result = sub.run(bash_code, shell=True, stdout=sub.PIPE)
    if result.returncode != 0 or not check_file_exists(output_file):
        CP.print_blue(f'Error in KronGen: "{self.input_graph.name}"')
        raise Exception('Generation failed!')

    graph = nx.read_edgelist(output_file, nodetype=int,
                             create_using=nx.Graph())
    graph.name = gname
    delete_files(output_file)
    graph.gen_id = gen_id
    return graph
def warberry(): start_time = time.time() #move previous files in /Results move_files(int(start_time)) delete_files() version = bcolors.TITLE + (''' _ _ ___ ____________ ___________________ __ | | | |/ _ \ | ___ \ ___ \ ___| ___ \ ___ \ \ / / | | | / /_\ \| |_/ / |_/ / |__ | |_/ / |_/ /\ V / | |/\| | _ || /| ___ \ __|| /| / \ / \ /\ / | | || |\ \| |_/ / |___| |\ \| |\ \ | | \/ \/\_| |_/\_| \_\____/\____/\_| \_\_| \_| \_/ TACTICAL EXPLOITATION v5 @sec_groundzero [email protected] ''') + bcolors.ENDC parser = OptionParser(usage="usage: sudo %prog [options]", version=version) parser.add_option("-a", "--attack", action="store", dest="attacktype", default="-A", help="Attack Mode." + bcolors.WARNING + " Default: --attack" + bcolors.ENDC) parser.add_option("-p", "--packets", action="store", dest="packets", default=20, type=int, help="# of Network Packets to capture" + bcolors.WARNING + " Default: 20" + bcolors.ENDC) parser.add_option("-x", "--expire", action="store", dest="expire", default=20, type=int, help="Time for packet capture to stop" + bcolors.WARNING + " Default: 20s" + bcolors.ENDC) parser.add_option( "-I", "--interface", action="store", dest="iface", default="eth0", help="Network Interface to use." + bcolors.WARNING + " Default: eth0" + bcolors.ENDC, choices=['eth0', 'eth1', 'wlan0', 'wlan1', 'wlan2', 'at0']) parser.add_option("-N", "--name", action="store", dest="name", default="WarBerry", help="Hostname to use." + bcolors.WARNING + " Default: Auto" + bcolors.ENDC) parser.add_option("-i", "--intensity", action="store", dest="intensity", default="-T1", help="Port scan intensity." + bcolors.WARNING + " Default: T1" + bcolors.ENDC, choices=['-T1', '-T2', '-T3', '-T4']) parser.add_option("-P", "--poison", action="store_false", dest="poison", default=True, help="Turn Poisoning off." 
+ bcolors.WARNING + " Default: On" + bcolors.ENDC) parser.add_option("-t", "--time", action="store", dest="time", default=900, type=int, help="Responder Timeout Seconds") parser.add_option("-Q", "--quick", action="store_true", dest="fast", default=False, help="Scan using threads." + bcolors.WARNING + " Default: Off" + bcolors.ENDC) parser.add_option("-H", "--hostname", action="store_false", dest="hostname", default=True, help="Do not change WarBerry hostname" + bcolors.WARNING + " Default: Off" + bcolors.ENDC) parser.add_option("-e", "--enumeration", action="store_true", dest="enum", default=False, help="Disable enumeration mode." + bcolors.WARNING + " Default: Off" + bcolors.ENDC) parser.add_option("-M", "--malicious", action="store_true", dest="malicious", default=False, help="Enable Malicious only mode" + bcolors.WARNING + " Default: Off" + bcolors.ENDC) parser.add_option("-B", "--bluetooth", action="store_true", dest="btooth", default=False, help="Enable Bluetooth Scanning" + bcolors.WARNING + " Default: Off" + bcolors.ENDC) parser.add_option("-W", "--wifi", action="store_true", dest="wifi", default=False, help="Enable WiFi Scanning" + bcolors.WARNING + " Default: Off" + bcolors.ENDC) parser.add_option("-r", "--recon", action="store_true", dest="reconmode", default=False, help="Enable Recon only mode. " + bcolors.WARNING + " Default: Off" + bcolors.ENDC) parser.add_option("-S", "--sniffer", action="store_true", dest="sniffer", default=False, help="Enable Sniffer only mode." 
+ bcolors.WARNING + " Default: Off" + bcolors.ENDC) parser.add_option("-C", "--clear", action="store_true", dest="clear", default=False, help="Clear previous output folders in ../Results") parser.add_option("-m", "--man", action="store_true", dest="manpage", default=False, help="Print WarBerry man pages") (options, args) = parser.parse_args() if options.clear == True: clear_output() elif options.manpage == True: subprocess.call('clear', shell=True) banner_full_help() elif options.attacktype == "-A" or options.attacktype == '--attack': subprocess.call('clear', shell=True) banner() if not os.geteuid() == 0: print bcolors.FAIL + '*** You are not running as root and some modules will fail ***\nRun again with sudo.' + bcolors.ENDC sys.exit(-1) dhcp_check() if (os.path.isfile('/sys/class/net/' + options.iface + '/carrier') == True): iface = options.iface else: for ifaces in os.listdir("/sys/class/net/"): if ifaces[0] == "e": file_iface = open("/sys/class/net/" + ifaces + "/carrier") if file_iface.readline()[0] == "1": iface = ifaces host_name = options.name int_ip = iprecon(iface) if (int_ip == None): exit else: if options.malicious == True: netmask = netmask_recon(iface) with open('../Results/running_status', 'w') as status: status.write("<root>") status.write("Entering poisoning mode\n") poison_time = options.time poison(iface, poison_time) status.write("</root>") else: netmask = netmask_recon(iface) CIDR = subnet(int_ip, netmask) status_str = str(scope_definition(iface, CIDR)) packets = options.packets expire = options.expire status_str += str(sniffer(iface, packets, expire)) status_str += str(hostnames(CIDR)) status_str += str(nbtscan(CIDR)) with open('../Results/running_status', 'w') as status: status.write(status_str) if host_name != "WarBerry": manual_namechange(host_name) if options.hostname == True and host_name == "WarBerry": namechange() if options.reconmode == False: intensity = options.intensity status_str = "" if options.fast == False: status_str += str( 
single_port_scanner(CIDR, intensity, iface)) else: status_str += str( thread_port_scanner(CIDR, intensity, iface)) if options.enum == False: status_str += str(shares_enum(iface)) status_str += str(smb_users(iface)) status_str += str(webs_prep()) status_str += str(http_title_enum(iface)) status_str += str(nfs_enum(iface)) status_str += str(waf_enum(iface)) status_str += str(robots_txt()) status_str += str(mysql_enum(iface)) status_str += str(mssql_enum(iface)) status_str += str(ftp_enum(iface)) #status_str +=str(snmp_enum(iface)) status_str += str(sip_methods_enum(iface)) status_str += str(sip_users_enum(iface)) status_str += str(os_enum(CIDR, iface)) #enum4linux() #with open('../Results/running_status', 'a') as status: #status.write("Completed enum4linux Enumeration\n") status_str += str(zone_transfers(CIDR, iface)) with open('../Results/running_status', 'a') as status: status.write(status_str) if options.btooth == True: bluetooth_enum() with open('../Results/running_status', 'a') as status: status.write("Completed bluetooth scan\n") if options.wifi == True: wifi_enum() with open('../Results/running_status', 'a') as status: status.write("Completed wifi networks scan\n") print "" print bcolors.TITLE + "All scripts completed. Check the /Results directory" + bcolors.ENDC print " " if options.poison == True: with open('../Results/running_status', 'a') as status: status.write("Entering poisoning mode\n") poison_time = options.time poison(iface, poison_time) elif options.attacktype == '-S' or options.attacktype == '--sniffer': status_str = "" iface = options.iface packets = options.packets subprocess.call('clear', shell=True) status_str += str(sniffer(iface, packets)) create_xmls() encrypt_files() #Sytem exit due to finish. print bcolors.TITLE + "Warberry is now finished. The system will now exit.\n" + bcolors.ENDC print bcolors.TITLE + "Time of execution: " + "--- %s seconds ---\n" % ( time.time() - start_time) + bcolors.ENDC sys.exit(0)
def _gen(self, gname: str, gen_id: int) -> nx.Graph:
    """Generate a graph with BTER (MATLAB) and read it back.

    Writes the input adjacency matrix and a MATLAB driver script into
    ./src/bter, runs MATLAB on the script, then loads the generated
    adjacency matrix and cleans up all intermediate files.

    :param gname: name assigned to the generated graph
    :param gen_id: generation id stamped onto the graph
    :raises Exception: if MATLAB fails or the output file is missing
    """
    g = self.input_graph
    # fix BTER to use the directory..
    CP.print_blue('Starting BTER...')

    graph_path = f'./src/bter/{g.name}_{self.trial}.mat'
    np.savetxt(graph_path, nx.to_numpy_matrix(g), fmt='%d')

    # MATLAB driver script, one line per list entry.
    # NOTE(review): the "dedup'd graph" line logs nnz(G)/2 (the input),
    # not nnz(G_bter)/2 — confirm whether that is intended.
    matlab_code = [
        'mex -largeArrayDims tricnt_mex.c;',
        'mex -largeArrayDims ccperdegest_mex.c;',
        f"G = dlmread('{g.name}_{self.trial}.mat');",
        'G = sparse(G);',
        f"graphname = '{g.name}_{self.trial}';",
        '',
        'nnodes = size(G, 1);',
        'nedges = nnz(G) / 2;',
        r"fprintf('nodes: %d edges: %d\n', nnodes, nedges);",
        '',
        'nd = accumarray(nonzeros(sum(G,2)),1);',
        "maxdegree = find(nd>0,1,'last');",
        r"fprintf('Maximum degree: %d\n', maxdegree);",
        '',
        '[ccd,gcc] = ccperdeg(G);',
        r"fprintf('Global clustering coefficient: %.2f\n', gcc);",
        '',
        r"fprintf('Running BTER...\n');",
        't1=tic;',
        '[E1,E2] = bter(nd,ccd);',
        'toc(t1);',
        r"fprintf('Number of edges created by BTER: %d\n', size(E1,1) + size(E2,1));",
        '',
        "fprintf('Turning edge list into adjacency matrix (including dedup)...');",
        't2=tic;',
        'G_bter = bter_edges2graph(E1,E2);',
        'toc(t2);',
        r"fprintf('Number of edges in dedup''d graph: %d\n', nnz(G)/2);",
        '',
        'G_bter = full(G_bter);',
        r"dlmwrite('{}_{}_bter.mat', G_bter, ' ');".format(
            g.name, self.trial),
        'quit;'
    ]

    matlab_code_filename = f'{g.name}_{self.trial}_code.m'
    matlab_code_path = f'./src/bter/{matlab_code_filename}'
    # BUG FIX: the script file was written via print(..., file=open(...)),
    # leaking an open file handle; use a context manager instead.
    with open(matlab_code_path, 'w') as script_file:
        script_file.write('\n'.join(matlab_code) + '\n')

    output_path = f'./src/bter/{g.name}_{self.trial}_bter.mat'
    start_time = time()
    completed_process = sub.run(
        f'cd src/bter; cat {matlab_code_filename} | matlab -nosplash -nodesktop',
        shell=True,
        stdout=sub.DEVNULL,
        stderr=sub.DEVNULL)
    CP.print_blue(f'BTER ran in {round(time() - start_time, 3)} secs')

    if completed_process.returncode != 0 or not check_file_exists(
            output_path):
        CP.print_blue('BTER failed!')
        raise Exception('Generation failed!')

    bter_mat = np.loadtxt(output_path, dtype=int)
    g_bter = nx.from_numpy_matrix(bter_mat, create_using=nx.Graph())
    g_bter.name = gname
    g_bter.gen_id = gen_id
    delete_files(graph_path, output_path, matlab_code_path)
    return g_bter
def run(self, use_pickle: bool) -> None:
    """New runner - uses a list of graphs.

    Resumes from a completed pickle, a partial (temp) pickle, or starts
    fresh from the initial graph, then fits/generates up to
    ``self.num_generations`` generations, checkpointing after each one.

    :param use_pickle: if True, try to resume from an existing pickle
    :return: None
    """
    pickle_ext = '.pkl.gz'
    self.graphs = []

    if use_pickle:
        if check_file_exists(self.graphs_pickle_path + pickle_ext):
            # The whole pickle exists: nothing to do.
            graphs = load_pickle(self.graphs_pickle_path + pickle_ext)
            # BUG FIX: the message hard-coded "Expected 21 graphs" even
            # though the check uses self.num_generations + 1.
            assert len(graphs) == self.num_generations + 1, \
                f'Expected {self.num_generations + 1} graphs, found {len(graphs)}'
            CP.print_green(
                f'Using completed pickle at {self.graphs_pickle_path + pickle_ext!r}. Loaded {len(graphs)} graphs'
            )
            return
        else:
            # Look for a partial checkpoint: list_<total>_<trial>_temp_<progress>.pkl.gz
            # (raw string fixes the invalid '\d' escape in the old pattern).
            temp_file_pattern = re.compile(
                rf'list_(\d+)_{self.trial}_temp_(\d+).pkl.gz')
            dir_name = '/'.join(self.graphs_pickle_path.split('/')[:-1])
            input_files = [
                f for f in os.listdir(dir_name)
                if re.match(temp_file_pattern, f)
            ]
            if len(input_files) > 0:
                assert len(
                    input_files
                ) == 1, f'More than one matches found: {input_files}'
                input_file = input_files[0]
                total_generations, progress = map(
                    int, temp_file_pattern.fullmatch(input_file).groups())
                graphs = load_pickle(join(dir_name, input_file))
                # BUG FIX: message said "expected: {progress}" while the
                # check is against progress + 1 (initial graph included).
                assert len(graphs) == progress + 1, \
                    f'Found {len(graphs)}, expected: {progress + 1}'
                CP.print_blue(
                    f'Partial pickle found at {input_file!r} trial: {self.trial} progress: {progress}/{total_generations}'
                )
                self.graphs = graphs

    remaining_generations = self.num_generations - len(self.graphs)
    tqdm.write(
        f'Running Infinity Mirror on {self.initial_graph.name!r} {self.initial_graph.order(), self.initial_graph.size()} {self.model.model_name!r} {remaining_generations} generations'
    )
    pbar = tqdm(total=remaining_generations,
                bar_format='{l_bar}{bar}|[{elapsed}<{remaining}]',
                ncols=50)

    if len(self.graphs) == 0:
        # Fresh start: generation 0 is the input graph itself.
        self.initial_graph.level = 0
        self.graphs = [self.initial_graph]
        self.features = [None]

    completed_trial = False
    for i in range(len(self.graphs) - 1, self.num_generations):
        if i == len(self.graphs) - 1:
            curr_graph = self.graphs[-1]  # use the last graph
        level = i + 1
        try:
            fit_time_start = time.perf_counter()
            self.model.update(new_input_graph=curr_graph)  # update the model
            fit_time = time.perf_counter() - fit_time_start
        except Exception as e:
            fit_time = np.nan
            print(f'Model fit failed {e}')
            break

        try:
            gen_time_start = time.perf_counter()
            generated_graphs = self.model.generate(
                num_graphs=self.num_graphs,
                gen_id=level)  # generate a new set of graphs
            gen_time = time.perf_counter() - gen_time_start
        except Exception as e:
            gen_time = np.nan
            print(f'Generation failed {e}')
            break

        if self.features:
            self.features.append(self.model.params)

        curr_graph = generated_graphs[0]  # we are only generating one graph
        curr_graph.name = f'{self.initial_graph.name}_{level}_{self.trial}'
        curr_graph.gen = level
        self.graphs.append(curr_graph)

        # Checkpoint this generation and drop the previous checkpoint.
        temp_pickle_path = self.graphs_pickle_path + f'_temp_{level}{pickle_ext}'
        prev_temp_pickle_path = self.graphs_pickle_path + f'_temp_{level-1}{pickle_ext}'
        temp_features_path = self.graphs_features_path + f'_temp_{level}{pickle_ext}'
        prev_temp_features_path = self.graphs_features_path + f'_temp_{level-1}{pickle_ext}'
        save_pickle(obj=self.graphs, path=temp_pickle_path)
        save_pickle(obj=self.features, path=temp_features_path)
        delete_files(prev_temp_pickle_path)
        delete_files(prev_temp_features_path)

        self.write_timing_csv(iter_=level, fit_time=fit_time, gen_time=gen_time)

        if level == self.num_generations:
            completed_trial = True
        pbar.update(1)
    pbar.close()

    if completed_trial:
        # Only delete the temp pickles if the trial finishes successfully;
        # on failure they remain so a later run can resume from them.
        delete_files(temp_pickle_path)
        delete_files(temp_features_path)
        CP.print_green(
            f'List of {len(self.graphs)} Graphs is pickled at "{self.graphs_pickle_path + pickle_ext}"'
        )
        save_pickle(obj=self.graphs, path=self.graphs_pickle_path + pickle_ext)
        save_pickle(obj=self.features, path=self.graphs_features_path + pickle_ext)
    return