def run_health_check():
    global CONFIG, binaryDownload

    logging.info("Health check requested by server!")
    logging.info("Retrieving health check settings...")
    query = copy_and_set_token(dict_getHealthCheck, CONFIG.get_value('token'))
    req = JsonRequest(query)
    ans = req.execute()
    if ans is None:
        logging.error("Failed to get health check!")
        sleep(5)
        return
    elif ans['response'] != 'SUCCESS':
        logging.error("Error on getting health check: " + str(ans))
        sleep(5)
        return
    binaryDownload.check_version(ans['crackerBinaryId'])
    check_id = ans['checkId']
    logging.info("Starting check ID " + str(check_id))

    # write hashes to file
    hash_file = open("hashlists/health_check.txt", "w")
    hash_file.write("\n".join(ans['hashes']))
    hash_file.close()

    # delete old file if necessary
    if os.path.exists("hashlists/health_check.out"):
        os.unlink("hashlists/health_check.out")

    # run task
    cracker = HashcatCracker(ans['crackerBinaryId'], binaryDownload)
    start = int(time.time())
    [states, errors] = cracker.run_health_check(ans['attack'], ans['hashlistAlias'])
    end = int(time.time())

    # read results
    if os.path.exists("hashlists/health_check.out"):
        founds = file_get_contents("hashlists/health_check.out").replace("\r\n", "\n").split("\n")
    else:
        founds = []
    num_gpus = len(states[0].get_temps())

    query = copy_and_set_token(dict_sendHealthCheck, CONFIG.get_value('token'))
    query['checkId'] = check_id
    query['start'] = start
    query['end'] = end
    query['numGpus'] = num_gpus
    query['numCracked'] = len(founds) - 1
    query['errors'] = errors
    req = JsonRequest(query)
    ans = req.execute()
    if ans is None:
        logging.error("Failed to send health check results!")
        sleep(5)
        return
    elif ans['response'] != 'OK':
        logging.error("Error on sending health check results: " + str(ans))
        sleep(5)
        return
    logging.info("Health check completed successfully!")
def loop():
    global binaryDownload, CONFIG

    logging.debug("Entering loop...")
    task = Task()
    chunk = Chunk()
    files = Files()
    hashlist = Hashlist()
    task_change = True
    last_task_id = 0
    cracker = None
    while True:
        CONFIG.update()
        files.deletion_check()  # check if there are deletion orders from the server
        if task.get_task() is not None:
            last_task_id = task.get_task()['taskId']
        task.load_task()
        if task.get_task_id() == -1:  # get_task returned that a health check should be run
            run_health_check()
            task.reset_task()
            continue
        elif task.get_task() is None:
            task_change = True
            continue
        else:
            if task.get_task()['taskId'] != last_task_id:
                task_change = True

        # try to download the needed cracker (if not already present)
        if not binaryDownload.check_version(task.get_task()['crackerId']):
            task_change = True
            task.reset_task()
            continue

        # if prince is used, make sure it's downloaded (deprecated, as preprocessors are integrated generally now)
        if 'usePrince' in task.get_task() and task.get_task()['usePrince']:
            if not binaryDownload.check_prince():
                continue

        # if a preprocessor is used, make sure it's downloaded
        if 'usePreprocessor' in task.get_task() and task.get_task()['usePreprocessor']:
            if not binaryDownload.check_preprocessor(task):
                continue

        # check if all required files are present
        if not files.check_files(task.get_task()['files'], task.get_task()['taskId']):
            task.reset_task()
            continue

        # download the hashlist for the task
        if task_change and not hashlist.load_hashlist(task.get_task()['hashlistId']):
            task.reset_task()
            continue

        if task_change:
            # check if the client version is up-to-date and load the appropriate cracker
            binaryDownload.check_client_version()
            logging.info("Got cracker binary type " + binaryDownload.get_version()['name'])
            if binaryDownload.get_version()['name'].lower() == 'hashcat':
                cracker = HashcatCracker(task.get_task()['crackerId'], binaryDownload)
            else:
                cracker = GenericCracker(task.get_task()['crackerId'], binaryDownload)

        # if it's a task using hashcat brain, we need to load the found hashes
        if (task_change and 'useBrain' in task.get_task() and task.get_task()['useBrain']
                and not hashlist.load_found(task.get_task()['hashlistId'], task.get_task()['crackerId'])):
            task.reset_task()
            continue

        task_change = False
        chunk_resp = chunk.get_chunk(task.get_task()['taskId'])
        if chunk_resp == 0:
            task.reset_task()
            continue
        elif chunk_resp == -1:
            # measure keyspace
            if not cracker.measure_keyspace(task, chunk):  # failure case
                task.reset_task()
            continue
        elif chunk_resp == -3:
            run_health_check()
            task.reset_task()
            continue
        elif chunk_resp == -2:
            # measure benchmark
            logging.info("Benchmark task...")
            result = cracker.run_benchmark(task.get_task())
            if result == 0:
                sleep(10)
                task.reset_task()  # some error must have occurred on benchmarking
                continue
            # send result of benchmark
            query = copy_and_set_token(dict_sendBenchmark, CONFIG.get_value('token'))
            query['taskId'] = task.get_task()['taskId']
            query['result'] = result
            query['type'] = task.get_task()['benchType']
            req = JsonRequest(query)
            ans = req.execute()
            if ans is None:
                logging.error("Failed to send benchmark!")
                sleep(5)
                task.reset_task()
                continue
            elif ans['response'] != 'SUCCESS':
                logging.error("Error on sending benchmark: " + str(ans))
                sleep(5)
                task.reset_task()
                continue
            else:
                logging.info("Server accepted benchmark!")
                continue

        # check if we have an invalid chunk
        if chunk.chunk_data() is not None and chunk.chunk_data()['length'] == 0:
            logging.error("Invalid chunk size (0) retrieved! Retrying...")
            task.reset_task()
            continue

        # run chunk
        logging.info("Start chunk...")
        cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor())
        if cracker.agent_stopped():
            # if the chunk was aborted by a stop from the server, we need to ask for a task again first
            task.reset_task()
            task_change = True
            binaryDownload.check_client_version()
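

# Illustration only (not part of the agent): the special return values of
# chunk.get_chunk() drive the dispatch in loop() above -- 0 means no chunk was
# assigned / reset the task, -1 asks for a keyspace measurement, -2 asks for a
# benchmark, -3 requests a health check, and anything else means a regular
# chunk is ready to run. The standalone sketch below restates that dispatch
# with hypothetical handler callables purely to make the control flow explicit;
# the function and handler names are assumptions, not identifiers from this
# module.
def _dispatch_chunk_response(chunk_resp, handlers):
    # handlers is assumed to be a dict of zero-argument callables keyed by
    # 'reset', 'keyspace', 'benchmark', 'health_check' and 'run_chunk'.
    if chunk_resp == 0:
        return handlers['reset']()
    elif chunk_resp == -1:
        return handlers['keyspace']()
    elif chunk_resp == -2:
        return handlers['benchmark']()
    elif chunk_resp == -3:
        return handlers['health_check']()
    return handlers['run_chunk']()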