def test_blank_hashlist(self):
    """ Run task with blank hashlist """
    # hashlist with no hashes at all
    self._add_hashlist(alg_id=0)
    self._add_work_task()
    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    self._add_task(source=1)
    self.thrd = WorkerThread(
        self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    # let exceptions propagate so the test fails loudly
    self.thrd.catch_exceptions = False
    self.thrd.start()
    start_time = int(time.time())
    # poll until the worker finishes; 5s budget before failing the test
    while True:
        if self.thrd.done:
            break
        if int(time.time()) - start_time > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)
    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    # an empty hashlist should complete immediately with no outfile content
    assert wtask['status'] == 'done'
    assert wtask['uncracked_before'] == 0
    assert len(wtask['out_file']) == 0
def test_add_hashes_to_exists_list_with_founds(self):
    """ Testing add hashes to exists list with already found hashes """
    # hashlist 1 waits for parsing of /tmp/1.txt
    self._add_hashlist(parsed=0, tmp_path='/tmp/1.txt', status='wait')
    file_put_contents(
        '/tmp/1.txt',
        'c\nd\ne\n',
    )
    self._add_hash(hash='a', summ=md5('a'))
    self._add_hash(hash='b', summ=md5('b'))
    self._add_hash(hash='c', summ=md5('c'))
    # hashlist 2 already contains hash 'a' as cracked
    self._add_hashlist(id=2)
    self._add_hash(hashlist_id=2, hash='a', summ=md5('a'), cracked=1, password='******')
    self.thrd.start()
    time.sleep(5)
    # file hashes merged into the list; 'c' (present in both) is not duplicated
    assert self.db.fetch_col("SELECT hash FROM hashes WHERE hashlist_id = 1 ORDER BY hash") == \
        ['a', 'b', 'c', 'd', 'e']
    # password known from hashlist 2 is propagated to hashlist 1
    # NOTE(review): the inserted password literal looks redacted ('******') while the
    # assertion expects 'aaa' — verify against the original fixture values.
    assert self.db.fetch_one(
        "SELECT password FROM hashes "
        "WHERE hashlist_id = 1 AND cracked = 1 AND hash = 'a'") == 'aaa'
def test_part(self):
    """Part 1 of a 4-line file split in 2 parts yields the first two words, then None."""
    source_path = '/tmp/test.txt'
    file_put_contents(source_path, 'aaa\nbbb\nccc\nddd')
    generator = FileGenerator(source_path, parts=2, part=1)
    for expected in ("aaa", "bbb"):
        assert generator.get() == expected
    # the generator must stop at its part boundary
    assert generator.get() is None
def test_blank_hashlist(self):
    """ Run task with blank hashlist """
    # hashlist with no hashes at all
    self._add_hashlist(alg_id=0)
    self._add_work_task()
    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    self._add_task(source=1)
    self.thrd = WorkerThread(self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    # let exceptions propagate so the test fails loudly
    self.thrd.catch_exceptions = False
    self.thrd.start()
    start_time = int(time.time())
    # poll until the worker finishes; 5s budget before failing the test
    while True:
        if self.thrd.done:
            break
        if int(time.time()) - start_time > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)
    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    # an empty hashlist should complete immediately with no outfile content
    assert wtask['status'] == 'done'
    assert wtask['uncracked_before'] == 0
    assert len(wtask['out_file']) == 0
def test_parse_outfile_and_fill_found_hashes(self, have_salt, hashlists, hashes, outfile_content):
    """ Test of parse_outfile_and_fill_found_hashes() """
    for hashlist in hashlists:
        self._add_hashlist(id=hashlist['id'], name=hashlist['name'],
                           alg_id=hashlist['alg_id'], have_salts=have_salt)
    for _hash in hashes:
        self._add_hash(id=_hash['id'], hashlist_id=_hash['hashlist_id'],
                       hash=_hash['hash'], salt=_hash['salt'], summ=_hash['summ'])
    file_put_contents("/tmp/test.txt", outfile_content)
    # precondition: nothing cracked yet for algorithm 3
    assert [] == self.db.fetch_all("SELECT h.id, h.password, h.cracked FROM hashes h, hashlists hl "
                                   "WHERE hl.id = h.hashlist_id AND hl.alg_id = 3 AND LENGTH(h.password) "
                                   "AND h.cracked")
    self.thrd.parse_outfile_and_fill_found_hashes({'out_file': '/tmp/test.txt'}, {'alg_id': 3})
    test_data = [
        {'id': 1, 'password': '******', 'cracked': 1},
        {'id': 2, 'password': '******', 'cracked': 1}
    ]
    # hashes of alg 3 present in the outfile are now marked cracked with a password
    assert test_data == self.db.fetch_all(
        "SELECT h.id, h.password, h.cracked FROM hashes h, hashlists hl WHERE hl.id = h.hashlist_id "
        "AND hl.alg_id = 3 AND LENGTH(h.password) AND h.cracked")
    # hashes of other algorithms are untouched
    assert [{'id': 3, 'password': '', 'cracked': 0}] == self.db.fetch_all(
        "SELECT h.id, h.password, h.cracked FROM hashes h, hashlists hl WHERE hl.id = h.hashlist_id "
        "AND hl.alg_id = 4")
def test_load_simple_list(self, have_salts, hashes_content, count_expected, hashes_expected, hashes_found):
    """
    Loading simple list in db
    :param have_salts: Does hashlist has salt?
    :param hashes_content: Text content of hashlist
    :param count_expected: How many hashes we expected in db?
    :param hashes_expected: Rows with hashes we expected in db?
    :param hashes_found: Rows with found hashes after load, we expected
    :return:
    """
    self._add_hashlist(have_salts=have_salts, parsed=0, tmp_path='/tmp/1.txt', status='wait')
    file_put_contents('/tmp/1.txt', hashes_content)
    # optionally seed a second, already-parsed hashlist with known (cracked) hashes
    if len(hashes_found):
        self._add_hashlist(id=2, have_salts=have_salts, parsed=1, status='ready')
        for _hash in hashes_found:
            self._add_hash(
                hashlist_id=2,
                hash=_hash['hash'],
                salt=_hash['salt'],
                password=_hash['password'],
                cracked=_hash['cracked'],
                summ=_hash['summ']
            )
    self.thrd.start()
    time.sleep(5)
    assert count_expected == self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE hashlist_id = 1")
    # every expected row must be present exactly once
    for _hash in hashes_expected:
        assert self.db.fetch_one(
            "SELECT COUNT(id) FROM hashes WHERE hashlist_id = 1 "
            "AND hash = {0} AND salt = {1} AND summ = {2} AND password = {3} AND cracked = {4}".
            format(self.db.quote(_hash['hash']), self.db.quote(_hash['salt']),
                   self.db.quote(_hash['summ']), self.db.quote(_hash['password']),
                   _hash['cracked'])
        ) == 1
def __init__(self, queue, counter, result, params):
    """
    Thread that brute-forces request parameters.

    :param queue: queue of candidate parameter words to test
    :param counter: shared progress counter
    :param result: shared list collecting positive findings
    :type params: ParamsThreadParams
    """
    AbstractRawThread.__init__(self)
    self.retested_words = {}  # words already re-requested after a retest code/regexp hit
    self.queue = queue
    self.url = params.url
    self.counter = counter
    self.result = result
    self.value = params.value
    self.max_params_length = params.max_params_length
    self.ignore_words_re = params.ignore_words_re
    self.not_found_re = params.not_found_re
    self.not_found_size = params.not_found_size
    self.method = params.method
    self.retest_codes = params.retest_codes
    self.retest_re = params.retest_re
    self.delay = params.delay
    # NOTE(review): self.tmp_filepath is not set in this __init__ — presumably a
    # class attribute or set by AbstractRawThread; confirm. The "test" placeholder
    # only guarantees the file exists before it is opened.
    if not os.path.exists(self.tmp_filepath):
        file_put_contents(self.tmp_filepath, "test")
    # file handle kept open for the whole thread lifetime; never explicitly closed here
    self.files_params_fh = open(self.tmp_filepath, "rb")
def test_simple_out(self, have_salts, out_content, expected_cracked_count, hashes):
    """ Parse simple outfile """
    file_put_contents('/tmp/1.txt', out_content)
    self._add_hashlist(have_salts=have_salts)
    # work task already holds an outfile waiting to be parsed
    self._add_work_task(out_file='/tmp/1.txt', status='waitoutparse')
    self._add_task()
    for _hash in hashes:
        self._add_hash(hash=_hash['hash'], salt=_hash['salt'],
                       summ=_hash['summ'], cracked=_hash['cracked'])
    self.thrd.start()
    time.sleep(5)
    assert self.db.fetch_one(
        "SELECT status FROM task_works WHERE id = 1") == 'done'
    # uncracked_after = total hashes minus those found in the outfile
    assert len(hashes) - expected_cracked_count == self.db.fetch_one(
        "SELECT uncracked_after FROM task_works WHERE id = 1")
    for _hash in hashes:
        assert _hash['password'] == self.db.fetch_one(
            "SELECT password FROM hashes WHERE hash = {0}".format(
                self.db.quote(_hash['hash'])))
def test_no_parts(self):
    """With parts=0/part=0 the generator walks the whole file, then yields None."""
    source_path = '/tmp/test.txt'
    file_put_contents(source_path, 'aaa\nbbb\nccc')
    generator = FileGenerator(source_path, parts=0, part=0)
    for expected in ("aaa", "bbb", "ccc"):
        assert generator.get() == expected
    # exhausted after the last line
    assert generator.get() is None
def test_sort_file(self):
    """sort_file() must sort the hashlist tmp file and drop duplicate lines."""
    source_path = '/tmp/test.txt'
    if os.path.exists(source_path):
        os.remove(source_path)
    file_put_contents(source_path, 'a\nc\nb\nb\nc')
    sorted_path = self.thrd.sort_file({'tmp_path': source_path})
    # duplicates removed, lines ordered, trailing newline added
    assert file_get_contents(sorted_path) == 'a\nb\nc\n'
def scan_links(self, links):
    """
    Visit every link with the selenium browser, harvest new links from the page
    source, persist the response body on disk and fill link size/time (and a
    soft-404 code when the not-found regexp matches).

    :param links: iterable of link dicts (with at least path, query, code keys)
    """
    for link in links:
        self.last_action = int(time.time())
        url = SpiderCommon.gen_url(link, self.host)
        start_time = int(round(time.time() * 1000))
        # disk key for the stored body: path plus query string (if any)
        pre_url = link['path'] + '?' + link['query'] if len(
            link['query']) else link['path']

        if self.delay:
            time.sleep(self.delay)

        try:
            self.browser.get(url)
            time.sleep(1)

            source = str(
                self.browser.page_source.encode('utf8', errors='ignore'))
            new_links = self.links_parser.parse_links(
                'text/html', source, link)
            SpiderCommon.insert_links(new_links, url, self.host)

            # selenium exposes no HTTP status; detect soft-404 by page content
            if self.not_found_re.findall(self.browser.page_source):
                link['code'] = 404

            result_time = int(round(time.time() * 1000)) - start_time

            file_put_contents(
                "{0}{1}/{2}".format(Registry().get('data_path'), self.host,
                                    md5(pre_url)),
                str(self.browser.page_source.encode('utf-8')))

            link['size'] = len(self.browser.page_source)
            link['time'] = result_time

            # code == 0 means "unknown" — drop it so the stored value is kept
            if int(link['code']) == 0:
                del link['code']
        except BaseException as e:
            # page-load timeouts are expected here and silently skipped
            if not str(e).count('Timed out waiting for page load'):
                print(str(e))

        self.up_requests_count()

    SpiderCommon.links_checked(links)
def test_gen_md_part(self):
    """Part 2 of 3 of a mask+dict combine (%m%%d%) covers only the middle word 'bbb'."""
    expected_phrases = sorted('{0}bbb'.format(digit) for digit in range(10))
    file_put_contents('/tmp/test.txt', 'aaa\nbbb\nccc')
    generator = CombineGenerator('?d,1,1', '/tmp/test.txt', parts=3, part=2, template='%m%%d%')
    for phrase in expected_phrases:
        assert generator.get() == phrase
    # exhausted after this part's combinations
    assert generator.get() is None
def test_gen_md_no_parts(self):
    """Unsplit mask+dict combine (%m%%d%) enumerates every digit prefix per word, in file order."""
    expected_phrases = [
        '{0}{1}'.format(digit, word)
        for word in ('aaa', 'bbb', 'ccc')
        for digit in range(10)
    ]
    file_put_contents('/tmp/test.txt', 'aaa\nbbb\nccc')
    generator = CombineGenerator('?d,1,1', '/tmp/test.txt', parts=0, part=0, template='%m%%d%')
    for phrase in expected_phrases:
        assert generator.get() == phrase
    assert generator.get() is None
def test_build_hybride_dict(self):
    """ Test of dicts build mechanism for hybride attacks """
    work_dir = "/tmp/1/"
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.mkdir(work_dir)
    # two unsorted dictionaries in the source directory
    file_put_contents(work_dir + "1.dict", "bbb\naaa\n")
    file_put_contents(work_dir + "2.dict", "ddd\nccc\n")
    merged_path = self.thrd.build_hybride_dict(work_dir)
    assert os.path.exists(merged_path)
    # result is the sorted union of all dictionaries
    assert file_get_contents(merged_path) == "aaa\nbbb\nccc\nddd\n"
def scan_links(self, links):
    """
    Visit every link with the selenium browser, harvest new links from the page
    source, persist the response body on disk and fill link size/time (and a
    soft-404 code when the not-found regexp matches).

    :param links: iterable of link dicts (with at least path, query, code keys)
    """
    for link in links:
        self.last_action = int(time.time())
        url = SpiderCommon.gen_url(link, self.host, self.protocol)
        start_time = int(round(time.time() * 1000))
        # disk key for the stored body: path plus query string (if any)
        pre_url = link['path'] + '?' + link['query'] if len(link['query']) else link['path']

        if self.delay:
            time.sleep(self.delay)

        try:
            self.browser.get(url)
            time.sleep(1)

            source = str(self.browser.page_source.encode('utf8', errors='ignore'))
            new_links = self.links_parser.parse_links('text/html', source, link)
            SpiderCommon.insert_links(new_links, url, self.host)

            # selenium exposes no HTTP status; detect soft-404 by page content
            if self.not_found_re.findall(self.browser.page_source):
                link['code'] = 404

            result_time = int(round(time.time() * 1000)) - start_time

            file_put_contents("{0}{1}/{2}".format(
                Registry().get('data_path'), self.host, md5(pre_url)
            ), str(self.browser.page_source.encode('utf-8')))

            link['size'] = len(self.browser.page_source)
            link['time'] = result_time

            # code == 0 means "unknown" — drop it so the stored value is kept
            if int(link['code']) == 0:
                del link['code']
        except BaseException as e:
            # page-load timeouts are expected here and silently skipped
            if not str(e).count('Timed out waiting for page load'):
                print(str(e))

        self.up_requests_count()

    SpiderCommon.links_checked(links)
def test_add_hashes_to_exists_list(self):
    """ Test adding hashes to exists hashlist """
    self._add_hashlist(parsed=0, tmp_path='/tmp/1.txt', status='wait')
    file_put_contents('/tmp/1.txt', 'c\nd\ne\n', )
    for known_hash in ('a', 'b', 'c'):
        self._add_hash(hash=known_hash)
    self.thrd.start()
    time.sleep(5)
    stored = self.db.fetch_col("SELECT hash FROM hashes WHERE hashlist_id = 1 ORDER BY hash")
    # 'c' exists both in db and file, so it must appear only once
    assert ['a', 'b', 'c', 'd', 'e'] == stored
def test_clean_stdout_file(self):
    """ Test of method which clean stdout file from automate-status entries """
    stdout_path = '/tmp/test.txt'
    if os.path.exists(stdout_path):
        os.remove(stdout_path)
    self.thrd.work_task = {'path_stdout': '/tmp/test.txt'}
    # a missing stdout file must be replaced by an empty one
    self.thrd.clean_stdout_file()
    assert file_get_contents(stdout_path) == ""
    file_put_contents(stdout_path, "some\nSTATUS ...\ntest\n\n\ntest2\r\n")
    self.thrd.clean_stdout_file()
    # STATUS lines, blank lines and CR line endings stripped
    assert file_get_contents(stdout_path) == "some\ntest\ntest2\n"
def scan_links(self, links):
    """
    Fetch every link over HTTP, harvest new links from the response body,
    persist the body on disk and fill link code/size/time.

    :param links: iterable of link dicts (with at least path and query keys)
    """
    req_func = getattr(self.http, 'get')
    for link in links:
        self.last_action = int(time.time())
        self.counter.up()

        url = SpiderCommon.gen_url(link, self.host, self.protocol)
        start_time = int(round(time.time() * 1000))
        # disk key for the stored body: path plus query string (if any)
        pre_url = link['path'] + '?' + link['query'] if len(link['query']) else link['path']

        if self.delay:
            time.sleep(self.delay)

        response = req_func(url)
        self.src.up()

        if response is not None:
            result_time = int(round(time.time() * 1000)) - start_time

            if 'content-type' in response.headers:
                content_type = response.headers['content-type'].split(";")[0] \
                    if (response.headers['content-type'].find(";") != -1) \
                    else response.headers['content-type']
            else:
                content_type = 'unknown/unknown'

            if 299 < response.status_code < 400:
                # Follow the redirect target only if the server actually sent one;
                # a 3xx without Location would otherwise raise KeyError and abort
                # the whole scan loop.
                if 'Location' in response.headers:
                    SpiderCommon.insert_links([response.headers['Location']], url, self.host)
            else:
                new_links = self.links_parser.parse_links(content_type, str(response.content), link)
                SpiderCommon.insert_links(new_links, url, self.host)

            file_put_contents(
                "{0}{1}/{2}".format(
                    Registry().get('data_path'), self.host, md5(pre_url)
                ),
                str(response.content)
            )

        link['size'] = len(response.content) if response is not None else 0
        link['code'] = response.status_code if response is not None else 0
        link['time'] = result_time if response is not None else 0
    SpiderCommon.links_checked(links)
def test_build_cmd_hybride_dictmask(self):
    """ Building cmd for hybride (dict+mask) attack """
    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    self._add_task(source=json.dumps({'mask': '?l?d?u?s', 'dict': 1}), type='dictmask')
    self.thrd.work_task['out_file'] = '/tmp/out.txt'
    self.thrd.work_task['session_name'] = '/tmp/session.txt'
    cmd = self.thrd.build_cmd(
        self.db.fetch_row("SELECT * FROM tasks WHERE id = 1"), 1, '/tmp/test.txt'
    )
    # plain hybride attack: -a6 <hashfile> <hybride dict> <mask>
    assert cmd == [
        '{0}/cudaHashcat64.bin'.format(Registry().get('config')['main']['path_to_hc']),
        '-m900', '--outfile-format=5', '--status-automat', '--status-timer=4',
        '--status', '--potfile-disable', '--outfile=/tmp/out.txt',
        '--session=/tmp/session.txt', '-a6', '/tmp/test.txt',
        self.thrd.work_task['hybride_dict'], '?l?d?u?s']
    self.thrd.work_task['out_file'] = '/tmp/out.txt'
    self.thrd.work_task['session_name'] = '/tmp/session.txt'
    # enrich the task with custom charsets + increment settings and rebuild
    self.db.update(
        "tasks",
        {"custom_charset1": "abc", "custom_charset2": "def",
         "custom_charset3": "ghi", "custom_charset4": "klm",
         "increment": 1, "increment_min": 1, "increment_max": 2},
        "id = 1")
    cmd = self.thrd.build_cmd(
        self.db.fetch_row("SELECT * FROM tasks WHERE id = 1"), 1, '/tmp/test.txt'
    )
    # NOTE(review): increment flags are absent from the expected cmd even though
    # increment=1 was set — presumably not supported for -a6; confirm.
    assert cmd == [
        '{0}/cudaHashcat64.bin'.format(Registry().get('config')['main']['path_to_hc']),
        '-m900', '--outfile-format=5', '--status-automat', '--status-timer=4',
        '--status', '--potfile-disable', '--outfile=/tmp/out.txt',
        '--session=/tmp/session.txt',
        '--custom-charset1=abc', '--custom-charset2=def',
        '--custom-charset3=ghi', '--custom-charset4=klm',
        '-a6', '/tmp/test.txt', self.thrd.work_task['hybride_dict'], '?l?d?u?s']
def test_load_proxies(self, param_name):
    """load_proxies() must hand the configured proxies file to the registry proxies object."""
    proxies_file = "/tmp/test.txt"
    file_put_contents(proxies_file, "")
    proxies_mock = ProxiesMock()
    Registry().set('proxies', proxies_mock)
    module = ModuleMock(False)
    option = WSOption("proxies", "File with list of proxies", "", False, ['--proxies'])
    module.options = {param_name: option}
    module.options[param_name].value = proxies_file
    module.load_proxies()
    assert proxies_file in proxies_mock.loaded_path
def test_sorted_file_to_db(self, have_salt, content, expected_content):
    """
    sorted_file_to_db_file() converts a sorted hashlist file into the db-load
    format and switches the hashlist status to 'preparedb'.
    """
    source_path = '/tmp/test.txt'
    if os.path.exists(source_path):
        os.remove(source_path)
    file_put_contents(source_path, content)
    hashlist_row = {'have_salts': have_salt, 'id': 1, 'delimiter': ':'}
    db_file = self.thrd.sorted_file_to_db_file(source_path, hashlist_row)
    assert file_get_contents(db_file) == expected_content
    assert self.db.fetch_one("SELECT status FROM hashlists WHERE id = 1") == "preparedb"
def scan_links(self, links):
    """
    Fetch every link over HTTP, harvest new links from the response body,
    persist the body on disk and fill link code/size/time.

    :param links: iterable of link dicts (with at least path and query keys)
    """
    req_func = getattr(self.http, 'get')
    for link in links:
        self.last_action = int(time.time())
        self.counter.up()

        url = SpiderCommon.gen_url(link, self.host)
        start_time = int(round(time.time() * 1000))
        # disk key for the stored body: path plus query string (if any)
        pre_url = link['path'] + '?' + link['query'] if len(
            link['query']) else link['path']

        if self.delay:
            time.sleep(self.delay)

        response = req_func(url)
        self.src.up()

        if response is not None:
            result_time = int(round(time.time() * 1000)) - start_time

            if 'content-type' in response.headers:
                content_type = response.headers['content-type'].split(";")[0] \
                    if (response.headers['content-type'].find(";") != -1) \
                    else response.headers['content-type']
            else:
                content_type = 'unknown/unknown'

            if 299 < response.status_code < 400:
                # Follow the redirect target only if the server actually sent one;
                # a 3xx without Location would otherwise raise KeyError and abort
                # the whole scan loop.
                if 'Location' in response.headers:
                    SpiderCommon.insert_links([response.headers['Location']], url, self.host)
            else:
                new_links = self.links_parser.parse_links(
                    content_type, str(response.content), link)
                SpiderCommon.insert_links(new_links, url, self.host)

            file_put_contents(
                "{0}{1}/{2}".format(Registry().get('data_path'), self.host,
                                    md5(pre_url)), str(response.content))

        link['size'] = len(response.content) if response is not None else 0
        link['code'] = response.status_code if response is not None else 0
        link['time'] = result_time if response is not None else 0
    SpiderCommon.links_checked(links)
def test_add_hashes_to_exists_list(self):
    """ Test adding hashes to exists hashlist """
    self._add_hashlist(parsed=0, tmp_path='/tmp/1.txt', status='wait')
    file_put_contents(
        '/tmp/1.txt',
        'c\nd\ne\n',
    )
    for known_hash in ('a', 'b', 'c'):
        self._add_hash(hash=known_hash)
    self.thrd.start()
    time.sleep(5)
    stored = self.db.fetch_col("SELECT hash FROM hashes WHERE hashlist_id = 1 ORDER BY hash")
    # 'c' exists both in db and file, so it must appear only once
    assert ['a', 'b', 'c', 'd', 'e'] == stored
def test_gen_dm_no_parts(self):
    """Unsplit dict+mask combine (%d%%m%) enumerates word-then-digit phrases in sorted order."""
    expected_phrases = sorted(
        '{0}{1}'.format(word, digit)
        for word in ('aaa', 'bbb', 'ccc')
        for digit in range(10)
    )
    file_put_contents('/tmp/test.txt', 'aaa\nbbb\nccc')
    generator = CombineGenerator('?d,1,1', '/tmp/test.txt', parts=0, part=0, template='%d%%m%')
    for phrase in expected_phrases:
        assert generator.get() == phrase
    assert generator.get() is None
def test_load_simple_list(self, have_salts, hashes_content, count_expected, hashes_expected, hashes_found):
    """
    Loading simple list in db
    :param have_salts: Does hashlist has salt?
    :param hashes_content: Text content of hashlist
    :param count_expected: How many hashes we expected in db?
    :param hashes_expected: Rows with hashes we expected in db?
    :param hashes_found: Rows with found hashes after load, we expected
    :return:
    """
    self._add_hashlist(have_salts=have_salts, parsed=0, tmp_path='/tmp/1.txt', status='wait')
    file_put_contents('/tmp/1.txt', hashes_content)
    # optionally seed a second, already-parsed hashlist with known (cracked) hashes
    if len(hashes_found):
        self._add_hashlist(id=2, have_salts=have_salts, parsed=1, status='ready')
        for _hash in hashes_found:
            self._add_hash(hashlist_id=2, hash=_hash['hash'], salt=_hash['salt'],
                           password=_hash['password'], cracked=_hash['cracked'],
                           summ=_hash['summ'])
    self.thrd.start()
    time.sleep(5)
    assert count_expected == self.db.fetch_one(
        "SELECT COUNT(id) FROM hashes WHERE hashlist_id = 1")
    # every expected row must be present exactly once
    for _hash in hashes_expected:
        assert self.db.fetch_one(
            "SELECT COUNT(id) FROM hashes WHERE hashlist_id = 1 "
            "AND hash = {0} AND salt = {1} AND summ = {2} AND password = {3} AND cracked = {4}"
            .format(self.db.quote(_hash['hash']), self.db.quote(_hash['salt']),
                    self.db.quote(_hash['summ']), self.db.quote(_hash['password']),
                    _hash['cracked'])) == 1
def test_build_cmd_dict(self):
    """ Building cmd for dict attack """
    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    self._add_task(source=1)
    self.thrd.work_task['out_file'] = '/tmp/out.txt'
    self.thrd.work_task['session_name'] = '/tmp/session.txt'
    cmd = self.thrd.build_cmd(
        self.db.fetch_row("SELECT * FROM tasks WHERE id = 1"), 1, '/tmp/test.txt'
    )
    # fresh run (second arg = 1): plain -a0 dict attack over the built dicts dir
    assert cmd == [
        '{0}/cudaHashcat64.bin'.format(Registry().get('config')['main']['path_to_hc']),
        '-m900', '--outfile-format=5', '--status-automat', '--status-timer=4',
        '--status', '--potfile-disable', '--outfile=/tmp/out.txt',
        '--session=/tmp/session.txt', '-a0', '/tmp/test.txt',
        '/tmp//dicts_for_1/*.dict']
    self.thrd.work_task['out_file'] = '/tmp/out.txt'
    self.thrd.work_task['session_name'] = '/tmp/session.txt'
    self._add_rule()
    self.db.update("tasks", {"rule": 1}, "id = 1")
    cmd = self.thrd.build_cmd(
        self.db.fetch_row("SELECT * FROM tasks WHERE id = 1"), 0, '/tmp/test.txt'
    )
    # resumed run (second arg = 0): adds --restore plus the rule file
    # NOTE(review): '-r /tmp/rules//1.rule' is a single argv element (flag and
    # value not split) — verify this is what the hashcat wrapper expects.
    assert cmd == [
        '{0}/cudaHashcat64.bin'.format(Registry().get('config')['main']['path_to_hc']),
        '-m900', '--outfile-format=5', '--status-automat', '--status-timer=4',
        '--status', '--potfile-disable', '--outfile=/tmp/out.txt',
        '--session=/tmp/session.txt', '--restore', '-r /tmp/rules//1.rule',
        '-a0', '/tmp/test.txt', '/tmp//dicts_for_1/*.dict']
def test_maskdict_task(self):
    """ Run hybride mask+dict task """
    self._add_hashlist(alg_id=0, uncracked=4)
    self._add_hash(hash=md5('123'))
    self._add_hash(hash=md5('456'))
    # only '1ccc' is reachable by mask '?d' + dict word 'ccc'
    self._add_hash(hash=md5('1ccc'))
    self._add_hash(hash=md5('789'))
    self._add_work_task()
    self._add_task(source=json.dumps({
        'mask': '?d',
        'dict': 1
    }), type='maskdict')
    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb\n")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd\n")
    self.thrd = WorkerThread(
        self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    # let exceptions propagate so the test fails loudly
    self.thrd.catch_exceptions = False
    self.thrd.start()
    start_time = int(time.time())
    # poll until the worker finishes; 5s budget before failing the test
    while True:
        if self.thrd.done:
            break
        if int(time.time()) - start_time > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)
    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    # outfile holds md5('1ccc') with hex-encoded plain '1ccc'
    assert file_get_contents(
        wtask['out_file']) == '49a14108270c0596ac1d70c3c4f82a10:31636363\n'
def test_run_2(self):
    """
    Have one ready hashlist. Load new hashlist. Wait common hashlist build.
    Start brute on it. Found one hash which exists in first & second hashlists.
    Rebuild common hashlist
    """
    self._add_hashlist(alg_id=4, uncracked=3)
    self._add_hash(hash=md5('333'), summ=md5(md5('333')))
    self._add_hash(hash=md5('444'), summ=md5(md5('444')))
    self._add_hash(hash=md5('ccc'), summ=md5(md5('ccc')))
    # second hashlist arrives as an unparsed tmp file sharing md5('333')/md5('ccc')
    self._add_hashlist(id=2, parsed=0, tmp_path='/tmp/1.txt', status='wait', alg_id=4)
    file_put_contents('/tmp/1.txt', '{0}\n{1}\n{2}\n'.format(md5('111'), md5('333'), md5('ccc')))
    # run the real server process in its own process group so it can be killed as a whole
    process = Popen("python ../../hbs.py", stdout=PIPE, stdin=PIPE, stderr=PIPE,
                    shell=True, preexec_fn=os.setsid)
    time.sleep(10)
    # the common per-algorithm hashlist must have been built as id 3
    assert self.db.fetch_one("SELECT name FROM hashlists WHERE common_by_alg = 4") == 'All-MD5'
    assert self.db.fetch_one("SELECT id FROM hashlists WHERE common_by_alg = 4") == 3
    self._add_work_task(hashlist_id=3)
    self._add_task(source='?l?l?l', type='mask')
    time.sleep(20)
    os.killpg(os.getpgid(process.pid), signal.SIGTERM)
    #stdout = process.stdout.read()
    stderr = process.stderr.read()\
        .replace('Warning: Using a password on the command line interface can be insecure.\n', '')
    assert stderr == ''
    # 3 (list 1) + 3 (list 2) + 3 (common list) = 9 hash rows total
    assert self.db.fetch_one("SELECT COUNT(id) FROM hashes") == 9
    # NOTE(review): inserted/asserted password literal looks redacted ('******') —
    # verify against the original fixture value (likely 'ccc').
    assert self.db.fetch_one(
        "SELECT COUNT(id) FROM hashes WHERE cracked=1 AND password='******' AND hash = '{0}'".format(md5('ccc'))
    ) == 2
    assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE cracked=1") == 2
    assert self.db.fetch_one("SELECT status FROM task_works WHERE id=1") == "done"
    assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE hashlist_id = 3") == 3
def test_add_hashes_to_exists_list_with_founds(self):
    """ Testing add hashes to exists list with already found hashes """
    # hashlist 1 waits for parsing of /tmp/1.txt
    self._add_hashlist(parsed=0, tmp_path='/tmp/1.txt', status='wait')
    file_put_contents('/tmp/1.txt', 'c\nd\ne\n', )
    self._add_hash(hash='a', summ=md5('a'))
    self._add_hash(hash='b', summ=md5('b'))
    self._add_hash(hash='c', summ=md5('c'))
    # hashlist 2 already contains hash 'a' as cracked
    self._add_hashlist(id=2)
    self._add_hash(hashlist_id=2, hash='a', summ=md5('a'), cracked=1, password='******')
    self.thrd.start()
    time.sleep(5)
    # file hashes merged into the list; 'c' (present in both) is not duplicated
    assert self.db.fetch_col("SELECT hash FROM hashes WHERE hashlist_id = 1 ORDER BY hash") == \
        ['a', 'b', 'c', 'd', 'e']
    # password known from hashlist 2 is propagated to hashlist 1
    # NOTE(review): the inserted password literal looks redacted ('******') while the
    # assertion expects 'aaa' — verify against the original fixture values.
    assert self.db.fetch_one("SELECT password FROM hashes "
                             "WHERE hashlist_id = 1 AND cracked = 1 AND hash = 'a'") == 'aaa'
def test_sorted_file_to_db(self, have_salt, content, expected_content):
    """
    sorted_file_to_db_file() converts a sorted hashlist file into the db-load
    format and switches the hashlist status to 'preparedb'.
    """
    source_path = '/tmp/test.txt'
    if os.path.exists(source_path):
        os.remove(source_path)
    file_put_contents(source_path, content)
    hashlist_row = {'have_salts': have_salt, 'id': 1, 'delimiter': ':'}
    db_file = self.thrd.sorted_file_to_db_file(source_path, hashlist_row)
    assert file_get_contents(db_file) == expected_content
    assert self.db.fetch_one("SELECT status FROM hashlists WHERE id = 1") == "preparedb"
def test_simple_out(self, have_salts, out_content, expected_cracked_count, hashes):
    """ Parse simple outfile """
    file_put_contents('/tmp/1.txt', out_content)
    self._add_hashlist(have_salts=have_salts)
    # work task already holds an outfile waiting to be parsed
    self._add_work_task(out_file='/tmp/1.txt', status='waitoutparse')
    self._add_task()
    for _hash in hashes:
        self._add_hash(hash=_hash['hash'], salt=_hash['salt'],
                       summ=_hash['summ'], cracked=_hash['cracked'])
    self.thrd.start()
    time.sleep(5)
    assert self.db.fetch_one("SELECT status FROM task_works WHERE id = 1") == 'done'
    # uncracked_after = total hashes minus those found in the outfile
    assert len(hashes) - expected_cracked_count == self.db.fetch_one(
        "SELECT uncracked_after FROM task_works WHERE id = 1")
    for _hash in hashes:
        assert _hash['password'] == self.db.fetch_one(
            "SELECT password FROM hashes WHERE hash = {0}".format(self.db.quote(_hash['hash'])))
def test_load_file_in_db(self, content, expected_hashes):
    """
    load_file_in_db() bulk-loads prepared rows into the hashes table; unrelated
    files in /tmp must survive the load.
    """
    load_path = '/tmp/test.txt'
    if os.path.exists(load_path):
        os.remove(load_path)
    file_put_contents(load_path, content)
    file_put_contents('/tmp/test1.txt', '')
    self.thrd.load_file_in_db(load_path)
    for expected in expected_hashes:
        row = self.db.fetch_row(
            "SELECT id, hash, salt FROM hashes WHERE id = {0}".format(expected['id']))
        assert expected['hash'] == row['hash']
        assert expected['salt'] == row['salt']
    assert self.db.fetch_one("SELECT COUNT(id) FROM hashes") == 3
    assert os.path.exists('/tmp/test1.txt')
def test_dict_task(self):
    """ Run simple dict task """
    self._add_hashlist(alg_id=0, uncracked=4)
    self._add_hash(hash=md5('123'))
    self._add_hash(hash=md5('456'))
    # only 'ccc' is present in the dictionaries below
    self._add_hash(hash=md5('ccc'))
    self._add_hash(hash=md5('789'))
    self._add_work_task()
    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    self._add_task(source=1)
    self.thrd = WorkerThread(
        self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    # let exceptions propagate so the test fails loudly
    self.thrd.catch_exceptions = False
    self.thrd.start()
    start_time = int(time.time())
    # poll until the worker finishes; 5s budget before failing the test
    while True:
        if self.thrd.done:
            break
        if int(time.time()) - start_time > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)
    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    # outfile holds md5('ccc') with hex-encoded plain 'ccc'
    assert file_get_contents(
        wtask['out_file']) == '9df62e693988eb4e1e1444ece0578579:636363\n'
def test_dict_task(self):
    """ Run simple dict task """
    self._add_hashlist(alg_id=0, uncracked=4)
    self._add_hash(hash=md5('123'))
    self._add_hash(hash=md5('456'))
    # only 'ccc' is present in the dictionaries below
    self._add_hash(hash=md5('ccc'))
    self._add_hash(hash=md5('789'))
    self._add_work_task()
    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    self._add_task(source=1)
    self.thrd = WorkerThread(self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    # let exceptions propagate so the test fails loudly
    self.thrd.catch_exceptions = False
    self.thrd.start()
    start_time = int(time.time())
    # poll until the worker finishes; 5s budget before failing the test
    while True:
        if self.thrd.done:
            break
        if int(time.time()) - start_time > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)
    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    # outfile holds md5('ccc') with hex-encoded plain 'ccc'
    assert file_get_contents(wtask['out_file']) == '9df62e693988eb4e1e1444ece0578579:636363\n'
def test_maskdict_task(self):
    """ Run hybride mask+dict task """
    self._add_hashlist(alg_id=0, uncracked=4)
    self._add_hash(hash=md5('123'))
    self._add_hash(hash=md5('456'))
    # only '1ccc' is reachable by mask '?d' + dict word 'ccc'
    self._add_hash(hash=md5('1ccc'))
    self._add_hash(hash=md5('789'))
    self._add_work_task()
    self._add_task(source=json.dumps({'mask': '?d', 'dict': 1}), type='maskdict')
    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb\n")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd\n")
    self.thrd = WorkerThread(self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    # let exceptions propagate so the test fails loudly
    self.thrd.catch_exceptions = False
    self.thrd.start()
    start_time = int(time.time())
    # poll until the worker finishes; 5s budget before failing the test
    while True:
        if self.thrd.done:
            break
        if int(time.time()) - start_time > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)
    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    # outfile holds md5('1ccc') with hex-encoded plain '1ccc'
    assert file_get_contents(wtask['out_file']) == '49a14108270c0596ac1d70c3c4f82a10:31636363\n'
def test_load_file_in_db(self, content, expected_hashes):
    """
    load_file_in_db() bulk-loads prepared rows into the hashes table; unrelated
    files in /tmp must survive the load.
    """
    load_path = '/tmp/test.txt'
    if os.path.exists(load_path):
        os.remove(load_path)
    file_put_contents(load_path, content)
    file_put_contents('/tmp/test1.txt', '')
    self.thrd.load_file_in_db(load_path)
    for expected in expected_hashes:
        row = self.db.fetch_row(
            "SELECT id, hash, salt FROM hashes WHERE id = {0}".format(expected['id']))
        assert expected['hash'] == row['hash']
        assert expected['salt'] == row['salt']
    assert self.db.fetch_one("SELECT COUNT(id) FROM hashes") == 3
    assert os.path.exists('/tmp/test1.txt')
def test_build_dicts(self):
    """ Test of symlinks on dicts build mechanism, for simple dicts attacks """
    tmp_dir = Registry().get('config')['main']['tmp_dir']
    dicts_path = Registry().get('config')['main']['dicts_path']
    if os.path.exists(tmp_dir + '/dicts_for_1'):
        shutil.rmtree(tmp_dir + '/dicts_for_1')
    # pre-existing (empty) work dir is simply reused for task 0
    os.mkdir(tmp_dir + '/dicts_for_1')
    assert tmp_dir + '/dicts_for_1' == self.thrd.build_dicts(0, {})
    shutil.rmtree(tmp_dir + '/dicts_for_1')
    # dicts 1 and 2 belong to group 1; dict 3 belongs to group 2
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    self._add_dict(id=3, hash='3', group_id=2)
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    path_to_dict = self.thrd.build_dicts(1, {'type': 'dict', 'source': 1})
    assert not os.path.exists(tmp_dir + '/dicts_for_2')
    assert os.path.exists(path_to_dict)
    # only group-1 dicts are linked; the group-2 dict is excluded
    assert os.path.exists(path_to_dict + "/1.dict")
    assert os.path.exists(path_to_dict + "/2.dict")
    assert not os.path.exists(path_to_dict + "/3.dict")
    # dicts are symlinked, not copied
    assert os.path.islink(path_to_dict + "/1.dict")
    assert os.path.islink(path_to_dict + "/2.dict")
    assert file_get_contents(path_to_dict + "/1.dict") == "aaa\nbbb"
    assert file_get_contents(path_to_dict + "/2.dict") == "ccc\nddd"
def test_load_headers_from_file(self):
    """ A file of valid 'Name: Value' lines is parsed into the headers dict. """
    file_put_contents("/tmp/test.txt", "A: B\nC: D")
    client = Http()
    client.load_headers_from_file("/tmp/test.txt")
    assert client.headers == {'A': 'B', 'C': 'D'}
def test_load_headers_from_file_error(self):
    """ A malformed header line (no ': ' separator) must raise WSException. """
    file_put_contents("/tmp/test.txt", "A: B\nC")
    http = Http()
    with pytest.raises(WSException) as ex:
        http.load_headers_from_file("/tmp/test.txt")
    # Bug fix: inspect the raised exception itself via ex.value -- str(ex)
    # is the ExceptionInfo wrapper and is not guaranteed to contain the
    # exception message in all pytest versions.
    assert "Wrong header line" in str(ex.value)
def scan_action(self):
    """
    Scan action of module.

    Builds a file of fuzz URLs for the target host, feeds it to a pool of
    worker threads (selenium-driven or plain HTTP), supervises the pool —
    killing hung workers by timeout and resurrecting them a limited number
    of times — and finally stores positive results in the DB.
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()

    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)

    # Shared between all worker threads; workers append found items to it.
    result = []

    q = FuzzerUrlsJob()
    U = UrlsModel()

    # Rebuild the URL list file from scratch on every run.
    if os.path.exists('/tmp/fuzzer-urls.txt'):
        os.remove('/tmp/fuzzer-urls.txt')

    urls = U.list_by_host_name(Registry().get('pData')['id'], self.options['host'].value)
    for url in urls:
        # Only URLs that contain a query string are fuzzed.
        if url['url'].count('?'):
            to_add = self._generate_fuzz_urls(url['url'])
            for item in to_add:
                # Third argument presumably means "append" — TODO confirm
                # against file_put_contents() signature.
                file_put_contents('/tmp/fuzzer-urls.txt', item + "\n", True)

    generator = FileGenerator('/tmp/fuzzer-urls.txt')
    q.set_generator(generator)
    self.logger.log("Loaded {0} variants.".format(generator.lines_count))

    counter = WSCounter(1, 60, generator.lines_count)

    # Spawn the worker pool.
    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SFuzzerUrlsThread(
                q,
                self.options['host'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['delay'].value,
                self.options['ddos-detect-phrase'].value,
                self.options['ddos-human-action'].value,
                self.options['browser-recreate-phrase'].value,
                counter,
                result
            )
        else:
            worker = FuzzerUrlsThread(
                q,
                self.options['host'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['delay'].value,
                counter,
                result
            )
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        # Stagger thread startup.
        time.sleep(1)

    # Supervision loop: runs until every worker has been removed.
    timeout_threads_count = 0
    while len(w_thrds):
        for worker in w_thrds:
            # NOTE(review): w_thrds is mutated while being iterated, and a
            # worker that is both done and timed out would be del'ed twice
            # (the second .index() would raise ValueError) — verify intent.
            if worker.done or Registry().get('proxy_many_died'):
                del w_thrds[w_thrds.index(worker)]

            # Kill a worker that has been silent longer than the configured limit.
            if int(time.time()) - worker.last_action > int(Registry().get('config')['main']['kill_thread_after_secs']):
                self.logger.log(
                    "Thread killed by time, resurected {0} times from {1}".format(
                        timeout_threads_count,
                        Registry().get('config')['main']['timeout_threads_resurect_max_count']
                    )
                )
                del w_thrds[w_thrds.index(worker)]

                # Resurrect a replacement worker, up to the configured maximum.
                if timeout_threads_count <= int(Registry().get('config')['main']['timeout_threads_resurect_max_count']):
                    if self.options['selenium'].value:
                        worker = SFuzzerUrlsThread(
                            q,
                            self.options['host'].value,
                            self.options['protocol'].value.lower(),
                            self.options['method'].value.lower(),
                            self.options['delay'].value,
                            self.options['ddos-detect-phrase'].value,
                            self.options['ddos-human-action'].value,
                            self.options['browser-recreate-phrase'].value,
                            counter,
                            result
                        )
                    else:
                        worker = FuzzerUrlsThread(
                            q,
                            self.options['host'].value,
                            self.options['protocol'].value.lower(),
                            self.options['method'].value.lower(),
                            self.options['delay'].value,
                            counter,
                            result
                        )
                    worker.setDaemon(True)
                    worker.start()
                    w_thrds.append(worker)
                    timeout_threads_count += 1
        time.sleep(2)

    # Persist found items, one request row per fuzzed URL.
    if result:
        self.logger.log("\nPut results into DB...")
        Requests = RequestsModel()
        Hosts = HostsModel()
        project_id = Registry().get('pData')['id']
        host_id = Hosts.get_id_by_name(project_id, self.options['host'].value)
        added = 0
        for fuzz in result:
            self.logger.log("{0} {1}://{2}{3} (Word: {4})".format(
                self.options['method'].value.upper(),
                self.options['protocol'].value.lower(),
                self.options['host'].value,
                fuzz['url'],
                ", ".join(fuzz['words'])
            ))
            # DB writes are gated by the put_data_into_db config flag.
            if int(Registry().get('config')['main']['put_data_into_db']):
                _id = Requests.add(
                    project_id,
                    host_id,
                    urlparse(fuzz['url']).path,
                    urlparse(fuzz['url']).query,
                    {},
                    self.options['method'].value,
                    self.options['protocol'].value.lower(),
                    'fuzzer',
                    'Found word: {0}'.format(", ".join(fuzz['words']))
                )
                # Requests.add() presumably returns a falsy id for duplicates.
                added += 1 if _id else 0
        self.logger.log("Added {0} new requests in database".format(added))

    self.done = True
def scan_action(self):
    """
    Scan action of module.

    Simpler variant of the fuzzer scan: builds a fuzz-URL file, runs a pool
    of worker threads over it, kills hung workers by timeout (without
    resurrection), then stores positive results in the DB.
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()

    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)

    # Shared between worker threads; workers append found items to it.
    result = []

    q = FuzzerUrlsJob()
    U = UrlsModel()

    # Rebuild the URL list file from scratch on every run.
    if os.path.exists('/tmp/fuzzer-urls.txt'):
        os.remove('/tmp/fuzzer-urls.txt')

    urls = U.list_by_host_name(Registry().get('pData')['id'], self.options['host'].value)
    for url in urls:
        # Only URLs that contain a query string are fuzzed.
        if url['url'].count('?'):
            to_add = self._generate_fuzz_urls(url['url'])
            for item in to_add:
                file_put_contents('/tmp/fuzzer-urls.txt', item + "\n", True)

    generator = FileGenerator('/tmp/fuzzer-urls.txt')
    q.set_generator(generator)
    self.logger.log("Loaded {0} variants.".format(generator.lines_count))

    counter = WSCounter(1, 60, generator.lines_count)

    # Spawn the worker pool (selenium-driven or plain HTTP threads).
    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SFuzzerUrlsThread(
                q, self.options['host'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['delay'].value,
                self.options['ddos-detect-phrase'].value,
                self.options['ddos-human-action'].value,
                self.options['browser-recreate-phrase'].value,
                counter, result)
        else:
            worker = FuzzerUrlsThread(
                q, self.options['host'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['delay'].value,
                counter, result)
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        # Stagger thread startup.
        time.sleep(1)

    # Supervision loop: runs until every worker has been removed.
    while len(w_thrds):
        for worker in w_thrds:
            # NOTE(review): w_thrds is mutated while being iterated, and a
            # worker that is both done and timed out is del'ed twice (the
            # second .index() would raise ValueError) — verify intent.
            if worker.done or Registry().get('proxy_many_died'):
                del w_thrds[w_thrds.index(worker)]
            if int(time.time()) - worker.last_action > int(Registry().get(
                    'config')['main']['kill_thread_after_secs']):
                self.logger.log("Thread killed by time")
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)

    # Persist found items, one request row per fuzzed URL.
    if result:
        self.logger.log("\nPut results into DB...")
        Requests = RequestsModel()
        Hosts = HostsModel()
        project_id = Registry().get('pData')['id']
        host_id = Hosts.get_id_by_name(project_id,
                                       self.options['host'].value)
        added = 0
        for fuzz in result:
            self.logger.log("{0} {1}://{2}{3} (Word: {4})".format(
                self.options['method'].value.upper(),
                self.options['protocol'].value.lower(),
                self.options['host'].value,
                fuzz['url'],
                ", ".join(fuzz['words'])))
            # NOTE(review): unlike the sibling scan_action, this variant does
            # not gate the insert on config['main']['put_data_into_db'].
            _id = Requests.add(
                project_id,
                host_id,
                urlparse(fuzz['url']).path,
                urlparse(fuzz['url']).query,
                {},
                self.options['method'].value,
                self.options['protocol'].value.lower(),
                'fuzzer',
                'Found word: {0}'.format(", ".join(fuzz['words'])))
            # Requests.add() presumably returns a falsy id for duplicates.
            added += 1 if _id else 0
        self.logger.log("Added {0} new requests in database".format(added))

    self.done = True
def test_run_4(self):
    """
    Have 2 hashlists. Start one task by first, add second task with same priority.
    Stop first "manually", second start and done. After that first "manually"
    return to work.
    """
    # Hashlist 1 (alg 4): three uncracked md5 hashes.
    self._add_hashlist(alg_id=4, uncracked=3)
    self._add_hash(hash=md5('333'), summ=md5(md5('333')))
    self._add_hash(hash=md5('444'), summ=md5(md5('444')))
    self._add_hash(hash=md5('ccccccc'), summ=md5(md5('ccccccc')))

    # Hashlist 2 (alg 23): double-md5 variants of the same plaintexts.
    self._add_hashlist(id=2, alg_id=23, uncracked=3)
    self._add_hash(hashlist_id=2, hash=md5(md5('333')), summ=md5(md5(md5('333'))))
    self._add_hash(hashlist_id=2, hash=md5(md5('444')), summ=md5(md5(md5('444'))))
    self._add_hash(hashlist_id=2, hash=md5(md5('zzzweeg')), summ=md5(md5(md5('zzzweeg'))))

    # Hashlists 3 and 4: per-algorithm "common" lists mirroring 1 and 2.
    self._add_hashlist(id=3, alg_id=4, common_by_alg=4, uncracked=3)
    self._add_hash(hashlist_id=3, hash=md5('333'), summ=md5(md5('333')))
    self._add_hash(hashlist_id=3, hash=md5('444'), summ=md5(md5('444')))
    self._add_hash(hashlist_id=3, hash=md5('ccccccc'), summ=md5(md5('ccccccc')))
    self._add_hashlist(id=4, alg_id=23, common_by_alg=23, uncracked=3)
    self._add_hash(hashlist_id=4, hash=md5(md5('333')), summ=md5(md5(md5('333'))))
    self._add_hash(hashlist_id=4, hash=md5(md5('444')), summ=md5(md5(md5('444'))))
    self._add_hash(hashlist_id=4, hash=md5(md5('zzzweeg')), summ=md5(md5(md5('zzzweeg'))))

    # Launch the daemon under test in its own process group so the whole
    # tree can be killed at the end.
    process = Popen("python ../../hbs.py", stdout=PIPE, stdin=PIPE, stderr=PIPE,
                    shell=True, preexec_fn=os.setsid)

    # Task 1 on hashlist 2.
    self._add_work_task(hashlist_id=2)
    self._add_task(source='?l?l?l?l?l?l?l', type='mask')

    # Wait (max 5s) for task 1 to start.
    # NOTE(review): busy-wait — polls the DB with no sleep between queries.
    start_time = int(time.time())
    while self.db.fetch_one("SELECT status FROM task_works WHERE id = 1") != "work":
        if int(time.time()) - start_time > 5:
            pytest.fail("Timeout start first task")

    # Add task 2 (same priority) and manually stop task 1.
    self._add_work_task(id=2, hashlist_id=1)
    self.db.update("task_works", {'status': 'go_stop'}, "id = 1")

    # Wait (max 20s) for task 2 to take over.
    # NOTE(review): busy-wait here as well.
    start_time = int(time.time())
    while self.db.fetch_one("SELECT status FROM task_works WHERE id = 2") != "work":
        if int(time.time()) - start_time > 20:
            pytest.fail("Timeout start second task")

    # Manually return task 1 to the queue.
    self.db.update("task_works", {'status': 'wait'}, "id = 1")

    # Wait (max 40s) for task 1 to resume, logging observed statuses to
    # /tmp/time.txt for post-mortem debugging of flaky runs.
    start_time = int(time.time())
    while self.db.fetch_one("SELECT status FROM task_works WHERE id = 1") != "work":
        file_put_contents(
            '/tmp/time.txt',
            "{0}{1}\n".format(
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
                self.db.fetch_one("SELECT status FROM task_works WHERE id = 1")
            ), True)
        if int(time.time()) - start_time > 40:
            pytest.fail("Timeout start first task after stop")
        time.sleep(1)

    # Task 2 must already be done; give task 1 time to finish too.
    assert self.db.fetch_one("SELECT status FROM task_works WHERE id = 2") == "done"
    time.sleep(30)
    assert self.db.fetch_one("SELECT status FROM task_works WHERE id = 1") == "done"

    # Kill the whole daemon process group.
    os.killpg(os.getpgid(process.pid), signal.SIGTERM)

    #stdout = process.stdout.read()
    # The daemon must not have written anything to stderr (the MySQL CLI
    # password warning is tolerated and stripped).
    stderr = process.stderr.read()\
        .replace('Warning: Using a password on the command line interface can be insecure.\n', '')
    assert stderr == ''

    # Both expected passwords were cracked exactly once each.
    # NOTE(review): the two assertions below are textually identical
    # (passwords appear redacted) — presumably they should check two
    # different passwords; verify against the original fixture.
    assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE cracked=1 AND password='******'") == 1
    assert self.db.fetch_one("SELECT COUNT(id) FROM hashes WHERE cracked=1 AND password='******'") == 1
    # Every task finished.
    assert self.db.fetch_one("SELECT DISTINCT status FROM task_works") == "done"
def scan_action(self):
    """
    Scan action.

    Backups-finder scan: builds candidate backup URLs for the target host,
    runs a pool of worker threads over them (with timeout-kill and limited
    resurrection), prints found items and optionally stores them in the DB.
    Python 2 syntax (print statements).
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()

    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)

    # Shared between worker threads; workers append found backup URLs to it.
    result = []

    # Rebuild the candidate-URL file from scratch on every run.
    if os.path.exists('/tmp/bf-urls.txt'):
        os.remove('/tmp/bf-urls.txt')
    urls = self.build_objects(self.options['host'].value)
    for url in urls:
        file_put_contents('/tmp/bf-urls.txt', url + "\n", True)

    q = BackupsFinderJob()
    generator = FileGenerator('/tmp/bf-urls.txt')
    q.set_generator(generator)
    self.logger.log("Loaded {0} variants.".format(generator.lines_count))

    counter = WSCounter(5, 300, generator.lines_count)

    # Spawn the worker pool (selenium-driven or plain HTTP threads).
    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SBackupsFinderThread(
                q,
                self.options['host'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['not-found-re'].value,
                self.options['delay'].value,
                self.options['ddos-detect-phrase'].value,
                self.options['ddos-human-action'].value,
                self.options['browser-recreate-re'].value,
                counter,
                result
            )
        else:
            worker = BackupsFinderThread(
                q,
                self.options['host'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['not-found-re'].value,
                self.options['not-found-codes'].value.lower(),
                self.options['not-found-size'].value,
                self.options['delay'].value,
                counter,
                result
            )
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        # Stagger thread startup.
        time.sleep(1)

    # Supervision loop: runs until every worker has been removed.
    timeout_threads_count = 0
    while len(w_thrds):
        for worker in w_thrds:
            # Global stop conditions: dead proxies or too many positives.
            if Registry().get('proxy_many_died') or Registry().get('positive_limit_stop'):
                worker.done = True
                time.sleep(3)

            # NOTE(review): w_thrds is mutated while being iterated, and a
            # worker that is both done and timed out would be del'ed twice
            # (the second .index() would raise ValueError) — verify intent.
            if worker.done:
                del w_thrds[w_thrds.index(worker)]

            # Kill a worker silent longer than the configured limit.
            if int(time.time()) - worker.last_action > int(Registry().get('config')['main']['kill_thread_after_secs']):
                self.logger.log(
                    "Thread killed by time, resurected {0} times from {1}".format(
                        timeout_threads_count,
                        Registry().get('config')['main']['timeout_threads_resurect_max_count']
                    )
                )
                del w_thrds[w_thrds.index(worker)]

                # Resurrect a replacement worker, up to the configured maximum.
                if timeout_threads_count <= int(Registry().get('config')['main']['timeout_threads_resurect_max_count']):
                    if self.options['selenium'].value:
                        worker = SBackupsFinderThread(
                            q,
                            self.options['host'].value,
                            self.options['protocol'].value.lower(),
                            self.options['method'].value.lower(),
                            self.options['not-found-re'].value,
                            self.options['delay'].value,
                            self.options['ddos-detect-phrase'].value,
                            self.options['ddos-human-action'].value,
                            self.options['browser-recreate-re'].value,
                            counter,
                            result
                        )
                    else:
                        # NOTE(review): unlike the initial spawn above, this
                        # constructor call omits the 'not-found-size' option —
                        # looks like a dropped argument; verify against the
                        # BackupsFinderThread signature.
                        worker = BackupsFinderThread(
                            q,
                            self.options['host'].value,
                            self.options['protocol'].value.lower(),
                            self.options['method'].value.lower(),
                            self.options['not-found-re'].value,
                            self.options['not-found-codes'].value.lower(),
                            self.options['delay'].value,
                            counter,
                            result
                        )
                    worker.setDaemon(True)
                    worker.start()
                    w_thrds.append(worker)
                    timeout_threads_count += 1
        time.sleep(2)

    # Abort early when the positives limit was hit: show the last few items
    # and exit the process.
    if Registry().get('positive_limit_stop'):
        self.logger.log("\nMany positive detections. Please, look items logs")
        self.logger.log("Last items:")
        for i in range(1, 5):
            print result[-i]
        exit(0)

    # Echo everything that was found.
    if result:
        print "\n",
        for item in result:
            print item

    # Persist found backups when the config flag allows it.
    if int(Registry().get('config')['main']['put_data_into_db']):
        if result:
            self.logger.log("\nPut found into DB...")
            Requests = RequestsModel()
            Hosts = HostsModel()
            project_id = Registry().get('pData')['id']
            host_id = Hosts.get_id_by_name(project_id, self.options['host'].value)
            added = 0
            for backup in result:
                _id = Requests.add(
                    project_id,
                    host_id,
                    backup,
                    "",
                    {},
                    self.options['method'].value,
                    self.options['protocol'].value.lower(),
                    'backups',
                    'May be important backup'
                )
                # Requests.add() presumably returns a falsy id for duplicates.
                added += 1 if _id else 0
            self.logger.log("Found backups: {0}, new: {1}".format(len(result), added))

    self.logger.log(str(result), _print=False)
    self.done = True