def _logger(self, first_stop=False):
    """
    Validate the form-bruter logger output: run.log content and the
    per-word item files it leaves behind.

    :param first_stop: True if the run was stopped before the last word
    :return:
    """
    base = "{0}/logs/form-bruter/{1}/".format(wrpath, t("%Y-%m-%d"))
    latest = sorted(os.listdir(base))[-1]

    def item_path(name):
        # Path of the saved response for one brute-forced word.
        return "{0}/{1}/items/{2}.txt".format(base, latest, name)

    log_text = file_get_contents("{0}/{1}/run.log".format(base, latest))
    assert "Loaded 4 words from source" in log_text
    assert "qwerty" in log_text

    assert os.path.exists(item_path("123"))
    assert os.path.exists(item_path("test"))
    assert os.path.exists(item_path("qwerty"))
    # The last word is only reached when the run was not stopped early.
    assert os.path.exists(item_path("abcde")) == (not first_stop)

    password_field = '<input type="password" name="password" />'
    assert password_field in file_get_contents(item_path("123"))
    assert password_field in file_get_contents(item_path("test"))
    # The successful password no longer shows the login form.
    assert password_field not in file_get_contents(item_path("qwerty"))
    assert 'Success!' in file_get_contents(item_path("qwerty"))
def _logger(self, content=True, good_items=None, bad_items=None, in_output=None):
    """
    Validate the dafs logger output.

    :param content: We check content?
    :param good_items: This items was found
    :param bad_items: This items was not found
    :param in_output: check this phrases in output
    :return:
    """
    good_items = [] if good_items is None else good_items
    bad_items = [] if bad_items is None else bad_items
    in_output = [] if in_output is None else in_output

    base = "{0}/logs/dafs/{1}/".format(wrpath, t("%Y-%m-%d"))
    latest = sorted(os.listdir(base))[-1]

    log_text = file_get_contents("{0}/{1}/run.log".format(base, latest))
    assert "Loaded {0} words from source".format(self._how_many_variants()) in log_text
    for phrase in in_output:
        assert phrase in log_text

    for item in bad_items:
        item_file = "{0}/{1}/items/{2}.txt".format(base, latest, item)
        assert os.path.exists(item_file)
        if self.check_method != 'head':
            # With content checking disabled the marker degrades to '' (always present).
            assert ('<h1>Not Found</h1>' if content else '') in file_get_contents(item_file)

    for item in good_items:
        item_file = "{0}/{1}/items/{2}.txt".format(base, latest, item)
        assert os.path.exists(item_file)
        if self.check_method != 'head':
            assert '<h1>Not Found</h1>' not in file_get_contents(item_file)
def test_clean_stdout_file(self):
    """ Test of method which clean stdout file from automate-status entries """
    stdout_path = '/tmp/test.txt'
    if os.path.exists(stdout_path):
        os.remove(stdout_path)

    self.thrd.work_task = {'path_stdout': '/tmp/test.txt'}

    # With no file present the method must leave an empty file behind.
    self.thrd.clean_stdout_file()
    assert file_get_contents(stdout_path) == ""

    # STATUS lines, blank lines and \r\n endings are stripped out.
    file_put_contents(stdout_path, "some\nSTATUS ...\ntest\n\n\ntest2\r\n")
    self.thrd.clean_stdout_file()
    assert file_get_contents(stdout_path) == "some\ntest\ntest2\n"
def ex(self, _exception):
    """
    Log an exception together with the current stack trace.

    :param _exception: the caught exception instance
    :return:
    """
    import tempfile

    # Capture the stack through a unique temp file. tempfile.mkstemp() gives
    # a race-free name; the previous "/tmp/wsexc<random>.txt" scheme could
    # collide between concurrent callers.
    fd, tmp_file_name = tempfile.mkstemp(prefix="wsexc", suffix=".txt")
    fh = os.fdopen(fd, "w")
    traceback.print_stack(file=fh)
    fh.close()
    trace_text = file_get_contents(tmp_file_name)
    os.remove(tmp_file_name)

    exc_type, exc_obj, exc_tb = sys.exc_info()
    fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
    log_str = "Exception {1}:\n{2} ({3}): {4}\n{0}\n{5}{0}\n".format(
        "{0:=^20}".format(""),
        exc_type,
        fname,
        exc_tb.tb_lineno,
        str(_exception),
        trace_text,
    )
    self.log(log_str, _print=False)
def test_mask_task(self):
    """ Run simple mask task """
    self._add_hashlist(alg_id=0, uncracked=4)
    for word in ('123', '456', 'ccc', '789'):
        self._add_hash(hash=md5(word))

    self._add_work_task()
    self._add_task(source='?l?l?l', type='mask')

    self.thrd = WorkerThread(self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    self.thrd.catch_exceptions = False
    self.thrd.start()

    # Wait for completion, but no longer than 5 seconds.
    deadline = int(time.time()) + 5
    while not self.thrd.done:
        if int(time.time()) > deadline:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)

    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    # Only 'ccc' matches the ?l?l?l mask.
    assert file_get_contents(wtask['out_file']) == '9df62e693988eb4e1e1444ece0578579:636363\n'
def test_mask_task(self):
    """ Run simple mask task """
    self._add_hashlist(alg_id=0, uncracked=4)
    self._add_hash(hash=md5('123'))
    self._add_hash(hash=md5('456'))
    self._add_hash(hash=md5('ccc'))
    self._add_hash(hash=md5('789'))
    self._add_work_task()
    self._add_task(source='?l?l?l', type='mask')

    worker_row = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    self.thrd = WorkerThread(worker_row)
    self.thrd.catch_exceptions = False
    self.thrd.start()

    # Poll until done; abort after 5 seconds.
    started = int(time.time())
    while not self.thrd.done:
        if int(time.time()) - started > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)

    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    assert file_get_contents(wtask['out_file']) == '9df62e693988eb4e1e1444ece0578579:636363\n'
def _scan(self, config, test_data, test_files_content, url_decrease, insert_root=True, additional_run_params=None):
    """
    Common scan method
    :param config: name of the config profile to activate for the run
    :param test_data: dict url -> expected field values in the `urls` table
    :param test_files_content: dict url -> substring expected in that url's saved data file
    :param url_decrease: how many urls are expected to have no saved data file on disk
    :param insert_root: insert the root url into the db before the scan?
    :param additional_run_params: extra CLI arguments for ./main.py
    :return: raw stdout of the scan process
    """
    additional_run_params = additional_run_params or []
    self._prepare_db(insert_root)
    run_params = [
        './main.py', 'prj', 'Spider', 'scan', '--host=wrtest.com'
    ]
    run_params.extend(additional_run_params)
    # Selenium runs are limited to a single thread.
    run_params.append('--threads=3' if '--selenium=1' not in run_params else '--threads=1')
    self._replace_config(config)
    os.chdir(wrpath)
    out = subprocess.check_output(run_params)
    self._restore_config()
    self.output_errors(out)
    urls = self.db.fetch_all("SELECT * FROM urls ORDER BY id ASC")
    assert len(urls) == len(test_data)
    # All but `url_decrease` urls must have a saved data file.
    assert len(urls)-url_decrease == len(os.listdir(Registry().get('data_path') + 'wrtest.com/'))
    for url in urls:
        if url['url'] == '/slow.php':
            # The slow page must reflect its artificial delay.
            assert url['response_time'] > 4
        test_data_row = test_data[url['url']]
        for field in test_data_row:
            assert test_data_row[field] == url[field]
    for url in test_files_content:
        # Saved files are keyed by md5 of the url.
        data = file_get_contents(Registry().get('data_path') + 'wrtest.com/' + md5(url))
        assert data.find(test_files_content[url]) > -1
    # Verify the urls_base tree: / -> deep -> moredeep -> dir1.
    assert self.db.fetch_one("SELECT COUNT(id) FROM urls_base WHERE name='/' AND parent_id=0 AND host_id=1") == 1
    assert self.db.fetch_one("SELECT COUNT(id) FROM urls_base WHERE name='deep' AND parent_id=1 AND host_id=1") == 1
    assert self.db.fetch_one(
        "SELECT COUNT(id) FROM urls_base WHERE name='moredeep' AND parent_id="
        "(SELECT id FROM urls_base WHERE name='deep' AND parent_id=1 AND host_id=1) AND host_id=1"
    ) == 1
    assert self.db.fetch_one(
        "SELECT COUNT(id) FROM urls_base WHERE name='dir1' AND parent_id="
        "(SELECT id FROM urls_base WHERE name='moredeep' AND host_id=1) AND host_id=1"
    ) == 1
    assert self.db.fetch_one("SELECT COUNT(id) FROM urls_base_params") > 0
    return out
def test_sort_file(self):
    """ test for sort_file() """
    source_file = '/tmp/test.txt'
    if os.path.exists(source_file):
        os.remove(source_file)

    # Duplicates must be removed and the output must come back sorted.
    file_put_contents(source_file, 'a\nc\nb\nb\nc')
    result_path = self.thrd.sort_file({'tmp_path': source_file})
    assert file_get_contents(result_path) == 'a\nb\nc\n'
def _check_logger(self):
    """
    Check logger work: validate the cms-scanner run.log and, for
    non-selenium runs, compare every found item's checksum against its
    logged copy.
    """
    logs_path = "{0}/logs/cms/{1}/".format(wrpath, t("%Y-%m-%d"))
    time_dir = sorted(os.listdir(logs_path))[-1]
    run_log_data = file_get_contents("{0}/{1}/run.log".format(logs_path, time_dir))
    assert "Loaded {0} variants".format(self._how_many_variants()) in run_log_data
    if not self.selenium:
        for item in self.found_items:
            # Bug fix: was `is not 'head'` — identity comparison with a
            # string literal, which is not a reliable equality test.
            if self.check_method != 'head':
                assert item['sum'] == md5sum("{0}/{1}/items/{2}".format(logs_path, time_dir, item['log']))
def test_put_all_hashes_of_alg_in_file(self):
    """ Test put_all_hashes_of_alg_in_file() """
    self._add_hashlist()
    self._add_hash(hash='a', summ='111')
    self._add_hash(summ='222')
    self._add_hash(hash='b', summ='333')

    # Salt-less algorithm: plain hashes, one per line, empty ones skipped.
    out_path = self.thrd.put_all_hashes_of_alg_in_file(3)
    assert os.path.exists(out_path)
    assert file_get_contents(out_path) == 'a\nb\n'

    self._add_hashlist(id=2, have_salts=1, alg_id=4)
    self._add_hash(hashlist_id=2, hash='a', salt='b', summ='111')
    self._add_hash(hashlist_id=2, summ='222')
    self._add_hash(hashlist_id=2, hash='c', salt='d', summ='333')

    # Salted algorithm: hash<DELIMITER>salt pairs.
    out_path = self.thrd.put_all_hashes_of_alg_in_file(4)
    assert os.path.exists(out_path)
    assert file_get_contents(out_path) == 'a{0}b\nc{0}d\n'.format(self.thrd.DELIMITER)
def test_build_hybride_dict(self):
    """ Test of dicts build mechanism for hybride attacks """
    work_dir = "/tmp/1/"
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.mkdir(work_dir)

    file_put_contents(work_dir + "1.dict", "bbb\naaa\n")
    file_put_contents(work_dir + "2.dict", "ddd\nccc\n")

    # All source dicts are merged and the words come out sorted.
    result_path = self.thrd.build_hybride_dict(work_dir)
    assert os.path.exists(result_path)
    assert file_get_contents(result_path) == "aaa\nbbb\nccc\nddd\n"
def test_make_hashlist(self):
    """ Test of hashlist building from db """
    for h in ('a', 'b', 'c'):
        self._add_hash(hash=h)

    self._add_hashlist(id=2, have_salts=1)
    self._add_hash(hashlist_id=2, hash='a', salt='1')
    self._add_hash(hashlist_id=2, hash='b', salt='2')
    self._add_hash(hashlist_id=2, hash='c', salt='3')

    # Default hashlist: plain hashes, one per line.
    file_path = self.thrd.make_hashlist()
    assert os.path.exists(file_path)
    assert file_get_contents(file_path) == "a\nb\nc\n"

    # Salted hashlist: hash:salt pairs.
    self.thrd.work_task['hashlist_id'] = 2
    file_path = self.thrd.make_hashlist()
    assert os.path.exists(file_path)
    assert file_get_contents(file_path) == "a:1\nb:2\nc:3\n"
def test_make_hashlist(self):
    """ Test make_hashlist() """
    for h in ('a', 'b', 'c'):
        self._add_hash(hash=h)

    self._add_hashlist(id=2, have_salts=1)
    self._add_hash(hashlist_id=2, hash='a', salt='1')
    self._add_hash(hashlist_id=2, hash='b', salt='2')
    self._add_hash(hashlist_id=2, hash='c', salt='3')

    # Hashlist 1: plain hashes.
    file_path = self.thrd.make_hashlist(1)
    assert os.path.exists(file_path)
    assert file_get_contents(file_path) == "a\nb\nc\n"

    # Hashlist 2: salted, hash<UNIQUE_DELIMITER>salt pairs.
    file_path = self.thrd.make_hashlist(2)
    assert os.path.exists(file_path)
    expected = "a{0}1\nb{0}2\nc{0}3\n".format(FinderInsideProThread.UNIQUE_DELIMITER)
    assert expected == file_get_contents(file_path)
def test_delete(self):
    """Deleting a project through the CLI removes it from the db."""
    self.db.q("INSERT INTO projects (id, name, descr) VALUES(1, 'test', 'desc1')")
    assert self.db.fetch_one("SELECT COUNT(id) FROM projects") == 1

    self._replace_config('normal')
    os.chdir(wrpath)
    cmd = ['./main.py', 'test', 'Projects', 'delete']
    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=open('/tmp/unittests-output', 'w'))
    # Confirm the interactive deletion prompt.
    proc.communicate('y\n')
    self._restore_config()

    self.output_errors(file_get_contents('/tmp/unittests-output'))
    assert self.db.fetch_one("SELECT COUNT(id) FROM projects") == 0
def test_build(self):
    """Every value passed to XmlOutput must appear in its report files."""
    report_path = "/tmp/testreportxml"
    xmloutput = XmlOutput(report_path)

    progress_values = [
        "testprogress1", "testprogress2", "testprogress3",
        "testprogress4", "testprogress5", "testprogress5"
    ]
    xmloutput.put_progress(*progress_values)
    progress_xml = file_get_contents(report_path + "-progress.xml")
    for value in progress_values:
        assert value in progress_xml

    result_values = ["testprogress6", "testprogress7"]
    xmloutput.put_result({result_values[0]: result_values[1]})
    result_xml = file_get_contents(report_path + "-result.xml")
    for value in result_values:
        assert value in result_xml

    # NOTE(review): both error values are "testprogress8" — possibly a typo
    # for "testprogress9"; verify against put_error()'s intent.
    error_values = ["testprogress8", "testprogress8"]
    xmloutput.put_error(error_values[0], error_values[1])
    errors_xml = file_get_contents(report_path + "-errors.xml")
    for value in error_values:
        assert value in errors_xml
def test_build_dicts(self):
    """ Test of symlinks on dicts build mechanism, for simple dicts attacks """
    tmp_dir = Registry().get('config')['main']['tmp_dir']
    dicts_path = Registry().get('config')['main']['dicts_path']

    if os.path.exists(tmp_dir + '/dicts_for_1'):
        shutil.rmtree(tmp_dir + '/dicts_for_1')
    os.mkdir(tmp_dir + '/dicts_for_1')

    # With no dicts configured the pre-created dir is returned untouched.
    assert tmp_dir + '/dicts_for_1' == self.thrd.build_dicts(0, {})
    shutil.rmtree(tmp_dir + '/dicts_for_1')

    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    # Dict #3 belongs to another group and must not be linked.
    self._add_dict(id=3, hash='3', group_id=2)
    # (A redundant second write of 1.dict with identical content was removed.)

    path_to_dict = self.thrd.build_dicts(1, {'type': 'dict', 'source': 1})
    assert not os.path.exists(tmp_dir + '/dicts_for_2')
    assert os.path.exists(path_to_dict)
    assert os.path.exists(path_to_dict + "/1.dict")
    assert os.path.exists(path_to_dict + "/2.dict")
    assert not os.path.exists(path_to_dict + "/3.dict")
    # Dicts are exposed as symlinks, not copies.
    assert os.path.islink(path_to_dict + "/1.dict")
    assert os.path.islink(path_to_dict + "/2.dict")
    assert file_get_contents(path_to_dict + "/1.dict") == "aaa\nbbb"
    assert file_get_contents(path_to_dict + "/2.dict") == "ccc\nddd"
def test_sorted_file_to_db(self, have_salt, content, expected_content):
    """
    Test sorted_file_to_db()
    :param have_salt: does hashlist has salt
    :param content: hashlist content (txt)
    :param expected_content: expected hashlist content after convertation
    :return:
    """
    src_file = '/tmp/test.txt'
    if os.path.exists(src_file):
        os.remove(src_file)
    file_put_contents(src_file, content)

    hashlist = {'have_salts': have_salt, 'id': 1, 'delimiter': ':'}
    db_file = self.thrd.sorted_file_to_db_file(src_file, hashlist)

    assert file_get_contents(db_file) == expected_content
    # Conversion must also flip the hashlist into the preparedb state.
    assert self.db.fetch_one("SELECT status FROM hashlists WHERE id = 1") == "preparedb"
def __init__(self):
    """
    Bootstrap the application registry: config, MySQL, MongoDB, HTTP
    helpers and (optionally) a virtual display for selenium runs.
    """
    config = configparser.ConfigParser()
    config.read(os.getcwd() + '/' + 'config.ini')

    try:
        db = mysql.connector.connect(
            host=config['db']['host'],
            user=config['db']['user'],
            password=config['db']['pass'],
            database=config['db']['database']
        )
        db.autocommit = True
    except mysql.connector.errors.ProgrammingError as e:
        # Parenthesized print works on Python 2 and 3; exit(1) so the
        # failure is visible to callers (the original exited with 0).
        print(" ERROR: Can`t connect to MySQL server! ({0})".format(str(e)))
        exit(1)

    try:
        mc = MongoClient(host=config['mongo']['host'], port=int(config['mongo']['port']))
        mongo_collection = getattr(mc, config['mongo']['collection'])
    except pymongo.errors.ConnectionFailure as e:
        print(" ERROR: Can`t connect to MongoDB server! ({0})".format(str(e)))
        exit(1)

    R = Registry()
    R.set('config', config)
    R.set('db', db)
    R.set('mongo', mongo_collection)
    R.set('wr_path', os.getcwd())
    R.set('data_path', os.getcwd() + '/data/')
    R.set('http', Http())
    R.set('proxies', Proxies())
    R.set(
        'ndb',
        Database(config['db']['host'], config['db']['user'], config['db']['pass'], config['db']['database'])
    )
    R.set(
        'fuzzer_evil_value',
        file_get_contents(Registry().get('wr_path') + "/bases/fuzzer-evil-value.txt").strip()
    )
    R.set('proxy_many_died', False)
    R.set('positive_limit_stop', False)

    if " ".join(sys.argv).count('selenium') and int(config['selenium']['virtual_display']):
        display = Display(visible=0, size=(800, 600))
        display.start()
        R.set('display', display)
def __init__(self):
    """
    Bootstrap the application registry: config, MySQL, MongoDB, HTTP
    helpers and (optionally) a virtual display for selenium runs.
    """
    config = configparser.ConfigParser()
    config.read(os.getcwd() + '/' + 'config.ini')

    try:
        db = mysql.connector.connect(
            host=config['db']['host'],
            user=config['db']['user'],
            password=config['db']['pass'],
            database=config['db']['database']
        )
        db.autocommit = True
    except mysql.connector.errors.ProgrammingError as e:
        # Parenthesized print works on Python 2 and 3; exit(1) so the
        # failure is visible to callers (the original exited with 0).
        print(" ERROR: Can`t connect to MySQL server! ({0})".format(str(e)))
        exit(1)

    try:
        mc = MongoClient(host=config['mongo']['host'], port=int(config['mongo']['port']))
        mongo_collection = getattr(mc, config['mongo']['collection'])
    except pymongo.errors.ConnectionFailure as e:
        print(" ERROR: Can`t connect to MongoDB server! ({0})".format(str(e)))
        exit(1)

    R = Registry()
    R.set('config', config)
    R.set('db', db)
    R.set('mongo', mongo_collection)
    R.set('wr_path', os.getcwd())
    R.set('data_path', os.getcwd() + '/data/')
    R.set('http', Http())
    R.set('proxies', Proxies())
    R.set(
        'ndb',
        Database(config['db']['host'], config['db']['user'], config['db']['pass'], config['db']['database'])
    )
    R.set(
        'fuzzer_evil_value',
        file_get_contents(Registry().get('wr_path') + "/bases/fuzzer-evil-value.txt").strip()
    )
    R.set('proxy_many_died', False)
    R.set('positive_limit_stop', False)

    if " ".join(sys.argv).count('selenium') and int(config['selenium']['virtual_display']):
        display = Display(visible=0, size=(800, 600))
        display.start()
        R.set('display', display)
def test_delete(self):
    """Deleting a url through the CLI removes it from the db."""
    self.db.q(
        "INSERT INTO `urls`"
        "(id, project_id, host_id, hash, url, referer, response_code, response_time, "
        "size, when_add, who_add, descr, spidered)"
        "VALUES(1, 1, 1, MD5('/'), '/', '', 0, 0, 0, 1, 'human', '', 0)"
    )
    assert self.db.fetch_one("SELECT COUNT(id) FROM urls") == 1

    self._replace_config('normal')
    os.chdir(wrpath)
    cmd = ['./main.py', 'test', 'Urls', 'delete', '--host=wrtest.com', '--url=/']
    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=open('/tmp/unittests-output', 'w'))
    # Confirm the interactive deletion prompt.
    proc.communicate('y\n')
    self._restore_config()

    self.output_errors(file_get_contents('/tmp/unittests-output'))
    assert self.db.fetch_one("SELECT COUNT(id) FROM urls") == 0
def test_maskdict_task(self):
    """ Run hybride mask+dict task """
    self._add_hashlist(alg_id=0, uncracked=4)
    for word in ('123', '456', '1ccc', '789'):
        self._add_hash(hash=md5(word))

    self._add_work_task()
    self._add_task(source=json.dumps({'mask': '?d', 'dict': 1}), type='maskdict')

    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb\n")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd\n")

    self.thrd = WorkerThread(self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    self.thrd.catch_exceptions = False
    self.thrd.start()

    # Wait for completion, but no longer than 5 seconds.
    deadline = int(time.time()) + 5
    while not self.thrd.done:
        if int(time.time()) > deadline:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)

    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    # Only '1ccc' matches digit-prefix + dict word.
    assert file_get_contents(wtask['out_file']) == '49a14108270c0596ac1d70c3c4f82a10:31636363\n'
def test_make_hashlist(self):
    """ Test make_hashlist() """
    self._add_hash(hash='a')
    self._add_hash(hash='b')
    self._add_hash(hash='c')

    self._add_hashlist(id=2, have_salts=1)
    for h, s in (('a', '1'), ('b', '2'), ('c', '3')):
        self._add_hash(hashlist_id=2, hash=h, salt=s)

    # Hashlist 1: plain hashes.
    file_path = self.thrd.make_hashlist(1)
    assert os.path.exists(file_path)
    assert file_get_contents(file_path) == "a\nb\nc\n"

    # Hashlist 2: salted, hash<UNIQUE_DELIMITER>salt pairs.
    file_path = self.thrd.make_hashlist(2)
    assert os.path.exists(file_path)
    expected = "a{0}1\nb{0}2\nc{0}3\n".format(FinderInsideProThread.UNIQUE_DELIMITER)
    assert expected == file_get_contents(file_path)
def test_sorted_file_to_db(self, have_salt, content, expected_content):
    """
    Test sorted_file_to_db()
    :param have_salt: does hashlist has salt
    :param content: hashlist content (txt)
    :param expected_content: expected hashlist content after convertation
    :return:
    """
    src_file = '/tmp/test.txt'
    if os.path.exists(src_file):
        os.remove(src_file)
    file_put_contents(src_file, content)

    db_file = self.thrd.sorted_file_to_db_file(
        src_file,
        {'have_salts': have_salt, 'id': 1, 'delimiter': ':'}
    )

    assert file_get_contents(db_file) == expected_content
    # Conversion must also flip the hashlist into the preparedb state.
    assert self.db.fetch_one("SELECT status FROM hashlists WHERE id = 1") == "preparedb"
def test_dict_task(self):
    """ Run simple dict task """
    self._add_hashlist(alg_id=0, uncracked=4)
    for word in ('123', '456', 'ccc', '789'):
        self._add_hash(hash=md5(word))

    self._add_work_task()

    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")

    self._add_task(source=1)

    self.thrd = WorkerThread(self.db.fetch_row("SELECT * FROM task_works WHERE id = 1"))
    self.thrd.catch_exceptions = False
    self.thrd.start()

    # Wait for completion, but no longer than 5 seconds.
    deadline = int(time.time()) + 5
    while not self.thrd.done:
        if int(time.time()) > deadline:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)

    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    # Only 'ccc' appears in the dictionaries.
    assert file_get_contents(wtask['out_file']) == '9df62e693988eb4e1e1444ece0578579:636363\n'
def test_dict_task(self):
    """ Run simple dict task """
    self._add_hashlist(alg_id=0, uncracked=4)
    self._add_hash(hash=md5('123'))
    self._add_hash(hash=md5('456'))
    self._add_hash(hash=md5('ccc'))
    self._add_hash(hash=md5('789'))
    self._add_work_task()

    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd")
    self._add_task(source=1)

    worker_row = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    self.thrd = WorkerThread(worker_row)
    self.thrd.catch_exceptions = False
    self.thrd.start()

    # Poll until done; abort after 5 seconds.
    started = int(time.time())
    while not self.thrd.done:
        if int(time.time()) - started > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)

    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    assert file_get_contents(wtask['out_file']) == '9df62e693988eb4e1e1444ece0578579:636363\n'
def __init__(self):
    """
    Bootstrap the (db-less) application registry: config, HTTP helpers,
    proxies and optionally a virtual display for selenium runs.
    """
    cfg = configparser.ConfigParser()
    cfg.read(os.getcwd() + '/' + 'config.ini')

    registry = Registry()
    registry.set('config', cfg)
    registry.set('wr_path', os.getcwd())
    registry.set('data_path', os.getcwd() + '/data/')
    registry.set('http', Http())
    registry.set('ua', random_ua())
    registry.set('proxies', Proxies())
    registry.set('tmp_files', [])
    registry.set(
        'fuzzer_evil_value',
        file_get_contents(Registry().get('wr_path') + "/bases/fuzzer/evil-value.txt").strip()
    )
    registry.set('proxy_many_died', False)
    registry.set('positive_limit_stop', False)

    # Headless selenium runs get a virtual X display.
    if " ".join(sys.argv).count('selenium') and int(cfg['selenium']['virtual_display']):
        vdisplay = Display(visible=0, size=(800, 600))
        vdisplay.start()
        registry.set('display', vdisplay)
def test_maskdict_task(self):
    """ Run hybride mask+dict task """
    self._add_hashlist(alg_id=0, uncracked=4)
    self._add_hash(hash=md5('123'))
    self._add_hash(hash=md5('456'))
    self._add_hash(hash=md5('1ccc'))
    self._add_hash(hash=md5('789'))
    self._add_work_task()
    self._add_task(source=json.dumps({'mask': '?d', 'dict': 1}), type='maskdict')

    dicts_path = Registry().get('config')['main']['dicts_path']
    self._add_dict_group()
    self._add_dict()
    self._add_dict(id=2, hash='2')
    file_put_contents(dicts_path + "/1.dict", "aaa\nbbb\n")
    file_put_contents(dicts_path + "/2.dict", "ccc\nddd\n")

    worker_row = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    self.thrd = WorkerThread(worker_row)
    self.thrd.catch_exceptions = False
    self.thrd.start()

    # Poll until done; abort after 5 seconds.
    started = int(time.time())
    while not self.thrd.done:
        if int(time.time()) - started > 5:
            pytest.fail("Long time of WorkerThread")
        time.sleep(1)

    wtask = self.db.fetch_row("SELECT * FROM task_works WHERE id = 1")
    assert wtask['status'] == 'waitoutparse'
    assert wtask['uncracked_before'] == 4
    assert os.path.exists(wtask['out_file'])
    assert file_get_contents(wtask['out_file']) == '49a14108270c0596ac1d70c3c4f82a10:31636363\n'
def ex(self, _exception):
    """
    Log an exception together with the current stack trace (also printed).

    :param _exception: the caught exception instance
    :return:
    """
    import tempfile

    # Capture the stack through a unique temp file. tempfile.mkstemp() gives
    # a race-free name; the previous "/tmp/wsexc<random>.txt" scheme could
    # collide between concurrent callers.
    fd, tmp_file_name = tempfile.mkstemp(prefix="wsexc", suffix=".txt")
    fh = os.fdopen(fd, "w")
    traceback.print_stack(file=fh)
    fh.close()
    trace_text = file_get_contents(tmp_file_name)
    os.remove(tmp_file_name)

    exc_type, exc_obj, exc_tb = sys.exc_info()
    fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
    log_str = "Exception {1}:\n{2} ({3}): {4}\n{0}\n{5}{0}\n".format(
        "{0:=^20}".format(""),
        exc_type,
        fname,
        exc_tb.tb_lineno,
        str(_exception),
        trace_text,
    )
    self.log(log_str, _print=True)