def prepare_first_pages(host):
    """ Prepare the first pages' links in MongoDB. Add the root URL if no URLs exist for this host. """
    pid = Registry().get('pData')['id']

    coll = Registry().get('mongo').spider_urls
    coll.drop()

    Urls = UrlsModel()
    urls = Urls.list_by_host_name_for_spider(pid, host)
    if not len(urls):
        Registry().get('logger').log("Spider: Root URL was added automatically")
        Urls.add(
            pid, HostsModel().get_id_by_name(pid, host), '/', who_add='spider'
        )
        urls = Urls.list_by_host_name_for_spider(pid, host)

    for url in urls:
        url = urlparse(url['url'])
        data = {
            'hash': md5(str(url.path + url.query)),
            'path': url.path,
            'query': url.query,
            'time': 0,
            'code': 0,
            'checked': 0,
            'getted': 0,
            'referer': '',
            'size': 0,
            'founder': 'spider'
        }
        coll.insert(data)

    coll.create_index([('hash', 1)], unique=True, dropDups=True)
    coll.create_index([('checked', 1)])
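# NOTE: illustrative sketch only (not part of the project). It shows the
# hash-plus-unique-index deduplication idea used above with plain pymongo
# and hashlib instead of the project's md5 helper and Registry; it assumes
# a local MongoDB instance and a throwaway 'spider_demo' database.
import hashlib

from pymongo import MongoClient
from pymongo.errors import DuplicateKeyError


def queue_url(coll, path, query=''):
    """Insert a URL document once; the unique 'hash' index rejects repeats."""
    doc = {
        'hash': hashlib.md5((path + query).encode()).hexdigest(),
        'path': path, 'query': query, 'checked': 0,
    }
    try:
        coll.insert_one(doc)
        return True
    except DuplicateKeyError:
        return False


if __name__ == '__main__':
    demo = MongoClient()['spider_demo'].urls
    demo.drop()
    # dropDups is gone in modern MongoDB; a plain unique index is enough here.
    demo.create_index([('hash', 1)], unique=True)
    print(queue_url(demo, '/index.php', 'id=1'))  # True
    print(queue_url(demo, '/index.php', 'id=1'))  # False, already queued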
def links_in_spider_base(pid, host):
    """ Put found links in MySQL """
    links_per_time_limit = 50
    c = WSCounter(1, 60, int(Registry().get('mongo').spider_urls.count() / links_per_time_limit))

    Urls = UrlsModel()
    host_id = HostsModel().get_id_by_name(pid, host)

    urls_add = []
    skip = 0
    while True:
        links = mongo_result_to_list(
            Registry().get('mongo').spider_urls.find().skip(skip).limit(links_per_time_limit)
        )

        for link in links:
            url = link['path'] + '?' + link['query'] if len(link['query']) else link['path']
            urls_add.append({
                'url': url,
                'referer': link['referer'],
                'response_code': link['code'],
                'response_time': link['time'],
                'size': link['size'],
                'who_add': 'spider',
                'spidered': link['checked']
            })
        Urls.add_mass(pid, host_id, urls_add)
        urls_add = []

        to_update = {'spidered': [], 'code': [], 'time': [], 'size': []}
        for link in links:
            url = link['path'] + '?' + link['query'] if len(link['query']) else link['path']
            if link['checked']:
                to_update['spidered'].append({'url': url, 'value': 1})
            to_update['code'].append({'url': url, 'value': link['code']})
            to_update['time'].append({'url': url, 'value': link['time']})
            to_update['size'].append({'url': url, 'value': link['size']})

        Urls.update_url_field_mass(pid, host, 'spidered', to_update['spidered'])
        Urls.update_url_field_mass(pid, host, 'response_code', to_update['code'])
        Urls.update_url_field_mass(pid, host, 'response_time', to_update['time'])
        Urls.update_url_field_mass(pid, host, 'size', to_update['size'])

        skip += len(links)
        c.up()

        if len(links) < links_per_time_limit:
            break
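# NOTE: illustrative sketch only (not part of the project). A stripped-down
# version of the skip()/limit() batching loop above, using pymongo directly;
# the database/collection names and batch size are stand-ins for the
# project's spider_urls collection and links_per_time_limit.
from pymongo import MongoClient


def iter_batches(coll, batch_size=50):
    """Yield documents in fixed-size batches, stopping once a batch
    comes back shorter than batch_size (the collection is exhausted)."""
    skip = 0
    while True:
        batch = list(coll.find().skip(skip).limit(batch_size))
        if not batch:
            break
        yield batch
        skip += len(batch)
        if len(batch) < batch_size:
            break


if __name__ == '__main__':
    coll = MongoClient()['spider_demo'].urls
    for batch in iter_batches(coll, batch_size=50):
        print("processing {0} links".format(len(batch)))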
def _insert_urls(self, urls):
    """ Insert found urls in DB """
    pid = Registry().get('pData')['id']
    host_id = HostsModel().get_id_by_name(pid, self.options['host'].value)
    U = UrlsModel()

    added = 0
    for url in urls:
        if isinstance(url, str) or isinstance(url, unicode):
            if U.add(pid, host_id, url, '', 0, 0, 'pre'):
                added += 1
        else:
            if U.add(pid, host_id, url['url'], '', url['code'], url['time'], 'pre'):
                added += 1

    return added
def _insert_urls(self, urls):
    """ Add found urls in db """
    UrlsBase = UrlsBaseModel()
    pid = Registry().get('pData')['id']
    host_id = HostsModel().get_id_by_name(pid, self.options['host'].value)
    Urls = UrlsModel()

    added = 0
    for url in urls:
        if Urls.add(pid, host_id, url['url'], '', url['code'], url['time'], 'dafs'):
            added += 1

        paths = urlparse(url['url']).path.split("/")
        while len(paths) != 1:
            del paths[-1]
            if Urls.add(pid, host_id, "/".join(paths) + "/", '', 0, 0, 'dafs'):
                added += 1

        UrlsBase.add_url(host_id, url['url'])

    return added
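# NOTE: illustrative sketch only (not part of the project). It mirrors the
# parent-directory walk above ('while len(paths) != 1'), which registers
# every ancestor directory of a found URL, root included; parent_dirs is
# a hypothetical helper.
from urlparse import urlparse  # Python 3: from urllib.parse import urlparse


def parent_dirs(url):
    """'http://ex.com/a/b/c.php' -> ['/a/b/', '/a/', '/']"""
    paths = urlparse(url).path.split("/")
    dirs = []
    while len(paths) > 1:
        del paths[-1]
        dirs.append("/".join(paths) + "/")
    return dirs


if __name__ == '__main__':
    print(parent_dirs("http://example.com/a/b/c.php"))  # ['/a/b/', '/a/', '/']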
class Test_UrlsModel(Common):
    """Unit tests for UrlsModel"""
    model = None

    def setup(self):
        self.model = UrlsModel()
        self.db.q("TRUNCATE TABLE urls")
        self.db.q("TRUNCATE TABLE urls_base")
        self.db.q("TRUNCATE TABLE urls_base_params")
        self.db.q("TRUNCATE TABLE projects")
        self.db.q("TRUNCATE TABLE hosts")
        self.db.q("TRUNCATE TABLE ips")

    def test_one_add(self):
        assert self.db.fetch_one("SELECT 1 FROM urls") is None

        _id = self.model.add(1, 2, '/1/', '/ref', 200, 10, 'dafs', 1, 100, 'desc')
        assert bool(_id)

        test_url = self.db.fetch_row("SELECT * FROM urls WHERE id = " + str(_id))
        assert test_url['project_id'] == 1
        assert test_url['host_id'] == 2
        assert test_url['hash'] == md5('/1/')
        assert test_url['url'] == '/1/'
        assert test_url['referer'] == '/ref'
        assert test_url['response_code'] == 200
        assert test_url['response_time'] == 10
        assert test_url['size'] == 100
        assert test_url['descr'] == 'desc'
        assert test_url['spidered'] == 1

    def test_add_mass(self):
        assert self.db.fetch_one("SELECT 1 FROM urls") is None

        data = [
            {'url': '/1/', 'referer': '/ref1/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/2/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/3/', 'referer': '/ref3/', 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/4/', 'referer': '/ref4/', 'response_code': 401, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/5/', 'referer': '/ref5/', 'response_code': 401, 'response_time': 10, 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/6/', 'referer': '/ref6/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'size': 20, 'descr': 'some descr'},
            {'url': '/7/', 'referer': '/ref7/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'descr': 'some descr'},
            {'url': '/8/', 'referer': '/ref8/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 20}
        ]
        test_data = [
            {'url': '/1/', 'referer': '/ref1/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/2/', 'referer': '', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/3/', 'referer': '/ref3/', 'response_code': 0, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/4/', 'referer': '/ref4/', 'response_code': 401, 'response_time': 0, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/5/', 'referer': '/ref5/', 'response_code': 401, 'response_time': 10, 'who_add': 'human', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/6/', 'referer': '/ref6/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 0, 'size': 20, 'descr': 'some descr'},
            {'url': '/7/', 'referer': '/ref7/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 0, 'descr': 'some descr'},
            {'url': '/8/', 'referer': '/ref8/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs', 'spidered': 1, 'size': 20, 'descr': ''},
        ]
        self.model.add_mass(1, 2, data)

        for test_url in self.db.fetch_all("SELECT * FROM urls ORDER BY id ASC"):
            test_key = test_url['id'] - 1
            assert test_url['project_id'] == 1
            assert test_url['host_id'] == 2
            assert test_url['hash'] == md5(test_data[test_key]['url'])
            assert test_url['url'] == test_data[test_key]['url']
            assert test_url['referer'] == \
                ('' if 'referer' not in test_data[test_key].keys() else test_data[test_key]['referer'])
            assert test_url['response_code'] == \
                (0 if 'response_code' not in test_data[test_key].keys() else test_data[test_key]['response_code'])
            assert test_url['response_time'] == \
                (0 if 'response_time' not in test_data[test_key].keys() else test_data[test_key]['response_time'])
            assert test_url['size'] == \
                (0 if 'size' not in test_data[test_key].keys() else test_data[test_key]['size'])
            assert test_url['who_add'] == \
                ('human' if 'who_add' not in test_data[test_key].keys() else test_data[test_key]['who_add'])
            assert test_url['descr'] == \
                ('' if 'descr' not in test_data[test_key].keys() else test_data[test_key]['descr'])
            assert test_url['spidered'] == \
                (0 if 'spidered' not in test_data[test_key].keys() else test_data[test_key]['spidered'])

    def test_add_mass_wo_url(self):
        with pytest.raises(WSException) as ex:
            self.model.add_mass(1, 2, [{'aaa': 'bbb'}])
        assert "ERROR: URL row must have a 'url' key" in str(ex)

    def test_add_mass_left_field(self):
        with pytest.raises(WSException) as ex:
            self.model.add_mass(1, 2, [{'url': 'aaa', 'aaa': 'bbb'}])
        assert "ERROR: Key 'aaa' must not be in url data" in str(ex)

    def test_list_by_host_name(self):
        self.db.q("INSERT INTO `hosts` (`id`, `project_id`, `ip_id`, `name`, `descr`) VALUES(2, 1, 1, 'test.com', '')")
        data = [
            {'url': '/1/', 'referer': '/ref1/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs1', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/2/', 'referer': '/ref2/', 'response_code': 402, 'response_time': 10, 'who_add': 'dafs2', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/3/', 'referer': '/ref3/', 'response_code': 403, 'response_time': 10, 'who_add': 'dafs3', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/4/', 'referer': '/ref4/', 'response_code': 200, 'response_time': 10, 'who_add': 'dafs4', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/5/', 'referer': '/ref5/', 'response_code': 201, 'response_time': 10, 'who_add': 'dafs5', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/6/', 'referer': '/ref6/', 'response_code': 301, 'response_time': 10, 'who_add': 'dafs6', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/7/', 'referer': '/ref7/', 'response_code': 501, 'response_time': 10, 'who_add': 'dafs7', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/8/', 'referer': '/ref8/', 'response_code': 101, 'response_time': 10, 'who_add': 'dafs8', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
        ]
        self.model.add_mass(1, 2, data)

        urls = self.model.list_by_host_name(1, 'test.com')
        assert len(urls) == len(data)

        k = 0
        for url in urls:
            assert url['url'] == data[k]['url']
            assert url['code'] == data[k]['response_code']
            assert url['time'] == data[k]['response_time']
            assert url['who_add'] == data[k]['who_add']
            assert url['descr'] == data[k]['descr']
            k += 1

    def test_list_by_host_name_for_spider(self):
        self.db.q("INSERT INTO `hosts` (`id`, `project_id`, `ip_id`, `name`, `descr`) VALUES(2, 1, 1, 'test.com', '')")
        data = [
            {'url': '/1/', 'referer': '/ref1/', 'response_code': 401, 'response_time': 10, 'who_add': 'dafs1', 'spidered': 0, 'size': 20, 'descr': 'some descr'},
            {'url': '/2/', 'referer': '/ref2/', 'response_code': 402, 'response_time': 10, 'who_add': 'dafs2', 'spidered': 0, 'size': 20, 'descr': 'some descr'},
            {'url': '/3/', 'referer': '/ref3/', 'response_code': 403, 'response_time': 10, 'who_add': 'dafs3', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/4/', 'referer': '/ref4/', 'response_code': 200, 'response_time': 10, 'who_add': 'dafs4', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/5/', 'referer': '/ref5/', 'response_code': 201, 'response_time': 10, 'who_add': 'dafs5', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/6/', 'referer': '/ref6/', 'response_code': 301, 'response_time': 10, 'who_add': 'dafs6', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/7/', 'referer': '/ref7/', 'response_code': 501, 'response_time': 10, 'who_add': 'dafs7', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
            {'url': '/8/', 'referer': '/ref8/', 'response_code': 101, 'response_time': 10, 'who_add': 'dafs8', 'spidered': 1, 'size': 20, 'descr': 'some descr'},
        ]
        self.model.add_mass(1, 2, data)

        data = [data[0], data[1]]

        urls = self.model.list_by_host_name_for_spider(1, 'test.com')
        assert len(urls) == len(data)

        k = 0
        for url in urls:
            assert url['url'] == data[k]['url']
            assert url['code'] == data[k]['response_code']
            assert url['time'] == data[k]['response_time']
            assert url['who_add'] == data[k]['who_add']
            assert url['descr'] == data[k]['descr']
            k += 1

    def test_exists(self):
        self.db.q("INSERT INTO `hosts` (`id`, `project_id`, `ip_id`, `name`, `descr`) VALUES(2, 1, 1, 'test.com', '')")
        self.model.add(1, 2, '/1/', '/ref', 200, 10, 'dafs', 1, 100, 'desc')
        assert self.model.exists(1, 'test.com', '/1/')
        assert not self.model.exists(1, 'test.com', '/2/')

    def test_delete(self):
        self.db.q("INSERT INTO `hosts` (`id`, `project_id`, `ip_id`, `name`, `descr`) VALUES(2, 1, 1, 'test.com', '')")
        self.model.add(1, 2, '/1/', '/ref', 200, 10, 'dafs', 1, 100, 'desc')
        assert self.model.exists(1, 'test.com', '/1/')
        self.model.delete(1, 'test.com', '/1/')
        assert not self.model.exists(1, 'test.com', '/1/')

    def test_update_url_field(self):
        self.db.q("INSERT INTO `hosts` (`id`, `project_id`, `ip_id`, `name`, `descr`) VALUES(2, 1, 1, 'test.com', '')")
        self.model.add(1, 2, '/1/', '/ref', 200, 10, 'dafs', 1, 100, 'desc')
        assert self.db.fetch_one("SELECT response_code FROM urls") == 200
        self.model.update_url_field(1, 'test.com', '/1/', 'response_code', 300)
        assert self.db.fetch_one("SELECT response_code FROM urls") == 300

    def test_update_url_field_mass(self):
        self.db.q("INSERT INTO `hosts` (`id`, `project_id`, `ip_id`, `name`, `descr`) VALUES(2, 1, 1, 'test.com', '')")
        self.model.add(1, 2, '/1/', '/ref', 200, 10, 'dafs', 1, 100, 'desc')
        self.model.add(1, 2, '/2/', '/ref', 200, 10, 'dafs', 1, 100, 'desc')
        update_data = [{'url': '/1/', 'value': 300}, {'url': '/2/', 'value': 400}]
        self.model.update_url_field_mass(1, 'test.com', 'response_code', update_data)
        assert self.db.fetch_one("SELECT response_code FROM urls WHERE url='/1/'") == 300
        assert self.db.fetch_one("SELECT response_code FROM urls WHERE url='/2/'") == 400
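# NOTE: hypothetical sketch only, not UrlsModel's actual code. The add_mass
# tests above pin down a contract: missing keys get defaults ('' referer,
# 0 response_code/response_time/size, 'human' who_add, '' descr, 0 spidered),
# a row without 'url' is an error, and unknown keys are rejected.
URL_ROW_DEFAULTS = {
    'referer': '', 'response_code': 0, 'response_time': 0,
    'size': 0, 'who_add': 'human', 'descr': '', 'spidered': 0,
}


def normalize_url_row(row):
    """Validate a url row and fill in the defaults the tests expect."""
    if 'url' not in row:
        raise ValueError("ERROR: URL row must have a 'url' key")
    for key in row:
        if key != 'url' and key not in URL_ROW_DEFAULTS:
            raise ValueError("ERROR: Key '{0}' must not be in url data".format(key))
    filled = dict(URL_ROW_DEFAULTS)
    filled.update(row)
    return filled


if __name__ == '__main__':
    print(normalize_url_row({'url': '/2/', 'response_code': 401})['who_add'])  # 'human'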
def scan_action(self):
    """ Scan action of module """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()

    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)

    result = []

    q = FuzzerUrlsJob()
    U = UrlsModel()

    if os.path.exists('/tmp/fuzzer-urls.txt'):
        os.remove('/tmp/fuzzer-urls.txt')

    urls = U.list_by_host_name(Registry().get('pData')['id'], self.options['host'].value)
    for url in urls:
        if url['url'].count('?'):
            to_add = self._generate_fuzz_urls(url['url'])
            for item in to_add:
                file_put_contents('/tmp/fuzzer-urls.txt', item + "\n", True)

    generator = FileGenerator('/tmp/fuzzer-urls.txt')
    q.set_generator(generator)

    self.logger.log("Loaded {0} variants.".format(generator.lines_count))

    counter = WSCounter(1, 60, generator.lines_count)

    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SFuzzerUrlsThread(
                q, self.options['host'].value, self.options['protocol'].value.lower(),
                self.options['method'].value.lower(), self.options['delay'].value,
                self.options['ddos-detect-phrase'].value, self.options['ddos-human-action'].value,
                self.options['browser-recreate-phrase'].value, counter, result
            )
        else:
            worker = FuzzerUrlsThread(
                q, self.options['host'].value, self.options['protocol'].value.lower(),
                self.options['method'].value.lower(), self.options['delay'].value,
                counter, result
            )
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        time.sleep(1)

    while len(w_thrds):
        for worker in w_thrds:
            if worker.done or Registry().get('proxy_many_died'):
                del w_thrds[w_thrds.index(worker)]

            if int(time.time()) - worker.last_action > int(Registry().get('config')['main']['kill_thread_after_secs']):
                self.logger.log("Thread killed by time")
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)

    if result:
        self.logger.log("\nPut results into DB...")

        Requests = RequestsModel()
        Hosts = HostsModel()

        project_id = Registry().get('pData')['id']
        host_id = Hosts.get_id_by_name(project_id, self.options['host'].value)

        added = 0
        for fuzz in result:
            self.logger.log("{0} {1}://{2}{3} (Word: {4})".format(
                self.options['method'].value.upper(),
                self.options['protocol'].value.lower(),
                self.options['host'].value,
                fuzz['url'],
                ", ".join(fuzz['words'])
            ))
            _id = Requests.add(
                project_id, host_id, urlparse(fuzz['url']).path, urlparse(fuzz['url']).query, {},
                self.options['method'].value, self.options['protocol'].value.lower(), 'fuzzer',
                'Found word: {0}'.format(", ".join(fuzz['words']))
            )
            added += 1 if _id else 0

        self.logger.log("Added {0} new requests in database".format(added))

    self.done = True
def scan_action(self):
    """ Scan action of module """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()

    self.model = CmsModel()

    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)

    result = []

    q = CmsJob()
    for item in self.model.all_paths_list():
        q.put(item.strip())

    self.logger.log("Loaded {0} variants.".format(q.qsize()))

    counter = WSCounter(1, 60, q.qsize())

    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SCmsThread(
                q, self.options['host'].value, self.options['url'].value,
                self.options['protocol'].value.lower(), self.options['method'].value.lower(),
                self.options['not-found-re'].value, self.options['delay'].value,
                self.options['ddos-detect-phrase'].value, self.options['ddos-human-action'].value,
                self.options['browser-recreate-re'].value, counter, result
            )
        else:
            worker = CmsThread(
                q, self.options['host'].value, self.options['url'].value,
                self.options['protocol'].value.lower(), self.options['method'].value.lower(),
                self.options['not-found-re'].value, self.options['not-found-size'].value,
                self.options['not-found-codes'].value.lower(), self.options['delay'].value,
                counter, result
            )
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        time.sleep(1)

    timeout_threads_count = 0
    while len(w_thrds):
        for worker in w_thrds:
            if Registry().get('proxy_many_died'):
                worker.done = True
                time.sleep(3)

            if worker.done or Registry().get('positive_limit_stop'):
                del w_thrds[w_thrds.index(worker)]

            if int(time.time()) - worker.last_action > int(Registry().get('config')['main']['kill_thread_after_secs']):
                self.logger.log(
                    "Thread killed by time, resurrected {0} of {1} times".format(
                        timeout_threads_count,
                        Registry().get('config')['main']['timeout_threads_resurect_max_count']
                    )
                )
                del w_thrds[w_thrds.index(worker)]

                if timeout_threads_count <= int(Registry().get('config')['main']['timeout_threads_resurect_max_count']):
                    if self.options['selenium'].value:
                        worker = SCmsThread(
                            q, self.options['host'].value, self.options['url'].value,
                            self.options['protocol'].value.lower(), self.options['method'].value.lower(),
                            self.options['not-found-re'].value, self.options['delay'].value,
                            self.options['ddos-detect-phrase'].value, self.options['ddos-human-action'].value,
                            self.options['browser-recreate-re'].value, counter, result
                        )
                    else:
                        worker = CmsThread(
                            q, self.options['host'].value, self.options['url'].value,
                            self.options['protocol'].value.lower(), self.options['method'].value.lower(),
                            self.options['not-found-re'].value, self.options['not-found-codes'].value.lower(),
                            self.options['delay'].value, counter, result
                        )
                    worker.setDaemon(True)
                    worker.start()
                    w_thrds.append(worker)

                    timeout_threads_count += 1

        time.sleep(2)

    if Registry().get('positive_limit_stop'):
        self.logger.log("\nMany positive detections. Please look at the items logs")
        self.logger.log("Last items:")
        for i in range(1, 5):
            print "{0} {1}".format(result[-i]['code'], result[-i]['path'])
        exit(0)

    pid = Registry().get('pData')['id']
    host_id = HostsModel().get_id_by_name(pid, self.options['host'].value)

    Urls = UrlsModel()
    UrlsBase = UrlsBaseModel()

    if int(Registry().get('config')['main']['put_data_into_db']):
        self.logger.log("\nInsert result info in DB...")

        _all = 0
        added = 0

        HostsInfo = HostsInfoModel()
        to_hosts_info = []

        hash_ids = []
        for link in result:
            hash_ids.append(self.model.get_hash_id_by_path(link['path']))

            _all += 1
            if Urls.add(pid, host_id, link['path'], '', link['code'], 0, 'cms'):
                added += 1
            UrlsBase.add_url(host_id, link['path'])

        self.logger.log("\nFound {0} URLs, inserted in database (new) - {1}.".format(_all, added))

        cms_list = self.model.cms_list()
        for cms_id in self.model.get_cms_by_hash_ids(hash_ids):
            cms_paths = self.model.get_cms_paths(cms_id)

            current_count = 0
            for link in result:
                if link['path'] in cms_paths:
                    current_count += 1

            # float division so the coverage percent is not truncated to 0
            percent = int(current_count / float(len(cms_paths)) * 100)
            if int(Registry().get('config')['cms']['percent']) <= percent:
                to_hosts_info.append({'name': cms_list[cms_id], 'percent': percent})
                self.logger.log("{0}\t{1}%".format(cms_list[cms_id], percent))

        if len(to_hosts_info):
            HostsInfo.set_info(pid, host_id, 'cms', json.dumps(to_hosts_info))
    else:
        hash_ids = []
        for link in result:
            hash_ids.append(self.model.get_hash_id_by_path(link['path']))

        cms_list = self.model.cms_list()
        for cms_id in self.model.get_cms_by_hash_ids(hash_ids):
            cms_paths = self.model.get_cms_paths(cms_id)

            current_count = 0
            for link in result:
                if link['path'] in cms_paths:
                    current_count += 1

            percent = int(current_count / float(len(cms_paths)) * 100)
            if int(Registry().get('config')['cms']['percent']) <= percent:
                self.logger.log("{0}\t{1}%".format(cms_list[cms_id], percent))

    self.done = True
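# NOTE: illustrative sketch only (not part of the project). The CMS check
# above reports a CMS when the share of its known paths that were actually
# found reaches the configured threshold; the names here are stand-ins for
# cms_paths / result / config['cms']['percent'].
def cms_match_percent(known_cms_paths, found_paths):
    """Percentage of a CMS's known paths present among the found paths."""
    if not known_cms_paths:
        return 0
    hits = sum(1 for path in found_paths if path in known_cms_paths)
    return int(hits / float(len(known_cms_paths)) * 100)


if __name__ == '__main__':
    known = {'/wp-login.php', '/wp-admin/', '/wp-content/', '/xmlrpc.php'}
    found = ['/wp-login.php', '/xmlrpc.php', '/robots.txt']
    percent = cms_match_percent(known, found)  # 50
    if percent >= 50:                          # e.g. a 50% threshold
        print("CMS detected: {0}%".format(percent))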
def scan_action(self):
    """ Scan action of module """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()

    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)

    result = []

    q = FuzzerHeadersJob()
    U = UrlsModel()
    urls = U.list_by_host_name(Registry().get('pData')['id'], self.options['host'].value)

    to_scan = []
    for url in urls:
        to_scan.append(url['url'])
    q.load_dict(to_scan)

    self.logger.log("Loaded {0} variants.".format(len(to_scan)))

    counter = WSCounter(1, 60, len(to_scan))

    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        worker = FuzzerHeadersThread(
            q, self.options['host'].value, self.options['protocol'].value.lower(),
            self.options['method'].value.lower(), self.options['delay'].value,
            counter, result
        )
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        time.sleep(1)

    while len(w_thrds):
        for worker in w_thrds:
            if worker.done:
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)

    Requests = RequestsModel()
    Hosts = HostsModel()

    project_id = Registry().get('pData')['id']
    host_id = Hosts.get_id_by_name(project_id, self.options['host'].value)

    added = 0
    for fuzz in result:
        self.logger.log("{0} {1}://{2}{3} (Word: {4}, Header: {5})".format(
            self.options['method'].value.upper(),
            self.options['protocol'].value.lower(),
            self.options['host'].value,
            fuzz['url'],
            ", ".join(fuzz['words']),
            fuzz['header']
        ))
        _id = Requests.add(
            project_id, host_id, urlparse(fuzz['url']).path, urlparse(fuzz['url']).query,
            {fuzz['header']: Registry().get('fuzzer_evil_value')},
            self.options['method'].value, self.options['protocol'].value.lower(), 'fuzzer',
            'Found word(s): {0}'.format(", ".join(fuzz['words']))
        )
        added += 1 if _id else 0

    self.logger.log("\nAdded {0} new requests in database".format(added))

    self.done = True
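# NOTE: illustrative sketch only (not part of the project). A self-contained
# version of the worker-pool wait loop used by these scan actions: finished
# workers are dropped from the list, and a worker that has not reported
# activity within a time limit is abandoned. DemoWorker and the 10-second
# limit stand in for the real threads and kill_thread_after_secs.
import threading
import time


class DemoWorker(threading.Thread):
    """Toy worker that just sleeps; real workers pull jobs from a queue."""
    def __init__(self, seconds):
        threading.Thread.__init__(self)
        self.seconds = seconds
        self.done = False
        self.last_action = int(time.time())

    def run(self):
        time.sleep(self.seconds)
        self.last_action = int(time.time())
        self.done = True


if __name__ == '__main__':
    workers = [DemoWorker(s) for s in (1, 2, 3)]
    for w in workers:
        w.setDaemon(True)
        w.start()

    kill_after_secs = 10
    while len(workers):
        for w in workers[:]:  # iterate over a copy so removal is safe
            if w.done:
                workers.remove(w)
            elif int(time.time()) - w.last_action > kill_after_secs:
                workers.remove(w)  # give up on a stalled worker
        time.sleep(1)
    print("all workers finished")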
def scan_action(self):
    """ Scan action of module """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()

    self.model = CmsModel()

    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)

    result = []

    q = CmsJob()
    for item in self.model.all_paths_list():
        q.put(item.strip())

    self.logger.log("Loaded {0} variants.".format(q.qsize()))

    counter = WSCounter(1, 60, q.qsize())

    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SCmsThread(
                q, self.options['host'].value, self.options['url'].value,
                self.options['protocol'].value.lower(), self.options['method'].value.lower(),
                self.options['not-found-re'].value, self.options['delay'].value,
                self.options['ddos-detect-phrase'].value, self.options['ddos-human-action'].value,
                self.options['browser-recreate-re'].value, counter, result
            )
        else:
            worker = CmsThread(
                q, self.options['host'].value, self.options['url'].value,
                self.options['protocol'].value.lower(), self.options['method'].value.lower(),
                self.options['not-found-re'].value, self.options['not-found-codes'].value.lower(),
                self.options['delay'].value, counter, result
            )
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        time.sleep(1)

    while len(w_thrds):
        for worker in w_thrds:
            if Registry().get('proxy_many_died'):
                worker.done = True
                time.sleep(3)

            if worker.done:
                del w_thrds[w_thrds.index(worker)]

            if int(time.time()) - worker.last_action > int(Registry().get('config')['main']['kill_thread_after_secs']):
                self.logger.log("Thread killed by time")
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)

    pid = Registry().get('pData')['id']
    host_id = HostsModel().get_id_by_name(pid, self.options['host'].value)

    Urls = UrlsModel()
    UrlsBase = UrlsBaseModel()

    self.logger.log("\nInsert result info in DB...")

    _all = 0
    added = 0

    HostsInfo = HostsInfoModel()
    to_hosts_info = []

    hash_ids = []
    for link in result:
        hash_ids.append(self.model.get_hash_id_by_path(link['path']))

        _all += 1
        if Urls.add(pid, host_id, link['path'], '', link['code'], 0, 'cms'):
            added += 1
        UrlsBase.add_url(host_id, link['path'])

    self.logger.log("\nFound {0} URLs, inserted in database (new) - {1}.".format(_all, added))

    cms_list = self.model.cms_list()
    for cms_id in self.model.get_cms_by_hash_ids(hash_ids):
        cms_paths = self.model.get_cms_paths(cms_id)

        current_count = 0
        for link in result:
            if link['path'] in cms_paths:
                current_count += 1

        # float division so the coverage percent is not truncated to 0
        percent = int(current_count / float(len(cms_paths)) * 100)
        if int(Registry().get('config')['cms']['percent']) <= percent:
            to_hosts_info.append({'name': cms_list[cms_id], 'percent': percent})
            self.logger.log("{0}\t{1}%".format(cms_list[cms_id], percent))

    if len(to_hosts_info):
        HostsInfo.set_info(pid, host_id, 'cms', json.dumps(to_hosts_info))

    self.done = True
class Urls(WSModule):
    """ Class of Urls module """
    model = None
    log_path = '/dev/null'
    options = {}
    options_sets = {
        "list": {
            "host": WSOption("host", "Host for view urls", "", True, ['--host']),
            "like": WSOption("like", "Word for LIKE sql-expression, like %word%.", "", False, ['--like']),
        },
        "export": {
            "host": WSOption("host", "Host for view urls", "", True, ['--host']),
            "like": WSOption("like", "Word for LIKE sql-expression, like %word%.", "", False, ['--like']),
            "without-host": WSOption(
                "without-host",
                "Print host in links (1) or not (0). Default (1).",
                "1",
                False,
                ['--without-host']
            ),
            "protocol": WSOption(
                "protocol",
                "Protocol http or https (default - http)",
                "http",
                False,
                ['--protocol']
            ),
        },
        "delete": {
            "url": WSOption("url", "URL for delete", "", True, ['--url']),
            "host": WSOption("host", "Host for delete", "", True, ['--host']),
        },
        "add": {
            "url": WSOption("url", "URL for add", "", True, ['--url']),
            "host": WSOption("host", "Host for add", "", True, ['--host']),
            "descr": WSOption("descr", "Description of URL", "", False, ['--descr'])
        }
    }

    def __init__(self, kernel):
        WSModule.__init__(self, kernel)
        self.model = UrlsModel()

    def validate_main(self):
        """ Check users params """
        if not HostsModel().exists(Registry().get('pData')['id'], self.options['host'].value):
            raise WSException("Host '{0}' not found in this project!".format(self.options['host'].value))
        if 'url' in self.options and self.options['url'].value[0] != '/':
            raise WSException("URL must start from the root ('/')!")

    def list_action(self):
        """ Action list of module """
        self.validate_main()

        print "{0:=^111}".format("")
        print "|{0: ^99}|".format("URLs of host '{0}'".format(self.options['host'].value))
        print "{0:=^111}".format("")
        print "| {0: ^23}| {1: ^7}| {2: ^8}| {3: ^23}| {4: ^19}| {5: ^8}| {6: ^8}|".\
            format('URL', 'Code', 'Time', 'Description', 'Added', 'Who', 'Size')
        print "{0:=^111}".format("")

        urls = self.model.list_by_host_name(
            Registry().get('pData')['id'],
            self.options['host'].value,
            self.options['like'].value
        )
        for url in urls:
            print "| {0: <23}| {1: ^7}| {2: <7} | {3: <23}| {4: <19}| {5: ^8}| {6: ^8}|".\
                format(
                    url['url'],
                    url['code'],
                    str(url['time']) + " sec",
                    url['descr'],
                    datetime.datetime.fromtimestamp(int(url['when_add'])).strftime('%Y-%m-%d %H:%M:%S'),
                    url['who_add'],
                    url['size']
                )
        print "{0:=^111}".format("")

    def export_action(self):
        """ Action export of module """
        self.validate_main()

        urls = self.model.list_by_host_name(
            Registry().get('pData')['id'],
            self.options['host'].value,
            self.options['like'].value
        )
        for url in urls:
            if not int(self.options['without-host'].value):
                link = url['url']
            else:
                link = "{0}://{1}{2}".format(
                    self.options['protocol'].value,
                    self.options['host'].value,
                    url['url']
                )
            print link

    def add_action(self):
        """ Action add of module """
        pid = Registry().get('pData')['id']
        self.validate_main()

        if self.model.exists(pid, self.options['host'].value, self.options['url'].value):
            raise WSException("URL '{0}' already exists in this project in host '{1}'!".
                              format(self.options['url'].value, self.options['host'].value))

        host_id = HostsModel().get_id_by_name(pid, self.options['host'].value)

        if (self.options['url'].value[-1] == '/' and
                self.model.exists(pid, self.options['host'].value, self.options['url'].value[:-1])) or \
                (self.options['url'].value[-1] != '/' and
                 self.model.exists(pid, self.options['host'].value, self.options['url'].value + "/")):
            if raw_input('URL {0} has an analogue in the database (with or without a trailing slash). '
                         'Do you really want to add it (y/n)? '
                         .format(self.options['url'].value)).lower()[0] != 'y':
                print "Url {0} was not added!".format(self.options['url'].value)
                return

        self.model.add(
            Registry().get('pData')['id'],
            host_id,
            self.options['url'].value
        )
        print " URL '{0}' successfully added to host '{1}'".\
            format(self.options['url'].value, self.options['host'].value)

    def delete_action(self):
        """ Delete action of module """
        self.validate_main()

        if not self.model.exists(Registry().get('pData')['id'],
                                 self.options['host'].value,
                                 self.options['url'].value):
            raise WSException("URL '{0}' does not exist in this project in host '{1}'!".
                              format(self.options['url'].value, self.options['host'].value))

        self.model.delete(Registry().get('pData')['id'],
                          self.options['host'].value,
                          self.options['url'].value)
        print "URL '{0}' in host '{1}' successfully deleted."\
            .format(self.options['url'].value, self.options['host'].value)

    def run(self, action):
        WSModule.run(self, action)
        self.done = True
def scan_action(self):
    """ Scan action of module """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()

    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)

    result = []

    q = FuzzerUrlsJob()
    U = UrlsModel()

    if os.path.exists('/tmp/fuzzer-urls.txt'):
        os.remove('/tmp/fuzzer-urls.txt')

    urls = U.list_by_host_name(Registry().get('pData')['id'], self.options['host'].value)
    for url in urls:
        if url['url'].count('?'):
            to_add = self._generate_fuzz_urls(url['url'])
            for item in to_add:
                file_put_contents('/tmp/fuzzer-urls.txt', item + "\n", True)

    generator = FileGenerator('/tmp/fuzzer-urls.txt')
    q.set_generator(generator)

    self.logger.log("Loaded {0} variants.".format(generator.lines_count))

    counter = WSCounter(1, 60, generator.lines_count)

    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SFuzzerUrlsThread(
                q, self.options['host'].value, self.options['protocol'].value.lower(),
                self.options['method'].value.lower(), self.options['delay'].value,
                self.options['ddos-detect-phrase'].value, self.options['ddos-human-action'].value,
                self.options['browser-recreate-phrase'].value, counter, result
            )
        else:
            worker = FuzzerUrlsThread(
                q, self.options['host'].value, self.options['protocol'].value.lower(),
                self.options['method'].value.lower(), self.options['delay'].value,
                counter, result
            )
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        time.sleep(1)

    timeout_threads_count = 0
    while len(w_thrds):
        for worker in w_thrds:
            if worker.done or Registry().get('proxy_many_died'):
                del w_thrds[w_thrds.index(worker)]

            if int(time.time()) - worker.last_action > int(Registry().get('config')['main']['kill_thread_after_secs']):
                self.logger.log(
                    "Thread killed by time, resurrected {0} of {1} times".format(
                        timeout_threads_count,
                        Registry().get('config')['main']['timeout_threads_resurect_max_count']
                    )
                )
                del w_thrds[w_thrds.index(worker)]

                if timeout_threads_count <= int(Registry().get('config')['main']['timeout_threads_resurect_max_count']):
                    if self.options['selenium'].value:
                        worker = SFuzzerUrlsThread(
                            q, self.options['host'].value, self.options['protocol'].value.lower(),
                            self.options['method'].value.lower(), self.options['delay'].value,
                            self.options['ddos-detect-phrase'].value, self.options['ddos-human-action'].value,
                            self.options['browser-recreate-phrase'].value, counter, result
                        )
                    else:
                        worker = FuzzerUrlsThread(
                            q, self.options['host'].value, self.options['protocol'].value.lower(),
                            self.options['method'].value.lower(), self.options['delay'].value,
                            counter, result
                        )
                    worker.setDaemon(True)
                    worker.start()
                    w_thrds.append(worker)

                    timeout_threads_count += 1

        time.sleep(2)

    if result:
        self.logger.log("\nPut results into DB...")

        Requests = RequestsModel()
        Hosts = HostsModel()

        project_id = Registry().get('pData')['id']
        host_id = Hosts.get_id_by_name(project_id, self.options['host'].value)

        added = 0
        for fuzz in result:
            self.logger.log("{0} {1}://{2}{3} (Word: {4})".format(
                self.options['method'].value.upper(),
                self.options['protocol'].value.lower(),
                self.options['host'].value,
                fuzz['url'],
                ", ".join(fuzz['words'])
            ))
            if int(Registry().get('config')['main']['put_data_into_db']):
                _id = Requests.add(
                    project_id, host_id, urlparse(fuzz['url']).path, urlparse(fuzz['url']).query, {},
                    self.options['method'].value, self.options['protocol'].value.lower(), 'fuzzer',
                    'Found word: {0}'.format(", ".join(fuzz['words']))
                )
                added += 1 if _id else 0

        self.logger.log("Added {0} new requests in database".format(added))

    self.done = True