def links_in_spider_base(pid, host):
    """ Put found links in MySQL

    Drains the Mongo `spider_urls` collection in pages of 50 and mirrors
    each link into the MySQL urls table: new rows are bulk-inserted, and
    rows for already-visited (checked) links get their response fields
    bulk-updated.

    :param pid: project id
    :param host: host name (used to resolve the MySQL host id)
    """
    # Page size for reading from Mongo and for the progress counter.
    links_per_time_limit = 50
    # Progress counter: 1 tick per step, new line every 60, total = page count.
    c = WSCounter(
        1, 60,
        int(Registry().get('mongo').spider_urls.count() /
            links_per_time_limit))
    Urls = UrlsModel()
    host_id = HostsModel().get_id_by_name(pid, host)
    urls_add = []
    skip = 0
    while True:
        # Fetch the next page of spidered links from Mongo.
        links = mongo_result_to_list(
            Registry().get('mongo').spider_urls.find().skip(skip).limit(
                links_per_time_limit))
        for link in links:
            # Ternary binds looser than `+`: this is
            # (path + '?' + query) if query else path.
            url = link['path'] + '?' + link['query'] if len(
                link['query']) else link['path']
            urls_add.append({
                'url': url,
                'referer': link['referer'],
                'response_code': link['code'],
                'response_time': link['time'],
                'size': link['size'],
                'who_add': 'spider',
                'spidered': link['checked']
            })
        # Bulk-insert this page, then reset the buffer for the next page.
        Urls.add_mass(pid, host_id, urls_add)
        urls_add = []
        # Second pass: for links the spider actually visited, push the
        # observed response fields onto rows that already existed in MySQL.
        to_update = {'spidered': [], 'code': [], 'time': [], 'size': []}
        for link in links:
            url = link['path'] + '?' + link['query'] if len(
                link['query']) else link['path']
            if link['checked']:
                # NOTE(review): reconstructed indentation — assumes all four
                # field updates apply only to visited links (updating
                # code/time/size for unvisited links would clobber real data
                # with zeros). Confirm against upstream history.
                to_update['spidered'].append({'url': url, 'value': 1})
                to_update['code'].append({'url': url, 'value': link['code']})
                to_update['time'].append({'url': url, 'value': link['time']})
                to_update['size'].append({'url': url, 'value': link['size']})
        Urls.update_url_field_mass(pid, host, 'spidered', to_update['spidered'])
        Urls.update_url_field_mass(pid, host, 'response_code', to_update['code'])
        Urls.update_url_field_mass(pid, host, 'response_time', to_update['time'])
        Urls.update_url_field_mass(pid, host, 'size', to_update['size'])
        skip += len(links)
        c.up()
        # A short page means the collection is exhausted.
        if len(links) < links_per_time_limit:
            break
def links_in_spider_base(pid, host):
    """ Put found links in MySQL

    Reads the Mongo `spider_urls` collection page by page, bulk-inserts
    every link into the MySQL urls table, and bulk-updates the response
    fields of links the spider actually visited.
    """
    batch_size = 50
    mongo_urls = Registry().get('mongo').spider_urls
    # One counter tick per processed batch; totals expressed in batches.
    progress = WSCounter(1, 60, int(mongo_urls.count() / batch_size))
    urls_model = UrlsModel()
    host_id = HostsModel().get_id_by_name(pid, host)
    offset = 0
    while True:
        batch = mongo_result_to_list(
            mongo_urls.find().skip(offset).limit(batch_size))
        # First pass: build the rows for the bulk insert.
        rows = []
        for entry in batch:
            if len(entry['query']):
                full_url = entry['path'] + '?' + entry['query']
            else:
                full_url = entry['path']
            rows.append({
                'url': full_url,
                'referer': entry['referer'],
                'response_code': entry['code'],
                'response_time': entry['time'],
                'size': entry['size'],
                'who_add': 'spider',
                'spidered': entry['checked']
            })
        urls_model.add_mass(pid, host_id, rows)
        # Second pass: collect per-field updates for visited links only.
        updates = {'spidered': [], 'code': [], 'time': [], 'size': []}
        for entry in batch:
            if len(entry['query']):
                full_url = entry['path'] + '?' + entry['query']
            else:
                full_url = entry['path']
            if entry['checked']:
                updates['spidered'].append({'url': full_url, 'value': 1})
                updates['code'].append({'url': full_url, 'value': entry['code']})
                updates['time'].append({'url': full_url, 'value': entry['time']})
                updates['size'].append({'url': full_url, 'value': entry['size']})
        urls_model.update_url_field_mass(pid, host, 'spidered', updates['spidered'])
        urls_model.update_url_field_mass(pid, host, 'response_code', updates['code'])
        urls_model.update_url_field_mass(pid, host, 'response_time', updates['time'])
        urls_model.update_url_field_mass(pid, host, 'size', updates['size'])
        offset += len(batch)
        progress.up()
        # Short batch => nothing more to read.
        if len(batch) < batch_size:
            break
def scan_action(self):
    """ Scan action of module

    Spider entry point: resets previous scan data, prepares the first
    pages, spawns worker threads (Selenium or plain HTTP), hands them to
    the kernel, waits for completion and flushes results into the DB.
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()
    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)
    # Drop leftovers of a previous scan of this host before starting.
    SpiderCommon.clear_old_data(self.options['host'].value)
    self.result = SpiderResult()
    # Push module options into the global registry for the workers.
    self._options_to_registry()
    if self.options['full-new'].value:
        SpiderCommon.make_full_new_scan()
    SpiderCommon.prepare_first_pages(self.options['host'].value)
    # Per-host data directory; world-writable so helper processes can use it.
    if not os.path.exists(Registry().get('data_path') +
                          self.options['host'].value):
        os.mkdir(Registry().get('data_path') + self.options['host'].value)
        os.chmod(Registry().get('data_path') + self.options['host'].value,
                 0o777)
    job = SpiderJob()
    src = SpiderRequestsCounter()
    counter = WSCounter(5, 300, 0)
    workers = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            # Browser-driven spider thread with DDoS-protection handling.
            worker = SSpiderThread(
                job, self.options['host'].value, src,
                self.options['not-found-re'].value,
                self.options['delay'].value,
                self.options['ddos-detect-phrase'].value,
                self.options['ddos-human-action'].value,
                self.options['browser-recreate-re'].value, counter)
        else:
            worker = SpiderThread(job, self.options['host'].value, src,
                                  self.options['delay'].value, counter)
        worker.setDaemon(True)
        workers.append(worker)
        # Stagger thread start-up to avoid hammering the target at once.
        time.sleep(1)
    # Threads are started and supervised by the kernel, not here.
    self.kernel.create_threads(workers)
    while not self.kernel.finished():
        time.sleep(2)
    self.logger.log("\nPut results into DB...")
    SpiderCommon.links_in_database(Registry().get('pData')['id'],
                                   self.options['host'].value)
    self.logger.log("\nTotal links count: " +
                    str(Registry().get('mongo').spider_urls.count()))
    self.logger.log(str(self.result))
def make_queue(self): """ Make work queue :return: """ self.queue = FuzzerHeadersJob() generator = FileGenerator(self.options['urls-file'].value) self.queue.set_generator(generator) self.logger.log("Loaded {0} variants.".format(generator.lines_count)) self.counter = WSCounter.factory(generator.lines_count)
def make_queue(self): """ Make work queue :return: """ self.queue = FuzzerUrlsJob() generator = FileGenerator(self.source_temp_file) self.queue.set_generator(generator) self.logger.log("Loaded {0} variants.".format(generator.lines_count)) self.counter = WSCounter.factory(generator.lines_count)
def links_in_urls_base(pid, host):
    """ Put links in url_base table (MySQL) for site tree build

    Streams the Mongo `spider_urls` collection in fixed-size batches and
    registers each URL under its host in the url_base table.
    """
    batch_size = 50
    spider_urls = Registry().get('mongo').spider_urls
    progress = WSCounter(1, 60, spider_urls.count() / batch_size)
    urls_base = UrlsBaseModel()
    host_id = HostsModel().get_id_by_name(pid, host)
    offset = 0
    while True:
        batch = mongo_result_to_list(
            spider_urls.find().skip(offset).limit(batch_size))
        for row in batch:
            if len(row['query']):
                full_url = row['path'] + '?' + row['query']
            else:
                full_url = row['path']
            urls_base.add_url(host_id, full_url)
        offset += len(batch)
        progress.up()
        # A partial batch means the cursor is exhausted.
        if len(batch) < batch_size:
            break
def links_in_urls_base(pid, host):
    """ Put links in url_base table (MySQL) for site tree build

    Pages through the Mongo `spider_urls` collection and adds every URL
    to the url_base table under the resolved host id.

    :param pid: project id
    :param host: host name (used to resolve the MySQL host id)
    """
    links_per_time_limit = 50
    # Progress counter sized in batches; Python 2 classic division here
    # already yields an int.
    c = WSCounter(
        1, 60,
        Registry().get('mongo').spider_urls.count() / links_per_time_limit)
    UrlsBase = UrlsBaseModel()
    host_id = HostsModel().get_id_by_name(pid, host)
    skip = 0
    while True:
        links = mongo_result_to_list(
            Registry().get('mongo').spider_urls.find().skip(skip).limit(
                links_per_time_limit))
        for link in links:
            # (path + '?' + query) when a query string is present.
            url = link['path'] + '?' + link['query'] if len(
                link['query']) else link['path']
            UrlsBase.add_url(host_id, url)
        skip += len(links)
        c.up()
        # Short page => done.
        if len(links) < links_per_time_limit:
            break
def make_queue(self):
    """Fill the DAFS work queue and size the progress counter.

    When a parts/part split is configured, only the selected slice of the
    dictionary is loaded and the counter covers just that slice.
    """
    self.queue = DafsJob()
    loaded = self.load_objects(self.queue)
    partial = int(self.options['parts'].value) and int(self.options['part'].value)
    if partial:
        message = "Loaded {0} words ({1}-{2}) from all {3}.".format(
            (loaded['end'] - loaded['start']), loaded['start'],
            loaded['end'], loaded['all'])
    else:
        message = "Loaded {0} words from source.".format(loaded['all'])
    self.logger.log(message)
    if loaded['end']:
        work_size = loaded['end'] - loaded['start']
    else:
        work_size = loaded['all']
    self.counter = WSCounter.factory(work_size)
def test_factory(self):
    """factory() must read the steps from config and stamp current time."""
    config = {
        'main': {
            'counter_step': '1',
            'counter_steps_for_new_string': '2'
        }
    }
    Registry().set('config', config)
    counter = WSCounter.factory(3)
    assert counter.point == 1
    assert counter.new_str == 2
    assert counter.all == 3
    assert counter.start_time == int(time.time())
    assert counter.last_point_time == int(time.time())
def make_queue(self): """ Make work queue :return: """ self.queue = ParamsBruterJob() loaded = self.load_objects(self.queue) self.logger.log("Loaded {0} words ({1}-{2}) from all {3}.".format(( loaded['end'] - loaded['start'] ), loaded['start'], loaded['end'], loaded['all']) if ( int(self.options['parts'].value) and int(self.options['part'].value) ) else "Loaded {0} words from source.".format(loaded['all'])) self.counter = WSCounter.factory( loaded['all'] if not loaded['end'] else loaded['end'] - loaded['start']) self.counter.point *= 10 self.counter.new_str *= 10
def scan_action(self):
    """ Scan action of module

    URL fuzzer entry point: expands every parametrized URL of the host
    into fuzz variants, feeds them to worker threads (Selenium or plain
    HTTP), then records each hit as a request in the DB.
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()
    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)
    result = []
    q = FuzzerUrlsJob()
    U = UrlsModel()
    # NOTE(review): fixed, world-readable path in /tmp is predictable and
    # race-prone; tempfile.mkstemp would be safer — confirm before changing.
    if os.path.exists('/tmp/fuzzer-urls.txt'):
        os.remove('/tmp/fuzzer-urls.txt')
    urls = U.list_by_host_name(Registry().get('pData')['id'],
                               self.options['host'].value)
    # Only URLs that already carry a query string are worth fuzzing.
    for url in urls:
        if url['url'].count('?'):
            to_add = self._generate_fuzz_urls(url['url'])
            for item in to_add:
                # Append mode (third arg True): variants accumulate in file.
                file_put_contents('/tmp/fuzzer-urls.txt', item + "\n", True)
    generator = FileGenerator('/tmp/fuzzer-urls.txt')
    q.set_generator(generator)
    self.logger.log("Loaded {0} variants.".format(generator.lines_count))
    counter = WSCounter(1, 60, generator.lines_count)
    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SFuzzerUrlsThread(
                q, self.options['host'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['delay'].value,
                self.options['ddos-detect-phrase'].value,
                self.options['ddos-human-action'].value,
                self.options['browser-recreate-phrase'].value, counter,
                result)
        else:
            worker = FuzzerUrlsThread(
                q, self.options['host'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['delay'].value, counter, result)
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        # Stagger thread start-up.
        time.sleep(1)
    # Supervise workers until all have finished or been killed.
    while len(w_thrds):
        for worker in w_thrds:
            if worker.done or Registry().get('proxy_many_died'):
                # NOTE(review): deleting from w_thrds while iterating it
                # skips the next element and can double-delete (ValueError
                # from .index) if both conditions fire — pattern repeated
                # across modules; confirm before changing.
                del w_thrds[w_thrds.index(worker)]
            # Kill threads stuck longer than the configured watchdog limit.
            if int(time.time()) - worker.last_action > int(Registry().get(
                    'config')['main']['kill_thread_after_secs']):
                self.logger.log("Thread killed by time")
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)
    if result:
        self.logger.log("\nPut results into DB...")
        Requests = RequestsModel()
        Hosts = HostsModel()
        project_id = Registry().get('pData')['id']
        host_id = Hosts.get_id_by_name(project_id,
                                       self.options['host'].value)
        added = 0
        for fuzz in result:
            self.logger.log("{0} {1}://{2}{3} (Word: {4})".format(
                self.options['method'].value.upper(),
                self.options['protocol'].value.lower(),
                self.options['host'].value, fuzz['url'],
                ", ".join(fuzz['words'])))
            # Requests.add returns a falsy id for duplicates; count new only.
            _id = Requests.add(
                project_id, host_id,
                urlparse(fuzz['url']).path,
                urlparse(fuzz['url']).query, {},
                self.options['method'].value,
                self.options['protocol'].value.lower(), 'fuzzer',
                'Found word: {0}'.format(", ".join(fuzz['words'])))
            added += 1 if _id else 0
        self.logger.log("Added {0} new requests in database".format(added))
    self.done = True
def brute_action(self):
    """ Brute action of module

    Form bruteforce entry point: loads the password dictionary (whole or
    a parts/part slice), spawns worker threads (Selenium or plain HTTP),
    supervises them with stop-flag and watchdog handling, then reports
    found passwords.
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()
    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)
    result = []
    q = FormBruterJob()
    loaded = self.load_objects(q)
    self.logger.log("Loaded {0} words from dict ({1}-{2}).".format(
        loaded['all'], loaded['start'], loaded['end']) if (
            int(self.options['parts'].value) and
            int(self.options['part'].value)
        ) else "Loaded {0} words from dict.".format(loaded['all']))
    # Counter covers either the whole dict or just the loaded slice.
    counter = WSCounter(
        5, 300, loaded['all']
        if not loaded['end'] else loaded['end'] - loaded['start'])
    w_thrds = []
    # Shared "password already found" flag handed to every worker.
    pass_found = False
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SFormBruterThread(
                q, self.options['protocol'].value,
                self.options['host'].value, self.options['url'].value,
                self.options['false-phrase'].value,
                self.options['true-phrase'].value,
                self.options['delay'].value,
                self.options['ddos-detect-phrase'].value,
                self.options['ddos-human-action'].value,
                self.options['browser-recreate-phrase'].value,
                self.options['conffile'].value,
                self.options['first-stop'].value.lower(),
                self.options['login'].value,
                #self.options['reload-form-page'].value.lower(),
                pass_found, counter, result)
        else:
            worker = FormBruterThread(
                q, self.options['protocol'].value,
                self.options['host'].value, self.options['url'].value,
                self.options['false-phrase'].value,
                self.options['true-phrase'].value,
                self.options['retest-codes'].value.lower(),
                self.options['delay'].value,
                self.options['confstr'].value,
                self.options['first-stop'].value.lower(),
                self.options['login'].value, pass_found, counter, result)
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        # Stagger thread start-up.
        time.sleep(1)
    # Supervision loop: propagate global stop conditions to every worker,
    # reap finished workers, kill stuck ones by watchdog timeout.
    while len(w_thrds):
        if Registry().get('proxy_many_died'):
            self.logger.log("Proxy many died, stop scan")
        for worker in w_thrds:
            if Registry().get('proxy_many_died') or Registry().get(
                    'positive_limit_stop'):
                worker.done = True
                # Give the worker a moment to notice the flag.
                time.sleep(3)
            if worker.done:
                # NOTE(review): deleting from w_thrds while iterating it
                # skips the following element — pattern repeated across
                # modules; confirm before changing.
                del w_thrds[w_thrds.index(worker)]
            if int(time.time()) - worker.last_action > int(Registry().get(
                    'config')['main']['kill_thread_after_secs']):
                self.logger.log("Thread killed by time")
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)
    if Registry().get('positive_limit_stop'):
        # Too many positive hits usually means false positives; dump the
        # tail of the results and abort the whole process.
        self.logger.log(
            "Many positive detections. Please, look items logs")
        self.logger.log("Last items:")
        for i in range(1, 5):
            print result[-i]
        exit(0)
    self.logger.log("")
    self.logger.log("Passwords found:")
    for row in result:
        self.logger.log('\t' + row['word'])
    self.done = True
def scan_action(self): """ Scan action of module """ self.enable_logger() self.validate_main() self.pre_start_inf() if self.options['proxies'].value: Registry().get('proxies').load(self.options['proxies'].value) if self.options['url'].value.find(self.options['msymbol'].value) == -1: raise WSException( "Symbol of object position ({0}) not found in URL ({1}) ". format(self.options['msymbol'].value, self.options['url'].value)) result = [] q = DafsJob() loaded = self.load_objects(q) self.logger.log("Loaded {0} words from dict ({1}-{2}).".format( loaded['all'], loaded['start'], loaded['end']) if ( int(self.options['parts'].value) and int(self.options['part'].value) ) else "Loaded {0} words from dict.".format(loaded['all'])) counter = WSCounter( 5, 300, loaded['all'] if not loaded['end'] else loaded['end'] - loaded['start']) w_thrds = [] for _ in range(int(self.options['threads'].value)): if self.options['selenium'].value: worker = SDafsThread( q, self.options['protocol'].value, self.options['host'].value, self.options['url'].value, self.options['method'].value.lower(), self.options['msymbol'].value, self.options['not-found-re'].value, self.options['delay'].value, self.options['ddos-detect-phrase'].value, self.options['ddos-human-action'].value, self.options['browser-recreate-re'].value, counter, result) else: worker = DafsThread( q, self.options['protocol'].value, self.options['host'].value, self.options['url'].value, self.options['method'].value.lower(), self.options['msymbol'].value, self.options['not-found-re'].value, self.options['not-found-codes'].value.lower(), self.options['retest-codes'].value.lower(), self.options['delay'].value, counter, result) worker.setDaemon(True) worker.start() w_thrds.append(worker) time.sleep(1) while len(w_thrds): if Registry().get('proxy_many_died'): self.logger.log("Proxy many died, stop scan") if Registry().get('proxy_many_died') or Registry().get( 'positive_limit_stop'): worker.done = True time.sleep(3) for worker in w_thrds: if 
worker.done: del w_thrds[w_thrds.index(worker)] if int(time.time()) - worker.last_action > int(Registry().get( 'config')['main']['kill_thread_after_secs']): self.logger.log("Thread killed by time") del w_thrds[w_thrds.index(worker)] time.sleep(2) if Registry().get('positive_limit_stop'): self.logger.log( "Many positive detections. Please, look items logs") self.logger.log("Last items:") for i in range(1, 5): print result[-i] exit(0) self.logger.log("\nInsert links in DB...") added = self._insert_urls(result) for result_row in result: self.logger.log("{0} {1}".format(result_row['code'], result_row['url'])) self.logger.log( "\nFound {0} URLs, inserted in database (new) - {1}.".format( len(result), added)) self.done = True
def scan_action(self):
    """ Scan action of module

    CMS detection entry point: probes every known CMS path against the
    host with worker threads, stores found URLs, then computes per-CMS
    match percentages and records detected CMSes in host info.
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()
    self.model = CmsModel()
    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)
    result = []
    q = CmsJob()
    for item in self.model.all_paths_list():
        q.put(item.strip())
    self.logger.log("Loaded {0} variants.".format(q.qsize()))
    counter = WSCounter(1, 60, q.qsize())
    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        if self.options['selenium'].value:
            worker = SCmsThread(
                q, self.options['host'].value, self.options['url'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['not-found-re'].value,
                self.options['delay'].value,
                self.options['ddos-detect-phrase'].value,
                self.options['ddos-human-action'].value,
                self.options['browser-recreate-re'].value, counter, result)
        else:
            worker = CmsThread(
                q, self.options['host'].value, self.options['url'].value,
                self.options['protocol'].value.lower(),
                self.options['method'].value.lower(),
                self.options['not-found-re'].value,
                self.options['not-found-codes'].value.lower(),
                self.options['delay'].value, counter, result)
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        # Stagger thread start-up.
        time.sleep(1)
    # Supervision loop: stop workers on proxy death, reap finished ones,
    # kill stuck ones by watchdog timeout.
    while len(w_thrds):
        for worker in w_thrds:
            if Registry().get('proxy_many_died'):
                worker.done = True
                time.sleep(3)
            if worker.done:
                del w_thrds[w_thrds.index(worker)]
            if int(time.time()) - worker.last_action > int(Registry().get(
                    'config')['main']['kill_thread_after_secs']):
                self.logger.log("Thread killed by time")
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)
    pid = Registry().get('pData')['id']
    host_id = HostsModel().get_id_by_name(pid, self.options['host'].value)
    Urls = UrlsModel()
    UrlsBase = UrlsBaseModel()
    self.logger.log("\nInsert result info in DB...")
    _all = 0
    added = 0
    HostsInfo = HostsInfoModel()
    to_hosts_info = []
    hash_ids = []
    for link in result:
        hash_ids.append(self.model.get_hash_id_by_path(link['path']))
        _all += 1
        # Urls.add returns falsy for already-known URLs; count new only.
        if Urls.add(pid, host_id, link['path'], '', link['code'], 0, 'cms'):
            added += 1
        UrlsBase.add_url(host_id, link['path'])
    self.logger.log(
        "\nFound {0} URLs, inserted in database (new) - {1}.".format(
            _all, added))
    cms_list = self.model.cms_list()
    for cms_id in self.model.get_cms_by_hash_ids(hash_ids):
        cms_paths = self.model.get_cms_paths(cms_id)
        current_count = 0
        for link in result:
            if link['path'] in cms_paths:
                current_count += 1
        # BUGFIX: multiply before dividing. The previous form
        # `int(current_count / len(cms_paths) * 100)` used Python 2
        # classic (floor) division, so the ratio collapsed to 0 for any
        # partial match and `percent` could only ever be 0 or 100.
        percent = int(current_count * 100 / len(cms_paths))
        if int(Registry().get('config')['cms']['percent']) <= percent:
            to_hosts_info.append({
                'name': cms_list[cms_id],
                'percent': percent
            })
            self.logger.log("{0}\t{1}%".format(cms_list[cms_id], percent))
    if len(to_hosts_info):
        HostsInfo.set_info(pid, host_id, 'cms', json.dumps(to_hosts_info))
    self.done = True
def scan_action(self):
    """ Scan action of module

    Header fuzzer entry point: loads every known URL of the host into the
    job queue, runs worker threads that inject an evil value into HTTP
    headers, then stores each hit as a request in the DB.
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()
    if self.options['proxies'].value:
        Registry().get('proxies').load(self.options['proxies'].value)
    result = []
    q = FuzzerHeadersJob()
    U = UrlsModel()
    urls = U.list_by_host_name(Registry().get('pData')['id'],
                               self.options['host'].value)
    to_scan = []
    for url in urls:
        to_scan.append(url['url'])
    q.load_dict(to_scan)
    self.logger.log("Loaded {0} variants.".format(len(to_scan)))
    counter = WSCounter(1, 60, len(to_scan))
    w_thrds = []
    for _ in range(int(self.options['threads'].value)):
        worker = FuzzerHeadersThread(
            q, self.options['host'].value,
            self.options['protocol'].value.lower(),
            self.options['method'].value.lower(),
            self.options['delay'].value, counter, result)
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        # Stagger thread start-up.
        time.sleep(1)
    # Wait until every worker reports done (no watchdog in this module).
    while len(w_thrds):
        for worker in w_thrds:
            if worker.done:
                # NOTE(review): deleting from w_thrds while iterating it
                # skips the following element; harmless here because the
                # outer while re-checks, but it delays reaping — confirm
                # before changing.
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)
    Requests = RequestsModel()
    Hosts = HostsModel()
    project_id = Registry().get('pData')['id']
    host_id = Hosts.get_id_by_name(project_id, self.options['host'].value)
    added = 0
    for fuzz in result:
        self.logger.log("{0} {1}://{2}{3} (Word: {4}, Header: {5})".format(
            self.options['method'].value.upper(),
            self.options['protocol'].value.lower(),
            self.options['host'].value, fuzz['url'],
            ", ".join(fuzz['words']), fuzz['header']))
        # Requests.add returns a falsy id for duplicates; count new only.
        _id = Requests.add(
            project_id, host_id,
            urlparse(fuzz['url']).path,
            urlparse(fuzz['url']).query,
            {fuzz['header']: Registry().get('fuzzer_evil_value')},
            self.options['method'].value,
            self.options['protocol'].value.lower(), 'fuzzer',
            'Found word(s): {0}'.format(", ".join(fuzz['words'])))
        added += 1 if _id else 0
    self.logger.log("\nAdded {0} new requests in database".format(added))
    self.done = True
def brute_action(self):
    """ Action brute of module

    DNS bruteforce entry point: loads the subdomain dictionary, picks a
    working DNS server (and its protocol) from the rotation list for each
    worker thread, runs the brute, then reports and stores found hosts.
    """
    self.enable_logger()
    self.validate_main()
    self.pre_start_inf()
    q = DnsBruteJob()
    loaded = self.load_objects(q)
    self.logger.log("Loaded {0} words from dict.".format(loaded['all']))
    counter = WSCounter(5, 300, loaded['all'])
    result = []
    w_thrds = []
    # Round-robin source of DNS servers to spread the query load.
    DnsRoller = Roller()
    DnsRoller.load_file(Registry().get('wr_path') +
                        '/bases/dns-servers.txt')
    for _ in range(int(self.options['threads'].value)):
        # Keep drawing servers from the roller until one answers.
        we_need_server = True
        while we_need_server:
            we_need_server = False
            try:
                next_server = DnsRoller.get()
                #print "Next DNS " + next_server
                if self.options['protocol'].value == 'auto':
                    # Probe TCP first, fall back to UDP; if both fail,
                    # draw another server.
                    try:
                        dns.query.tcp(
                            dns.message.make_query('test.com', 'A'),
                            next_server, timeout=5)
                        protocol = 'tcp'
                    except socket.error:
                        try:
                            dns.query.udp(
                                dns.message.make_query('test.com', 'A'),
                                next_server, timeout=5)
                            protocol = 'udp'
                        except socket.error:
                            #raise Exception('Can`t detect DNS-server protocol. Check addr.')
                            we_need_server = True
                            # TODO: rework this properly
                    #print 'DNS protolol detected automaticaly: ' + protocol
                else:
                    protocol = self.options['protocol'].value
            except dns.exception.Timeout:
                self.logger.log(
                    "Check server {0}. Don`t work.".format(next_server))
                we_need_server = True
        # NOTE(review): if the very first candidate fails, `protocol` may
        # still be unbound when reused here on a later retry path — the
        # retry loop normally reassigns it, but confirm there is no path
        # that exits with `we_need_server` False and `protocol` unset.
        worker = DnsBruteThread(q, self.options['host'].value, protocol,
                                self.options['msymbol'].value, next_server,
                                self.options['delay'].value, result,
                                counter)
        worker.setDaemon(True)
        worker.start()
        w_thrds.append(worker)
        # Stagger thread start-up.
        time.sleep(1)
    # Reap workers as they finish; positive_limit_stop drops them early.
    while len(w_thrds):
        for worker in w_thrds:
            if worker.done or Registry().get('positive_limit_stop'):
                del w_thrds[w_thrds.index(worker)]
        time.sleep(2)
    self.logger.log("\nFound hosts:")
    for host in result:
        self.logger.log("\t{0} (DNS: {1})".format(host['name'],
                                                  host['dns']))
    self.logger.log("Found IPs:")
    # Deduplicate IPs before printing.
    uniq_hosts = []
    for host in result:
        uniq_hosts.append(host['ip'])
    uniq_hosts = list(set(uniq_hosts))
    for host in uniq_hosts:
        self.logger.log("\t" + host)
    if not Registry().get('positive_limit_stop'):
        self.logger.log("Put found hosts into DB...")
        added = self._insert_hosts(result)
        self.logger.log(
            "\nFound {0} hosts, inserted in database (new) - {1}.".format(
                len(result), added))
    else:
        self.logger.log("Found")
    self.done = True