def fetch_dicts(self):
    session_spawner = Sessions()

    with session_spawner.get_session() as session:
        dicts_raw = (session.query(DictDatabase).filter(
            DictDatabase.dict_type == self.name).all())
        dicts = list(map(lambda dictionary: dictionary.dict(), dicts_raw))

    wordlists_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'wordlists')
    existing_dicts = os.listdir(wordlists_path)

    for dictionary in dicts:
        name = dictionary["name"]

        if name not in existing_dicts:
            with open(os.path.join(wordlists_path, name), "w") as w:
                w.write(dictionary["content"])
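# A minimal usage sketch for fetch_dicts above. It assumes the method lives
# on a class that defines `self.name` as the DictDatabase.dict_type it
# manages; the base class and tool names below are hypothetical, not taken
# from this repo.
class DirsearchDictionaries(DictionaryManagerBase):  # hypothetical base
    name = "dirsearch"

if __name__ == "__main__":
    # Writes any DB-stored "dirsearch" wordlists missing from ./wordlists
    DirsearchDictionaries().fetch_dicts()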
def __init__(self, scope_manager):
    self.scope_manager = scope_manager
    self.cache = TasksCache()
    self.exchange = None
    self.tasks_queue = None
    self.sessions = Sessions()
class ScanManager(object):
    def __init__(self):
        self.session_spawner = Sessions()

    def count(self, project_uuid=None):
        """ Count the number of scans in the DB for the specific project uuid """
        with self.session_spawner.get_session() as session:
            amount = session.query(ScanDatabase).filter(
                ScanDatabase.project_uuid == project_uuid).count()

        return amount
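# Quick usage sketch of ScanManager.count; the uuid below is a placeholder.
if __name__ == "__main__":
    manager = ScanManager()
    total = manager.count(project_uuid="aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee")
    print("scans in project:", total)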
class TasksCache:
    def __init__(self):
        self.active = dict()
        self.finished = dict()
        self.session_manager = Sessions()
        # Methods below log through self.logger, so set one up here
        # (assumes `import logging` at the module top; the name is arbitrary)
        self.logger = logging.getLogger("tasks_cache")
        self._restore_tasks_from_db()

    def _restore_tasks_from_db(self):
        with self.session_manager.get_session() as session:
            tasks_from_db = session.query(TaskDatabase).all()
            tasks = list(
                map(
                    lambda x: ShadowTask(task_id=x.task_id,
                                         task_type=x.task_type,
                                         target=json.loads(x.target),
                                         params=json.loads(x.params),
                                         project_uuid=x.project_uuid,
                                         status=x.status,
                                         progress=x.progress,
                                         text=x.text,
                                         date_added=x.date_added,
                                         date_finished=x.date_finished,
                                         stdout=x.stdout,
                                         stderr=x.stderr),
                    tasks_from_db))

        for task in tasks:
            status = task.get_status()[0]

            if status == 'Aborted' or status == 'Finished':
                self.finished[task.task_id] = task
            else:
                self.active[task.task_id] = task

    def get_fresh_active(self, project_uuid, update_fresh=False):
        tasks = list(
            filter(
                lambda task: task.fresh and
                (project_uuid is None or task.project_uuid == project_uuid),
                self._get_active_tasks()))

        if update_fresh:
            for task in tasks:
                task.set_fresh(False)

        return tasks

    def get_fresh_finished(self, project_uuid, update_fresh=False):
        tasks = list(
            filter(
                lambda task: task.fresh and
                (project_uuid is None or task.project_uuid == project_uuid),
                self._get_finished_tasks()))

        if update_fresh:
            for task in tasks:
                task.set_fresh(False)

        return tasks

    def get_active(self, project_uuid):
        return list(
            filter(
                lambda task:
                (project_uuid is None or task.project_uuid == project_uuid),
                self._get_active_tasks()))

    def get_finished(self, project_uuid):
        return list(
            filter(
                lambda task:
                (project_uuid is None or task.project_uuid == project_uuid),
                self._get_finished_tasks()))

    def _get_active_tasks(self):
        return self.active.values()

    def _get_finished_tasks(self):
        return self.finished.values()

    def add_tasks(self, tasks):
        for task in tasks:
            self.active[task.task_id] = task

    def update_task(self, body):
        task_id = body['task_id']
        new_status = body['status']
        new_progress = body['progress']
        new_text = body['text']
        new_stdout = body['new_stdout']
        new_stderr = body['new_stderr']

        task = self.active.get(task_id, None)

        if task is None:
            self.logger.error("The task id {} is not in cache".format(task_id))
            return

        if new_status != task.status or new_progress != task.progress:
            task.set_fresh(True)
            self.logger.debug("Task {} updated. {}->{}, {}->{}".format(
                task.task_id, task.status, new_status, task.progress,
                new_progress))

        task.set_status(new_status, new_progress, new_text, new_stdout,
                        new_stderr)

        if task.quitted():
            self.handle_quitted(task)

        return task

    def handle_quitted(self, task):
        self.finished[task.task_id] = task
        task.date_finished = datetime.utcnow()
        task.fresh = True
        del self.active[task.task_id]

    def get_active_task(self, task_id):
        return self.active.get(task_id, None)

    def cancel(self, task_id):
        task = self.get_active_task(task_id)

        if task:
            self.handle_quitted(task)
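# Sketch of the queue message body update_task consumes; the keys mirror the
# lookups in the method above, the values here are placeholders.
if __name__ == "__main__":
    cache = TasksCache()
    cache.update_task({
        "task_id": "task-uuid",  # must already be present in cache.active
        "status": "Working",
        "progress": 50,
        "text": "scanning",
        "new_stdout": "",
        "new_stderr": "",
    })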
def __init__(self):
    self.sessions = Sessions()
    self.dicts = []
    self.fetch_dicts()
class Saver(object):
    def __init__(self, task_id, project_uuid):
        self.task_id = task_id
        self.project_uuid = project_uuid
        self.target_db_reference = None
        self.session_spawner = Sessions()

    def get_id(self, target):
        if self.target_db_reference is None:
            with self.session_spawner.get_session() as session:
                host_id = (session.query(HostDatabase.id).filter(
                    HostDatabase.target == target).one_or_none())

                if host_id is not None:
                    ip_id = None
                else:
                    ip_id = (session.query(IPDatabase.id).filter(
                        IPDatabase.target == target).one_or_none())

            self.target_db_reference = (ip_id, host_id)

        return self.target_db_reference

    def save(self, url, status, response):
        special_note = None

        if status in [301, 302, 307] and 'location' in [
                h.lower() for h in response.headers
        ]:
            special_note = response.headers['location']

        self.save_file(url,
                       status,
                       len(response),
                       self.task_id,
                       self.project_uuid,
                       special_note=special_note)

    def save_file(self,
                  url,
                  status_code,
                  content_length,
                  task_id,
                  project_uuid,
                  special_note=None):
        try:
            parsed_url = urllib.parse.urlparse(url)
            # urlparse exposes the host and port directly; fall back to the
            # scheme's default port when the URL does not specify one
            target = parsed_url.hostname or parsed_url.netloc
            port_number = parsed_url.port or (
                443 if parsed_url.scheme == 'https' else 80)
            file_name = parsed_url.path

            ip_id, host_id = self.get_id(target)

            with self.session_spawner.get_session() as session:
                new_file = FileDatabase(file_id=str(uuid.uuid4()),
                                        file_name=file_name,
                                        host_id=host_id,
                                        ip_id=ip_id,
                                        port_number=port_number,
                                        file_path=url,
                                        status_code=status_code,
                                        content_length=content_length,
                                        special_note=special_note,
                                        task_id=task_id,
                                        project_uuid=project_uuid)
                session.add(new_file)
        except IntegrityError:
            # The same file was already recorded for this target
            pass
        except Exception as exc:
            print(exc)
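# Usage sketch for Saver above: a brute-force worker creates one instance per
# task and reports every probed URL through save(). The `response` argument is
# assumed to expose a `headers` mapping and to support len() (that length is
# stored as content_length); the URL and ids below are placeholders.
def example_report(response):
    saver = Saver(task_id="task-uuid", project_uuid="project-uuid")
    # A 301 with a Location header gets the header stored as special_note
    saver.save("http://example.com:8080/admin", 301, response)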
class ScopeManager(object):
    """ ScopeManager keeps track of all ips and hosts in the system,
    exposing some interfaces for public use. """

    def __init__(self):
        self.session_spawner = Sessions()
        self.debug_resolve_results = {}
        self.resolver_called = 0
        # Methods below log through self.logger, so set one up here
        # (assumes `import logging` at the module top; the name is arbitrary)
        self.logger = logging.getLogger("scope_manager")

    def get_hosts_with_ports(self,
                             filters_raw,
                             project_uuid,
                             page_number=None,
                             page_size=None):
        """ Returns hosts associated with a given project. Not all hosts
        are returned, only those within the requested page. The hosts are
        filtered by host/ip/scans/files properties. IPs and scans are
        included in the result, but files are not """
        # TODO: this method should not extract files, we need files
        # solely to make sure we imply the correct filtering mechanism
        # when `files` filter is used
        t = time.time()

        with self.session_spawner.get_session() as session:
            # Parse filters into an object for more comfortable work
            filters = Filters(filters_raw)

            scans_from_db = SubqueryBuilder.scans_basic_filtered(
                session, project_uuid, filters_raw)
            ips_query = SubqueryBuilder.ips_basic_filtered(
                session, project_uuid, filters.ips)
            ips_query_subq = aliased(IPDatabase,
                                     ips_query.subquery('all_ips_parsed'))
            files_query_aliased = SubqueryBuilder.files_basic_filtered(
                session, project_uuid, filters_raw)

            # Create hosts subquery
            ip_filters_exist = filters_raw.get('ip', False)
            scans_filters_exist = (filters_raw.get('port', False)
                                   or filters_raw.get('protocol', False)
                                   or filters_raw.get('banner', False))
            files_filters_exist = filters_raw.get('files', False)

            hosts_query = (session.query(HostDatabase).filter(
                HostDatabase.project_uuid == project_uuid,
                filters.hosts).join(
                    ips_query_subq,
                    HostDatabase.ip_addresses,
                    isouter=(not ip_filters_exist)).join(
                        scans_from_db,
                        IPDatabase.ports,
                        isouter=(not scans_filters_exist)))

            if files_filters_exist:
                hosts_query = (hosts_query.join(files_query_aliased,
                                                HostDatabase.files,
                                                isouter=False))

            hosts_ids = SubqueryBuilder.page_ids(hosts_query, HostDatabase,
                                                 page_number, page_size)
            selected_hosts = (hosts_query.from_self(
                HostDatabase.id).distinct().count())

            # Now select hosts, joining them with
            # all other subqueries from the prev step
            hosts_ips_scans_query = (session.query(HostDatabase).filter(
                HostDatabase.project_uuid == project_uuid,
                HostDatabase.id.in_(hosts_ids),
                filters.hosts).join(
                    ips_query_subq,
                    HostDatabase.ip_addresses,
                    isouter=(not ip_filters_exist)).join(
                        scans_from_db,
                        IPDatabase.ports,
                        isouter=(not scans_filters_exist)))

            if files_filters_exist:
                hosts_from_db = (hosts_ips_scans_query.join(
                    files_query_aliased, HostDatabase.files,
                    isouter=False).options(
                        contains_eager(HostDatabase.ip_addresses,
                                       alias=ips_query_subq).contains_eager(
                                           IPDatabase.ports,
                                           alias=scans_from_db),
                        contains_eager(HostDatabase.files,
                                       alias=files_query_aliased)).all())
            else:
                hosts_from_db = (hosts_ips_scans_query.options(
                    contains_eager(HostDatabase.ip_addresses,
                                   alias=ips_query_subq).contains_eager(
                                       IPDatabase.ports,
                                       alias=scans_from_db)).all())

            # Reformat each host to a JSON-like object
            hosts = sorted(map(
                lambda each_host: each_host.dict(include_ips=True,
                                                 include_ports=True),
                hosts_from_db),
                           key=lambda x: x['hostname'])

        total_db_hosts = self.count_hosts(project_uuid)

        self.logger.info(
            "Selecting hosts: from {} hosts, filter: {}. Finished in {}. @{}".
            format(total_db_hosts, filters_raw, time.time() - t,
                   project_uuid))

        # Together with the hosts list, return the total amount of
        # hosts in the db
        return {
            "total_db_hosts": total_db_hosts,
            "selected_hosts": selected_hosts,
            "hosts": hosts
        }

    def get_ips(self, filters, project_uuid):
        """ Select all ips which are related to the project specified by
        project_uuid and which pass the filters """
        t = time.time()

        with self.session_spawner.get_session() as session:
            # Parse filters into an object for more comfortable work
            parsed_filters = Filters(filters)

            # Scans
            scans_filters_exist = (filters.get('port', False)
                                   or filters.get('protocol', False)
                                   or filters.get('banner', False))
            scans_from_db = SubqueryBuilder.scans_basic_filtered(
                session, project_uuid, filters)

            # Files
            files_filters_exist = filters.get('files', False)
            files_query_aliased = SubqueryBuilder.files_basic_filtered(
                session, project_uuid, filters)

            # Select IPs + Scans which passed the filters
            ips_query = (session.query(IPDatabase.target).filter(
                IPDatabase.project_uuid == project_uuid, parsed_filters.ips))

            if scans_filters_exist:
                ips_query = (ips_query.join(scans_from_db,
                                            IPDatabase.ports,
                                            isouter=False))

            if files_filters_exist:
                ips_query = (ips_query.join(files_query_aliased,
                                            IPDatabase.files,
                                            isouter=False))

            ips_from_db = ips_query.distinct().all()

        self.logger.info(
            "Selecting ips only: filter: {}. Finished in {}. @{}".format(
                filters, time.time() - t, project_uuid))

        return list(map(lambda x: x[0], ips_from_db))

    def get_ips_with_ports(self,
                           filters,
                           project_uuid,
                           page_number=None,
                           page_size=None):
        """ Returns ips that are associated with a given project.
        Not all ips are selected, only those that are within the
        requested page """
        t = time.time()

        with self.session_spawner.get_session() as session:
            # Parse filters into an object for more comfortable work
            parsed_filters = Filters(filters)

            # Scans
            scans_filters_exist = (filters.get('port', False)
                                   or filters.get('protocol', False)
                                   or filters.get('banner', False))
            scans_from_db = SubqueryBuilder.scans_basic_filtered(
                session, project_uuid, filters)

            # Files
            files_filters_exist = filters.get('files', False)
            files_query_aliased = SubqueryBuilder.files_basic_filtered(
                session, project_uuid, filters)

            # Select IPs + Scans which passed the filters
            ips_query = (session.query(IPDatabase).filter(
                IPDatabase.project_uuid == project_uuid,
                parsed_filters.ips).join(
                    scans_from_db,
                    IPDatabase.ports,
                    isouter=(not scans_filters_exist)).join(
                        files_query_aliased,
                        IPDatabase.files,
                        isouter=(not files_filters_exist)))

            ips_query_subq = aliased(IPDatabase,
                                     ips_query.subquery('all_ips_parsed'))
            ids_limited = SubqueryBuilder.page_ids(ips_query, IPDatabase,
                                                   page_number, page_size)

            ips_request = (session.query(IPDatabase).filter(
                IPDatabase.project_uuid == project_uuid,
                IPDatabase.id.in_(ids_limited),
                parsed_filters.ips).join(
                    scans_from_db,
                    IPDatabase.ports,
                    isouter=(not scans_filters_exist)))

            if files_filters_exist:
                ips_from_db = (ips_request.join(
                    files_query_aliased,
                    IPDatabase.files,
                    isouter=(not files_filters_exist)).options(
                        joinedload(IPDatabase.hostnames),
                        contains_eager(IPDatabase.files,
                                       alias=files_query_aliased),
                        contains_eager(IPDatabase.ports,
                                       alias=scans_from_db)).all())
            else:
                ips_from_db = (ips_request.options(
                    joinedload(IPDatabase.hostnames),
                    contains_eager(IPDatabase.ports,
                                   alias=scans_from_db)).all())

            selected_ips = ips_query.from_self(
                IPDatabase.id).distinct().count()

            ips = sorted(map(
                lambda each_ip: each_ip.dict(include_ports=True,
                                             include_hostnames=True),
                ips_from_db),
                         key=lambda x: x['ip_address'])

        total_db_ips = self.count_ips(project_uuid)

        self.logger.info(
            "Selecting ips: from {} ips, filter: {}. Finished in {}. @{}".
            format(total_db_ips, filters, time.time() - t, project_uuid))

        # Together with ips, return the total amount of ips in the database
        return {
            "total_db_ips": total_db_ips,
            "selected_ips": selected_ips,
            "ips": ips
        }

    def count_ips(self, project_uuid):
        """ Counts ip entries in the database (for a single project) """
        return IPDatabase.count(project_uuid)

    def count_hosts(self, project_uuid):
        """ Counts host entries in the database (for a single project) """
        return HostDatabase.count(project_uuid)

    async def create_ip(self, ip_address, project_uuid):
        with self.session_spawner.get_session() as session:
            ips_locked = (session.query(ProjectDatabase).filter(
                ProjectDatabase.project_uuid ==
                project_uuid).one().ips_locked)

        if ips_locked:
            return {"status": "error", "text": "Ips are locked"}

        return await IPDatabase.create(target=ip_address,
                                       project_uuid=project_uuid)

    async def create_ips_network(self, network, project_uuid):
        ips = IPNetwork(network)
        return await self.create_batch_ips(list(map(str, ips)), project_uuid)

    async def create_batch_ips(self, ips, project_uuid):
        with self.session_spawner.get_session() as session:
            ips_locked = (session.query(ProjectDatabase).filter(
                ProjectDatabase.project_uuid ==
                project_uuid).one().ips_locked)

        if ips_locked:
            return {"status": "error", "text": "Ips are locked"}

        results = {"status": "success", "new_scopes": []}
        to_add = []

        for ip_address in ips:
            if await IPDatabase.find(target=ip_address,
                                     project_uuid=project_uuid) is None:
                to_add.append(ip_address)

        try:
            t = time.time()
            current_date = datetime.datetime.utcnow()
            new_ips = [{
                "target": ip_address,
                "comment": "",
                "project_uuid": project_uuid,
                "task_id": None,
                "date_added": str(current_date)
            } for ip_address in to_add]

            if new_ips:
                self.session_spawner.engine.execute(
                    IPDatabase.__table__.insert(), new_ips)

                with self.session_spawner.get_session() as session:
                    results["new_scopes"] = list(
                        map(lambda ip: ip.dict(),
                            session.query(IPDatabase).filter(
                                IPDatabase.project_uuid == project_uuid,
                                IPDatabase.target.in_(to_add))))

            self.logger.info("Added batch ips: {}@{} in {}".format(
                ips, project_uuid, time.time() - t))
        except Exception as exc:
            self.logger.error("{} adding batch ips: {}@{}".format(
                str(exc), ips, project_uuid))
            return {"status": "error", "text": str(exc)}

        return results

    async def create_host(self, hostname, project_uuid):
        """ Before creating a host, check whether it is already in the db;
        create a new one only if necessary """
        with self.session_spawner.get_session() as session:
            hosts_locked = (session.query(ProjectDatabase).filter(
                ProjectDatabase.project_uuid ==
                project_uuid).one().hosts_locked)

        if hosts_locked:
            return {"status": "error", "text": "Hosts are locked"}

        return await HostDatabase.create(target=hostname,
                                         project_uuid=project_uuid)

    async def delete_scope(self, scope_id, scope_type):
        """ Deletes a scope by its id """
        if scope_type == "ip_address":
            return await IPDatabase.delete_scope(scope_id)

        return await HostDatabase.delete_scope(scope_id)

    async def update_scope(self, scope_id, comment, scope_type):
        """ Updates the comment on a scope """
        if scope_type == "ip_address":
            return await IPDatabase.update(scope_id, comment)

        return await HostDatabase.update(scope_id, comment)

    async def get_tasks_filtered(self, project_uuid, ips=None, hosts=None):
        """ Get the tasks associated with certain targets """
        get_result = await TaskDatabase.get_tasks(project_uuid, ips, hosts)

        if get_result["status"] == "success":
            return {
                "status": "success",
                "active":
                list(map(lambda task: task.dict(), get_result["active"])),
                "finished":
                list(map(lambda task: task.dict(), get_result["finished"]))
            }

        self.logger.error(get_result["text"])
        return get_result

    async def resolve_scopes(self, scopes_ids, project_uuid):
        """ Using all the ids of scopes, resolve the hosts. For now we
        resolve ALL the scopes that are related to the project_uuid """
        self.resolver_called += 1

        with self.session_spawner.get_session() as session:
            # Select all hosts from the db
            project_hosts = session.query(HostDatabase).filter(
                HostDatabase.project_uuid == project_uuid).options(
                    joinedload(HostDatabase.ip_addresses)).all()

        if scopes_ids is None:
            to_resolve = project_hosts
        else:
            # Yet we cannot resolve only specific hosts.
            # TODO: add such possibility
            to_resolve = list(
                filter(lambda x: x.id in scopes_ids, project_hosts))

        loop = asyncio.get_event_loop()
        # Keep nameservers_ips bound even if the setup below fails;
        # parse_resolve_results receives it either way
        nameservers_ips = None

        try:
            nameservers_ips = await get_nameservers(
                map(lambda host: host.target, to_resolve),
                logger=self.logger)
            resolver = aiodns.DNSResolver(loop=loop,
                                          nameservers=nameservers_ips)
        except Exception as exc:
            self.logger.error("{} during resolver setup {}@{}".format(
                str(exc), scopes_ids, project_uuid))
            resolver = aiodns.DNSResolver(loop=loop)

        current_retries = 0

        while current_retries < 3:
            resolve_results = await self._resolve(to_resolve, resolver)
            total_ips, failed_hosts = await self.parse_resolve_results(
                resolve_results, project_uuid, nameservers_ips)
            to_resolve = failed_hosts
            current_retries += 1

        if self.resolver_called % 5 == 0:
            print(self.debug_resolve_results)

        self.logger.info("Successfully resolved {} ips @{}".format(
            total_ips, project_uuid))

        return (total_ips, 0)

    async def _resolve(self, hosts, resolver):
        futures = []
        resolve_results = []

        for each_host in hosts:
            each_future = resolver.query(each_host.target, "A")
            each_future.database_host = each_host
            futures.append(each_future)

            # Resolve in batches of 10 to keep the number of in-flight
            # DNS queries bounded
            if len(futures) >= 10:
                (resolve_batch,
                 _) = await asyncio.wait(futures,
                                         return_when=asyncio.ALL_COMPLETED)
                resolve_results += resolve_batch
                futures = []

        if futures:
            (resolve_batch,
             _) = await asyncio.wait(futures,
                                     return_when=asyncio.ALL_COMPLETED)
            resolve_results += resolve_batch

        return resolve_results

    async def parse_resolve_results(self,
                                    resolve_results,
                                    project_uuid,
                                    nameservers_ips=None):
        failed_hosts = []
        total_ips = 0

        while resolve_results:
            await asyncio.sleep(0)
            each_future = resolve_results.pop()
            host = each_future.database_host
            hostname = host.target
            exc = each_future.exception()

            if exc:
                # An error happened during resolve
                exc_code = exc.args[0]
                exc_description = exc.args[1]

                if exc_code != 4:
                    print(exc_description)
                    failed_hosts.append(host)

                if hostname not in self.debug_resolve_results:
                    self.debug_resolve_results[hostname] = {
                        "status": "error",
                        "exception": {
                            "number": exc_code,
                            "description": exc_description
                        },
                        "nameservers": nameservers_ips
                    }
                else:
                    prev_result = self.debug_resolve_results[hostname]

                    if prev_result["status"] == "error":
                        if prev_result["exception"]["number"] != exc_code:
                            print(
                                "{} different exception: exc #{} -> #{}; {} -> {}"
                                .format(hostname,
                                        prev_result["exception"]["number"],
                                        exc_code,
                                        prev_result["exception"]
                                        ["description"], exc_description))
                    else:
                        print("{} no longer resolves: exc #{} {}".format(
                            hostname, exc_code, exc_description))

                continue

            result = each_future.result()

            if hostname not in self.debug_resolve_results:
                self.debug_resolve_results[hostname] = {
                    "status": "success",
                    # list.sort() returns None, so store a sorted copy
                    "results": sorted(result, key=lambda res: res.host),
                    "nameservers": nameservers_ips
                }
            else:
                prev_result = self.debug_resolve_results[hostname]

                if prev_result["status"] == "error":
                    print("{} now resolved. Old {}. New {} @ {}".format(
                        hostname, prev_result, result, nameservers_ips))

            with self.session_spawner.get_session() as session:
                ips_locked = (session.query(ProjectDatabase).filter(
                    ProjectDatabase.project_uuid ==
                    project_uuid).one().ips_locked)

                for each_result in result:
                    total_ips += 1
                    resolved_ip = each_result.host
                    found_ip = await IPDatabase.find(
                        target=resolved_ip, project_uuid=project_uuid)

                    if found_ip:
                        found = False

                        for each_inner_ip_address in host.ip_addresses:
                            if each_inner_ip_address.id == found_ip.id:
                                found = True

                        if not found:
                            host.ip_addresses.append(
                                session.merge(found_ip))
                            session.add(host)
                    elif not ips_locked:
                        ip_create_result = await IPDatabase.create(
                            target=resolved_ip, project_uuid=project_uuid)

                        if ip_create_result["status"] == "success":
                            newly_created_ip = ip_create_result["new_scope"]
                            host.ip_addresses.append(newly_created_ip)
                            session.add(host)

        return (total_ips, failed_hosts)
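# Sketch of the filters dict the ScopeManager getters consume. The keys are
# inferred from the filters_raw.get(...) calls above ('ip', 'port',
# 'protocol', 'banner', 'files'); the values are placeholders and their exact
# format is defined by the Filters class.
if __name__ == "__main__":
    example_filters = {
        "port": [80, 443],
        "banner": ["nginx"],
    }
    scope_manager = ScopeManager()
    page = scope_manager.get_hosts_with_ports(example_filters,
                                              "project-uuid",
                                              page_number=0,
                                              page_size=12)
    print(page["selected_hosts"], "of", page["total_db_hosts"])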
def __init__(self):
    self.sessions = Sessions()
async def save_raw_output(task_id, output, project_uuid):
    try:
        sessions_manager = Sessions()
        concated = "".join(output)
        saved_scans = []

        if concated:
            parsed_dict = xmltodict.parse(concated)

            # xmltodict collapses a single repeated element into a dict,
            # so normalize hosts to a list before iterating
            hosts = parsed_dict['nmaprun']['host']
            if not isinstance(hosts, list):
                hosts = [hosts]

            with sessions_manager.get_session() as session:
                for each_host in hosts:
                    address = each_host['address']['@addr']
                    target = await IPDatabase.find(
                        target=address, project_uuid=project_uuid)

                    if target is None:
                        # The scanned address is not in this project's scope
                        continue

                    # The same list/dict ambiguity applies to ports
                    ports = each_host['ports']['port']
                    if not isinstance(ports, list):
                        ports = [ports]

                    for port_data in ports:
                        port_number = int(port_data['@portid'])
                        # The <state> element parses into a dict; the actual
                        # state string sits in its '@state' attribute
                        port_state = port_data['state']['@state']

                        if port_state != 'closed':
                            new_scan = ScanDatabase(
                                scan_id=str(uuid.uuid4()),
                                target=target.id,
                                port_number=port_number,
                                task_id=task_id,
                                project_uuid=project_uuid)

                            try:
                                session.add(new_scan)
                                session.commit()
                            except Exception:
                                session.rollback()

                            saved_scans.append(target.id)

        return list(set(saved_scans))
    except Exception as e:
        # TODO: add logger here
        print("save_raw_output")
        print(e)
        raise e
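# Why save_raw_output normalizes hosts and ports to lists: xmltodict
# collapses a single repeated XML element into a plain value instead of a
# one-element list. A self-contained demonstration:
if __name__ == "__main__":
    import xmltodict

    single = xmltodict.parse("<r><host>a</host></r>")["r"]["host"]
    multi = xmltodict.parse("<r><host>a</host><host>b</host></r>")["r"]["host"]
    print(type(single).__name__, type(multi).__name__)  # -> str list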
async def parse_results(self):
    def save_scan(data):
        session = sessions.get_new_session()
        scans_ids = self.params["saver"].get('scans_ids', None)

        if scans_ids:
            target_scan = list(
                filter(lambda x: data["port_number"] == x["port_number"],
                       scans_ids))[0]
            target_scan_id = target_scan["scan_id"]

            new_scan = (session.query(ScanDatabase).filter(
                ScanDatabase.scan_id == target_scan_id).first())
            new_scan.banner = data["banner"]
            new_scan.protocol = data["protocol"]
        else:
            try:
                # The query returns a one-column row; use its id attribute
                ip = (session.query(IPDatabase.id).filter(
                    IPDatabase.target == data["target"],
                    IPDatabase.project_uuid ==
                    data["project_uuid"]).one())
                new_scan = ScanDatabase(scan_id=str(uuid4()),
                                        target=ip.id,
                                        port_number=data["port_number"],
                                        protocol=data["protocol"],
                                        banner=data["banner"],
                                        project_uuid=data["project_uuid"])
            except Exception as e:
                print("Nmap " + str(e))
                raise e

        try:
            session.add(new_scan)
            session.commit()
        except Exception:
            session.rollback()

        sessions.destroy_session(session)

    await self.read_stdout()
    await self.read_stderr()

    stdout = "".join(self.stdout)

    try:
        nmap_report = NmapParser.parse(stdout)
    except NmapParserException:
        nmap_report = NmapParser.parse(stdout, incomplete=True)

    sessions = Sessions()
    targets = [self.target]

    for scanned_host in nmap_report.hosts:
        for service_of_host in scanned_host.services:
            if service_of_host.open():
                save_scan({
                    'target': str(scanned_host.address),
                    'port_number': int(service_of_host.port),
                    'protocol': str(service_of_host.service),
                    'banner': str(service_of_host.banner),
                    'project_uuid': self.project_uuid
                })

    return targets
class Saver:
    def __init__(self, task_id, project_uuid):
        self.task_id = task_id
        self.project_uuid = project_uuid
        self.session_spawner = Sessions()

    async def save_raw_output(self, output):
        print("saving amass {}".format(self.task_id))

        try:
            updated_hosts = False
            updated_ips = False
            targets = ''.join(output).strip().split('\n')

            for target in targets:
                try:
                    splitted = target.split(',')
                    host = splitted[0]

                    if not re.match(HOSTNAME_REGEX, host):
                        print("{} is not a valid hostname for {}."
                              " Trying the other format".format(
                                  host, self.task_id))
                        splitted = target.split(' ')
                        host = splitted[0]
                        ips = splitted[1].split(',')

                        if not re.match(HOSTNAME_REGEX, host):
                            print("{} is not a valid hostname for {}."
                                  " Giving up on this line".format(
                                      host, self.task_id))
                            continue
                    else:
                        ips = splitted[1:]
                except Exception:
                    host = target
                    ips = []

                if self.find_anomalies(host):
                    print("[-] Hostname seems to be not valid", host)

                new_hosts, new_ips = await self.save_host_ips(host, ips)
                updated_hosts = updated_hosts or new_hosts
                updated_ips = updated_ips or new_ips

            return updated_hosts, updated_ips
        except Exception as e:
            print("Exception while saving amass", str(e))
            raise e

    @staticmethod
    def find_anomalies(hostname):
        return ' ' in hostname

    async def save_host_ips(self, host, ips):
        updated_hosts = False
        updated_ips = False

        host_db, created = await HostDatabase.get_or_create(
            host, self.project_uuid, self.task_id)

        if created:
            updated_hosts = True

        for ip in ips:
            if not re.match(IP_REGEX, ip):
                print("{} not a valid ip for {}".format(ip, self.task_id))
                continue

            ip_db, created = await IPDatabase.get_or_create(
                ip, self.project_uuid, self.task_id)

            if created:
                updated_ips = True

            if host_db and ip_db:
                found = False

                for ip_db_host in host_db.ip_addresses:
                    if ip_db_host.id == ip_db.id:
                        found = True
                        break

                if not found:
                    host_db.ip_addresses.append(ip_db)

                    with self.session_spawner.get_session() as session:
                        try:
                            session.add(host_db)
                            session.commit()
                            updated_hosts = True
                            updated_ips = True
                        except Exception as exc:
                            session.rollback()
                            print("[-] Save exception of {}+{}: {}".format(
                                host, ip, exc))

        return updated_hosts, updated_ips
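# The two amass line formats save_raw_output tries to parse, inferred from
# the split logic above (comma-separated first, then space-separated); the
# hostnames and addresses below are placeholders.
EXAMPLE_AMASS_OUTPUT = [
    "www.example.com,93.184.216.34\n",                # host,ip[,ip...]
    "api.example.com 93.184.216.34,93.184.216.35\n",  # host ip[,ip...]
]
# await Saver(task_id, project_uuid).save_raw_output(EXAMPLE_AMASS_OUTPUT)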
class Task(object):
    """ Base class for the task """

    def __init__(self, task_id, task_type, target, params, project_uuid):
        # ID returned from the queue
        self.task_id = task_id

        # Name of the task (nmap, dnsscan ...)
        self.task_type = task_type

        # Target of the task
        self.target = target

        # Special parameters
        self.params = params

        # Project on which the task has been launched
        self.project_uuid = project_uuid

        # Task's status
        self.status = None

        # Task's progress
        self.progress = None

        # Task's text
        self.text = None

        # Points to the asyncio.Process object
        # (if the task is launched via Popen)
        # Otherwise nothing
        self.proc = None

        # Keep track of the data. These start as lists so that
        # append_stdout/append_stderr below can append to them
        self.stdout = []
        self.stderr = []

        self.exit_code = None
        self.exchange = None
        self.sessions = Sessions()

        # Point to the object of ORM
        self.create_db_record()

    def get_id(self):
        """ Return the id of the current task """
        return self.task_id

    def set_status(self, new_status, progress=0, text=""):
        """ Change the status of the current task """
        self.status = new_status
        self.progress = progress

        session = self.sessions.get_new_session()
        task_db_object = session.query(TaskDatabase).filter_by(
            task_id=self.get_id()).first()
        task_db_object.status = new_status
        task_db_object.progress = progress
        task_db_object.text = text

        if new_status == 'Finished' or new_status == 'Aborted':
            task_db_object.date_finished = datetime.utcnow()

        session.commit()
        self.sessions.destroy_session(session)

    def append_stdout(self, stdout=""):
        self.stdout.append(stdout)

    def append_stderr(self, stderr=""):
        self.stderr.append(stderr)

    def get_status(self):
        """ Return the status of the current task """
        return self.status

    async def start(self):
        """ Launch the task """
        raise NotImplementedError

    def send_notification(self, command):
        """ Sends 'command' notification to the current process """
        raise NotImplementedError

    def wait_for_exit(self):
        raise NotImplementedError

    def create_db_record(self):
        """ Creates the record of the task in a special DB table """
        session = self.sessions.get_new_session()
        task_new_object = TaskDatabase(task_id=self.get_id(),
                                       task_type=self.task_type,
                                       target=json.dumps(self.target),
                                       params=json.dumps(self.params),
                                       project_uuid=self.project_uuid,
                                       stdout="",
                                       stderr="")
        session.add(task_new_object)
        session.commit()
        self.sessions.destroy_session(session)
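# Minimal subclass sketch showing the contract Task imposes on concrete
# tasks: implement start(), collect output via append_stdout/append_stderr,
# and report state through set_status(). The status strings follow the base
# class above; the command and everything else here is a bare-bones
# assumption, not a task type from this repo.
import asyncio

class SleepTask(Task):
    async def start(self):
        self.set_status("Working", progress=0)
        self.proc = await asyncio.create_subprocess_shell(
            "sleep 1",
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)
        out, err = await self.proc.communicate()
        self.append_stdout(out.decode())
        self.append_stderr(err.decode())
        self.exit_code = self.proc.returncode
        self.set_status("Finished" if self.exit_code == 0 else "Aborted",
                        progress=100)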