def scan(self):
    """Port-scan the target, then pair every host/web payload plugin with
    each recorded service for this project and verify them concurrently.
    """
    MH = models.HostResult
    MR = models.PortResult
    # ping != 0: scan the target expression as a single job; otherwise expand
    # to individual IPs (matches the sibling scanners' progress-saving note).
    ping = int(self.args.get('ping', 0))
    for target in [self.target] if ping else gethosts(self.target):
        self.portscan(target)
    ret = []
    payloads = BaseHostPlugin.payloads() + BaseWebPlugin.payloads()
    for plug in payloads:
        for H in gethosts(self.target):
            # All port results recorded for this host within the current project.
            for P in MR.select().join(MH).where(
                    (MH.host_ip == H) & (MH.projectid == self.Q.projectid)):
                if isinstance(plug, BaseHostPlugin):
                    host = BaseHost(str(P.host), str(P.port),
                                    service=str(P.service_name))
                    ret.append((plug, host))
                elif str(P.service_name) == 'http':
                    # NOTE(review): substring test — any port containing "443"
                    # (8443, 44300, ...) is treated as https; confirm intended.
                    hp = 'https' if '443' in str(P.port) else 'http'
                    url = '%s://%s:%s/' % (hp, str(P.host), str(P.port))
                    host = BaseWebSite(url)
                    ret.append((plug, host))
    # FIX: the original sized the pool to len(payloads), which grows without
    # bound with the plugin count and is zero when no plugins are installed.
    # Cap at 10 (consistent with sibling scanners) and never go below 1.
    pool = CoroutinePool(min(len(payloads), 10) or 1)
    for plug, host in ret:
        pool.spawn(self.payloadverify, plug, host)
    pool.join()
def scan(self):
    """Normalize the web target, port-scan it, crawl it, then run the web
    and http scans in parallel threads and persist the crawl results.
    """
    headers = json.loads(self.args.get('headers', "{}"))
    proxy = json.loads(self.args.get('proxy', "{}"))
    # Crawl depth; per the original note, level > 1 enables POST scanning.
    level = int(self.args.get('level', 1))
    threads = int(self.args.get('threads', 10))
    timeout = int(self.args.get('timeout', 60))
    sleep = int(self.args.get('sleep', 2))
    # Canonicalize the target to an http(s)://host/ form.
    if not self.target.startswith(('http', 'HTTP')):
        self.target = 'http://' + self.target
    if not self.target.endswith('/'):
        self.target += '/'
    for target in gethosts(self.target):
        self.portscan(target)
    # BUG FIX: 'level' was parsed from the user args but the crawler was
    # hard-coded to level=1; pass the parsed value through.
    self.crawle = Crawler(self.target, level=level, headers=headers,
                          proxy=proxy, threads=threads, timeout=timeout,
                          sleep=sleep)
    self.crawle.start()
    th = []
    th.append(threading.Thread(target=self.webscan))
    th.append(threading.Thread(target=self.httpscan))
    for t in th:
        t.start()
    for t in th:
        t.join()
    # Write the crawl's site results once both scan threads have finished.
    self.writewebsite(self.crawle.website)
def portscan(self, target):
    """Port scan: expand the target expression, drop blocked and (optionally)
    already-recorded hosts, then run PortScan and persist the results.
    """
    args = self.args
    write = bool(args.get('write', 1))
    ping = bool(args.get('ping', 0))
    threads = int(args.get('threads', 100))
    timeout = int(args.get('timeout', 5))
    isfilter = bool(args.get('isfilter', 0))
    ports = args.get('port', None)
    block = args.get('block', [])
    logging.info(
        '[portscan][host:%s][port:%s][write:%s][ping:%s][threads:%s][timeout:%s]'
        % (target, ports, write, ping, threads, timeout))
    candidates = set(gethosts(target)) - set(block)
    if isfilter:
        # Skip hosts already recorded for this project.
        H = models.HostResult
        known = set(h.host_ip for h in
                    H.select().where(H.projectid == self.Q.projectid))
        candidates -= known
    scanner = PortScan(candidates, ports=ports, neping=ping,
                       threads=threads, timeout=timeout)
    self.writehost(scanner.scan())
def scan(self): level = int(self.args.get('level', 1)) #post 扫描 headers = json.loads(self.args.get('headers', "{}")) proxy = json.loads(self.args.get('proxy', "{}")) if not self.target.startswith(('http', 'HTTP')): self.target = 'http://' + self.target if not self.target.endswith('/'): self.target += '/' for target in gethosts(self.target): self.portscan(target) self.crawle = Crawler(self.target) self.crawle.settings.update(self.args) #self.crawle.settings.update(proxy={'http':'http://127.0.0.1:1111','https':'http://127.0.0.1:1111'}) th = [] th.append(threading.Thread(target=self.crawle.run1)) th.append(threading.Thread(target=self.webscan)) th.append(threading.Thread(target=self.httpscan)) for t in th: #t.daemon = True t.start() for t in th: t.join() #扫描完成写入httpret结果 self.writewebsite(self.crawle.website)
def __init__(self, hosts, ports=None, neping=None, threads=None, timeout=None):
    """Prepare a port scan.

    hosts   -- target expression, expanded to IPs via gethosts()
    ports   -- port expression; falls back to the module-level PORTS default
    neping  -- truthy to control the liveness/ping behavior (TODO confirm
               exact semantics against scan())
    threads -- worker count (default 100)
    timeout -- per-connection timeout in seconds (default 10)
    """
    self.hosts = gethosts(hosts)
    # Fall back to the default port list when no explicit ports are given.
    self.ports = getports(ports or PORTS)
    # FIX: normalize to a real bool — the original `neping and True`
    # leaked None/0 through whenever neping was falsy.
    self.neping = bool(neping)
    self.threads = threads or 100
    self.timeout = timeout or 10
    self.result = {}
def scan(self):
    """Port-scan the target, then probe every recorded http service that was
    not already classified as tcp/http.
    """
    # When liveness ping is disabled, scan each IP separately so progress
    # can be checkpointed per host (original note, translated).
    ping = int(self.args.get('ping', 1))
    targets = [self.target] if ping else gethosts(self.target)
    for target in targets:
        self.portscan(target)
    MP = models.PortResult
    # Services reported as 'http' whose port_type is not 'tcp/http'.
    criteria = (MP.port_type != 'tcp/http') & (MP.service_name == 'http')
    pool = CoroutinePool(10)
    for row in MP.select().where(criteria):
        pool.spawn(self.selecthttp, row)
    pool.join()
def scan(self):
    """Run the user-selected plugin group(s) against every host/port
    recorded for this project, or against the raw target expression when
    no matching project exists.
    """
    MP = models.Project
    MH = models.HostResult
    MR = models.PortResult
    # Comma-separated plugin names supplied via the 'plug' argument.
    plug_names = self.args.get('plug', '').split(',')
    for plug_name in plug_names:
        logging.info('Scan plug name: %s' % plug_name)
        hosts = self.target
        ret = []  # host IPs to scan
        try:
            # Target names an existing project: use its recorded hosts.
            R = MP.get(MP.project_id == hosts)
            for H in MH.select().where(MH.projectid == R):
                ret.append(str(H.host_ip))
        except MP.DoesNotExist:
            # No such project: expand the raw target expression instead.
            for H in gethosts(self.target):
                ret.append(H)
        wret = []  # BaseWebSite targets (http services)
        hret = []  # BaseHost targets (everything else)
        for H in ret:
            for P in MR.select().join(MH).where(
                    (MH.host_ip == H) & (MH.projectid == self.Q.projectid)):
                if str(P.service_name) == 'http':
                    # NOTE(review): substring test — any port containing "443"
                    # (8443, 44300, ...) is treated as https; confirm intended.
                    hp = 'https' if '443' in str(P.port) else 'http'
                    url = '%s://%s:%s/' % (hp, str(P.host), str(P.port))
                    host = BaseWebSite(url)
                    wret.append(host)
                else:
                    host = BaseHost(str(P.host), str(P.port),
                                    service=str(P.service_name))
                    hret.append(host)
        ret = []  # reused: now holds (plugin, target) work pairs
        for plug in PluginsManage.get_plugins(plug_name):
            if isinstance(plug, BaseHostPlugin):
                for host in hret:
                    ret.append((plug, host))
            elif isinstance(plug, BaseWebPlugin):
                for host in wret:
                    ret.append((plug, host))
        pool = CoroutinePool(10)
        for plug, host in ret:
            pool.spawn(self.payloadverify, plug, host)
        pool.join()
def scan(self):
    """Port-scan the target.

    When liveness ping is disabled, expand the target and scan each IP as a
    separate job so progress can be checkpointed per host (original note,
    translated).
    """
    ping = int(self.args.get('ping', 1))
    if ping:
        targets = [self.target]
    else:
        targets = gethosts(self.target)
    for t in targets:
        self.portscan(t)