def _request(self, url, body):
    """Send one request and report it if any flag string appears in the response.

    Returns '<url>,<body>;' when a flag matches, otherwise ''.
    """
    LOG.debug('request target: ' + url)
    response = self._call_method(url, data=body, headers=self._headers,
                                 cookies=self._cookies)
    # a single matching flag is enough to mark this request as interesting
    for flag in self._flags:
        if flag in response.text:
            return url + ',' + body + ';'
    return ''
def _scan_target(self, target):
    """Check that *target* resolves via DNS.

    Returns '<target>;' when the name resolves, '' otherwise.
    """
    try:
        LOG.debug('scan target: %s' % target)
        # getaddrinfo raises (e.g. socket.gaierror) for unresolvable names
        socket.getaddrinfo(target, None)
        return target + ';'
    # fixed Python-2-only 'except Exception, e' syntax; the file already
    # uses 'as e' elsewhere, and the binding was unused anyway
    except Exception:
        # any resolution failure simply means "not found"
        return ''
def _parse_args(self, args):
    """Parse 'key:value,key:value,...' pairs into self._args.

    A value may itself contain ':' — only the first colon separates key
    from value (the original split(':') silently dropped everything after
    a second colon).
    """
    for pair_str in args.split(','):
        # split at the first colon only, so values keep embedded colons
        pair = pair_str.split(':', 1)
        LOG.debug('key: %s, value: %s' % (pair[0], pair[1]))
        self._args[pair[0]] = pair[1]
    self._unite_args()
def _parse_args(self, args):
    """Parse 'key:value,key:value,...' pairs into self._args.

    A value may itself contain ':' — only the first colon separates key
    from value (the original split(':') silently dropped everything after
    a second colon).
    """
    for pair_str in args.split(','):
        # split at the first colon only, so values keep embedded colons
        pair = pair_str.split(':', 1)
        LOG.debug('key: %s, value: %s' % (pair[0], pair[1]))
        self._args[pair[0]] = pair[1]
    self._unite_args()
def get_result(self):
    """
    Get result from result queue, do task index confirm meanwhile

    Return '' if all tasks have been confirmed

    Raises:
        Queue.Empty: can not get response within timeout
    """
    # loop instead of recursing: a duplicate result is discarded and we
    # simply wait for the next one
    while self._task_confirm_num != self._cur_task_num:
        # may raise Queue.Empty here on timeout
        raw = self._result_queue.get(block=True, timeout=self._timeout)
        fields = raw.split('|')
        index = int(fields[1], 10)
        payload = '|'.join(fields[2:])
        # skip results whose index has already been confirmed
        if self._task_confirm_list[index] != 0:
            continue
        self._task_confirm_list[index] = 1
        self._task_confirm_num += 1
        LOG.debug('get result: %s' % raw.replace('\n', ' '))
        return payload
    # every task index has been confirmed
    return ''
def _scan_target(self, target):
    """Check that *target* resolves via DNS.

    Returns '<target>;' when the name resolves, '' otherwise.
    """
    try:
        LOG.debug('scan target: %s' % target)
        # getaddrinfo raises (e.g. socket.gaierror) for unresolvable names
        socket.getaddrinfo(target, None)
        return target + ';'
    # fixed Python-2-only 'except Exception, e' syntax; the file already
    # uses 'as e' elsewhere, and the binding was unused anyway
    except Exception:
        # any resolution failure simply means "not found"
        return ''
def get_task(self):
    """Fetch the next task, remembering its flag and index for the reply.

    Returns a (flag, payload) tuple, where payload is the task body with
    the flag and index fields stripped.
    """
    raw = self._task_queue.get()
    LOG.debug('get task:%s' % raw)
    fields = raw.split('|')
    self._cur_task_flag, self._cur_task_index = fields[0], fields[1]
    payload = '|'.join(fields[2:])
    return self._cur_task_flag, payload
def get_task(self):
    """Fetch the next task, remembering its flag and index for the reply.

    Returns a (flag, payload) tuple, where payload is the task body with
    the flag and index fields stripped.
    """
    raw = self._task_queue.get()
    # newlines are flattened in the log line only, not in the task itself
    LOG.debug('get task: %s' % raw.replace('\n', ' '))
    fields = raw.split('|')
    self._cur_task_flag, self._cur_task_index = fields[0], fields[1]
    payload = '|'.join(fields[2:])
    return self._cur_task_flag, payload
def put_task(self, pre_str, task):
    """
    Put task into task queue, update current task list and current task
    number meanwhile.
    """
    entry = "|".join([pre_str, str(self._cur_task_num), task])
    LOG.debug('put task: %s' % entry.replace('\n', ' '))
    self._task_queue.put(entry)
    self._cur_task_num += 1
    self._cur_task_list.append(entry)
def _put_task(self, pre_str, task):
    """
    Put task into task queue, update current task list and current task
    number meanwhile
    """
    # ':' is this protocol's field separator (see _get_do_task_proc)
    entry = ":".join([pre_str, str(self._cur_task_num), task])
    LOG.debug('put task into queue:%s' % entry)
    self._task_queue.put(entry)
    self._cur_task_num += 1
    self._cur_task_list.append(entry)
def _put_task(self, pre_str, task):
    """
    Put task into task queue, update current task list and current task
    number meanwhile
    """
    # ':' is this protocol's field separator (see _get_do_task_proc)
    entry = ":".join([pre_str, str(self._cur_task_num), task])
    LOG.debug('put task into queue:%s' % entry)
    self._task_queue.put(entry)
    self._cur_task_num += 1
    self._cur_task_list.append(entry)
def _parse_args_for_swarm(self):
    """Serialize the options a swarm slave needs into one ','-joined string."""
    keys = ('m_addr', 'm_port', 'authkey', 'process_num', 'thread_num',
            'domain_timeout')
    s = ''
    for key in keys:
        s = self._put_key_value(s, key, getattr(self._args, key))
    # remove the last ','
    s = s[:-1]
    LOG.debug('args pass to swarm-s: ' + s)
    return s
def reorganize_tasks(self):
    """Drain the task queue, then re-queue every unconfirmed task."""
    # first clear tasks in task queue
    while True:
        try:
            self._task_queue.get(block=False)
        except Queue.Empty:
            break
    # put tasks which have not been confirmed again
    for index, confirmed in enumerate(self._task_confirm_list):
        if confirmed == 0:
            LOG.debug('put task into queue again: %s' % self._cur_task_list[index])
            self._task_queue.put(self._cur_task_list[index])
def _parse_args_for_swarm(self):
    """Serialize the options a swarm slave needs into one ','-joined string."""
    keys = ('m_addr', 'm_port', 'authkey', 'process_num', 'thread_num',
            'domain_timeout')
    s = ''
    for key in keys:
        s = self._put_key_value(s, key, getattr(self._args, key))
    # remove the last ','
    s = s[:-1]
    LOG.debug('args pass to swarm-s: ' + s)
    return s
def reorganize_tasks(self):
    """Drain the task queue, then re-queue every unconfirmed task."""
    # first clear tasks in task queue
    while True:
        try:
            self._task_queue.get(block=False)
        except Queue.Empty:
            break
    # put tasks which have not been confirmed again
    for index, confirmed in enumerate(self._task_confirm_list):
        if confirmed == 0:
            pending = self._cur_task_list[index]
            LOG.debug('put task: %s' % pending.replace('\n', ' '))
            self._task_queue.put(pending)
def _send2one(self, content, ip, port):
    """Send *content* to a single slave at (ip, port) over TCP.

    Timeouts and socket errors are logged and swallowed (best effort).
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.settimeout(self._args.timeout)
        LOG.debug('connect to %s:%d...' % (ip, port))
        s.connect((ip, port))
        s.send(content)
        LOG.debug('connection to %s:%d close' % (ip, port))
    except socket.timeout:
        LOG.warning('%s:%d lost response' % (ip, port))
    except socket.error as arg:
        # NOTE(review): arg[0]/arg[1] indexing is a Python 2 socket.error
        # idiom — confirm before any Python 3 migration
        LOG.error('socket error while connecting to %s:%d errno %d: %s'
                  % (ip, port, arg[0], arg[1]))
    finally:
        # the original leaked the socket on every exception path
        s.close()
def _send2one(self, content, ip, port):
    """Send *content* to a single slave at (ip, port) over TCP.

    Timeouts and socket errors are logged and swallowed (best effort).
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.settimeout(self._args.timeout)
        LOG.debug('connect to %s:%d...' % (ip, port))
        s.connect((ip, port))
        s.send(content)
        LOG.debug('connection to %s:%d close' % (ip, port))
    except socket.timeout:
        LOG.warning('%s:%d lost response' % (ip, port))
    except socket.error as arg:
        # NOTE(review): arg[0]/arg[1] indexing is a Python 2 socket.error
        # idiom — confirm before any Python 3 migration
        LOG.error('socket error while connecting to %s:%d errno %d: %s'
                  % (ip, port, arg[0], arg[1]))
    finally:
        # the original leaked the socket on every exception path
        s.close()
def _parse_url(self, dst, src):
    """
    Check whether target url 'dst' is in the same domain (including port)
    as url 'src', and convert it into a complete url without params.

    Returns:
        String of complete url with query params if it has.
        If the target url is not in the same domain, return ''.
    """
    LOG.debug('detecting url: ' + dst)
    # break the source url apart; its scheme/netloc define "same domain"
    s_parsed = urlparse.urlparse(src)
    s_scheme = s_parsed.scheme
    s_netloc = s_parsed.netloc
    s_cur_dir = s_parsed.path
    # reduce the source path to its directory, without a trailing '/'
    # NOTE(review): raises IndexError if src has an empty path — presumably
    # callers always pass a url with at least '/'; confirm.
    if s_cur_dir[-1] != '/':
        s_cur_dir = '/'.join(s_cur_dir.split('/')[:-1])
    else:
        s_cur_dir = s_cur_dir[:-1]
    d_parsed = urlparse.urlparse(dst)
    d_scheme = d_parsed.scheme
    # normalize the destination netloc to carry an explicit port so that
    # it can be compared against the source netloc
    if d_parsed.netloc.find(':') == -1 and d_parsed.netloc != '':
        if d_scheme == 'http':
            d_netloc = d_parsed.netloc + ':80'
        elif d_scheme == 'https':
            d_netloc = d_parsed.netloc + ':443'
        elif d_scheme == '':
            # scheme-relative url: inherit the source scheme's default port
            d_netloc = d_parsed.netloc + ':80' if s_scheme == 'http' else d_parsed.netloc + ':443'
        else:
            d_netloc = d_parsed.netloc
    else:
        d_netloc = d_parsed.netloc
    # add '/' as prefix if the path does not start with '/'
    if d_parsed.path != '':
        d_path = '/' + d_parsed.path if d_parsed.path[
            0] != '/' else d_parsed.path
    else:
        d_path = '/'
    d_query = d_parsed.query
    # if it is a relative url, resolve it against the source directory
    if d_netloc == '':
        return urlparse.ParseResult(s_scheme, s_netloc, s_cur_dir + d_path,
                                    '', d_query, '').geturl()
    elif d_netloc == s_netloc and (d_scheme == s_scheme or d_scheme == ''):
        # same host:port and compatible scheme -> rebuild a canonical url
        return urlparse.ParseResult(s_scheme, s_netloc, d_path, '', d_query,
                                    '').geturl()
    else:
        # different domain/port: not in scope
        return ''
def get_do_task(self):
    """Spawn worker processes to consume tasks and wait for them to finish.

    process_num == 0 means one worker per CPU core.
    """
    proc = []
    if self._args['process_num'] == 0:
        worker_num = multiprocessing.cpu_count()
    else:
        worker_num = self._args['process_num']
    for cur in range(worker_num):
        p = multiprocessing.Process(target=self._get_do_task_proc)
        p.start()
        proc.append(p)
    for cur in proc:
        # BUG FIX: the original called proc.join() — join() on the *list*
        # (an AttributeError) — instead of joining each process; compare
        # the correct loop in the other get_do_task variant
        cur.join()
    LOG.debug('task completed')
def get_do_task(self):
    """Spawn worker processes to consume tasks and wait for them to finish.

    process_num == 0 means one worker per CPU core.
    """
    proc = []
    if self._args['process_num'] == 0:
        worker_num = multiprocessing.cpu_count()
    else:
        worker_num = self._args['process_num']
    for cur in range(worker_num):
        p = multiprocessing.Process(target=self._get_do_task_proc)
        p.start()
        proc.append(p)
    for cur in proc:
        # BUG FIX: the original called proc.join() — join() on the *list*
        # (an AttributeError) — instead of joining each process; compare
        # the correct loop in the other get_do_task variant
        cur.join()
    LOG.debug('task completed')
def _send2one_r(self, content, ip, port, result):
    """Send *content* to one slave and collect its reply into *result*.

    The payload is terminated with an '__EOF__' marker; literal
    '__EOF__' sequences inside the content are escaped as '__EOF___'
    first so the receiver can restore them.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.settimeout(self._args.timeout)
        LOG.info('connecting to %s:%d...' % (ip, port))
        s.connect((ip, port))
        s.send(content.replace('__EOF__', '__EOF___'))
        s.send('__EOF__')
        r = s.recv(4096)
        if r != '':
            result.append(r)
        LOG.debug('connection to %s:%d close' % (ip, port))
    except socket.timeout:
        # fixed Python-2-only 'except socket.timeout,e' syntax;
        # the binding was unused
        LOG.warning('%s:%d lost response' % (ip, port))
    finally:
        # close on every path (the original leaked the socket on timeout)
        s.close()
    return ''
def _send2one_r(self, content, ip, port, result):
    """Send *content* to one slave and collect its reply into *result*.

    The payload is terminated with an '__EOF__' marker; literal
    '__EOF__' sequences inside the content are escaped as '__EOF___'
    first so the receiver can restore them.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.settimeout(self._args.timeout)
        LOG.info('connecting to %s:%d...' % (ip, port))
        s.connect((ip, port))
        s.send(content.replace('__EOF__', '__EOF___'))
        s.send('__EOF__')
        r = s.recv(4096)
        if r != '':
            result.append(r)
        LOG.debug('connection to %s:%d close' % (ip, port))
    except socket.timeout:
        # fixed Python-2-only 'except socket.timeout,e' syntax;
        # the binding was unused
        LOG.warning('%s:%d lost response' % (ip, port))
    finally:
        # close on every path (the original leaked the socket on timeout)
        s.close()
    return ''
def main():
    """Slave entry point: read the listen port, then serve the master host."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', dest='s_port', metavar='LISTEN PORT', type=int,
                        required=True,
                        help="Listen port to receive info from master")
    try:
        cli = parser.parse_args()
        init_logger('/var/log/swarm_s.log', True, False)
        slave = SSwarm(cli.s_port)
        # pull and parse the arguments sent by mswarm
        slave.get_parse_args()
        # then loop fetching and executing commands from the master host
        slave.get_do_task()
    except SwarmBaseException as e:
        LOG.debug(str(e))
    return
def _parse_url(self, dst, src):
    """
    Check whether target url 'dst' is in the same domain (including port)
    as url 'src', and convert it into a complete url without params.

    Returns:
        String of complete url with query params if it has.
        If the target url is not in the same domain, return ''.
    """
    LOG.debug('detecting url: ' + dst)
    # break the source url apart; its scheme/netloc define "same domain"
    s_parsed = urlparse.urlparse(src)
    s_scheme = s_parsed.scheme
    s_netloc = s_parsed.netloc
    s_cur_dir = s_parsed.path
    # reduce the source path to its directory, without a trailing '/'
    # NOTE(review): raises IndexError if src has an empty path — presumably
    # callers always pass a url with at least '/'; confirm.
    if s_cur_dir[-1] != '/':
        s_cur_dir = '/'.join(s_cur_dir.split('/')[:-1])
    else:
        s_cur_dir = s_cur_dir[:-1]
    d_parsed = urlparse.urlparse(dst)
    d_scheme = d_parsed.scheme
    # normalize the destination netloc to carry an explicit port so that
    # it can be compared against the source netloc
    if d_parsed.netloc.find(':') == -1 and d_parsed.netloc != '':
        if d_scheme == 'http':
            d_netloc = d_parsed.netloc + ':80'
        elif d_scheme == 'https':
            d_netloc = d_parsed.netloc + ':443'
        elif d_scheme == '':
            # scheme-relative url: inherit the source scheme's default port
            d_netloc = d_parsed.netloc + ':80' if s_scheme == 'http' else d_parsed.netloc + ':443'
        else:
            d_netloc = d_parsed.netloc
    else:
        d_netloc = d_parsed.netloc
    # add '/' as prefix if the path does not start with '/'
    if d_parsed.path != '':
        d_path = '/' + d_parsed.path if d_parsed.path[0] != '/' else d_parsed.path
    else:
        d_path = '/'
    d_query = d_parsed.query
    # if it is a relative url, resolve it against the source directory
    if d_netloc == '':
        return urlparse.ParseResult(s_scheme, s_netloc, s_cur_dir + d_path,
                                    '', d_query, '').geturl()
    elif d_netloc == s_netloc and (d_scheme == s_scheme or d_scheme == ''):
        # same host:port and compatible scheme -> rebuild a canonical url
        return urlparse.ParseResult(s_scheme, s_netloc, d_path, '', d_query,
                                    '').geturl()
    else:
        # different domain/port: not in scope
        return ''
def get_do_task(self):
    """Spawn worker processes plus a master-listener thread, then wait
    for the workers to finish."""
    if self._args.process_num == 0:
        worker_num = multiprocessing.cpu_count()
    else:
        worker_num = self._args.process_num
    workers = []
    for _ in range(worker_num):
        p = multiprocessing.Process(target=self._get_do_task_proc)
        p.start()
        workers.append(p)
    # start a new thread to listen for commands from the master host;
    # it is a daemon thread, so we need not wait for it to exit
    t = threading.Thread(target=self._response_master)
    t.daemon = True
    t.start()
    for p in workers:
        p.join()
    LOG.debug('task completed')
def _get_do_task_proc(self):
    """Worker loop: connect to the master's manager, load the configured
    module, and run its Slave on tasks until '__off__' arrives."""
    self._manager = SSwarmManager(address=(self._args.m_addr,
                                           self._args.m_port),
                                  authkey=self._args.authkey)
    LOG.debug('load module: ' + self._args.mod)
    LOG.debug('begin to get and do task...')
    mod_path = 'modules.' + self._args.mod + '.' + self._args.mod
    try:
        module = importlib.import_module(mod_path)
    except ImportError as e:
        raise SwarmModuleException('an error occured when load module:' +
                                   self._args.mod)
    # create Slave class of this module
    mod_slave = getattr(module, 'Slave')(self._args)
    while True:
        flag, task = self._manager.get_task()
        if flag == '__off__':
            break
        # else use module to do task
        self._manager.put_result(mod_slave.do_task(task))
def _parse_charset(self):
    """Expand 'x-y' ranges in the configured charset and de-duplicate it.

    E.g. 'a-c1-3' -> 'abc123'. First-seen character order is preserved.

    Raises:
        Re-raises whatever the expansion raised (typically IndexError /
        TypeError on a malformed charset) after logging a critical message.
    """
    try:
        charset = self._args.domain_charset
        # expand every 'x-y' range into the full run of characters
        while True:
            index = charset.find('-')
            if index == -1:
                break
            begin_chr = charset[index - 1]
            end_chr = charset[index + 1]
            # join() instead of the original quadratic '+=' build
            expanded = ''.join(chr(x) for x in
                               range(ord(begin_chr), ord(end_chr) + 1))
            charset = charset.replace(begin_chr + '-' + end_chr, expanded)
        # drop duplicate characters, keeping first-seen order
        ret = ''.join(x for i, x in enumerate(charset) if charset.index(x) == i)
        LOG.debug('charset: %s' % ret)
        return ret
    # fixed Python-2-only 'except Exception, e' syntax; binding unused
    except Exception:
        LOG.critical('invalid subdomain name charset, or format error')
        raise
def get_do_task(self):
    """Spawn worker processes plus a master-listener thread, then wait
    for the workers to finish."""
    if self._args.process_num == 0:
        worker_num = multiprocessing.cpu_count()
    else:
        worker_num = self._args.process_num
    workers = []
    for _ in range(worker_num):
        p = multiprocessing.Process(target=self._get_do_task_proc)
        p.start()
        workers.append(p)
    # start a new thread to listen for commands from the master host;
    # it is a daemon thread, so we need not wait for it to exit
    t = threading.Thread(target=self._response_master)
    t.daemon = True
    t.start()
    for p in workers:
        p.join()
    LOG.debug('task completed')
def main():
    """Slave entry point: read the listen port, then serve the master host."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', dest='s_port', metavar='LISTEN PORT', type=int,
                        required=True,
                        help="Listen port to receive info from master")
    try:
        cli = parser.parse_args()
        init_logger('/var/log/swarm_s.log', True, False)
        slave = SSwarm(cli.s_port)
        # pull and parse the arguments sent by mswarm
        slave.get_parse_args()
        # then loop fetching and executing commands from the master host
        slave.get_do_task()
    except SwarmBaseException as e:
        LOG.debug(str(e))
    return
def _parse_charset(self):
    """Expand 'x-y' ranges in the configured charset and de-duplicate it.

    E.g. 'a-c1-3' -> 'abc123'. First-seen character order is preserved.

    Raises:
        Re-raises whatever the expansion raised (typically IndexError /
        TypeError on a malformed charset) after logging a critical message.
    """
    try:
        charset = self._args.domain_charset
        # expand every 'x-y' range into the full run of characters
        while True:
            index = charset.find('-')
            if index == -1:
                break
            begin_chr = charset[index - 1]
            end_chr = charset[index + 1]
            # join() instead of the original quadratic '+=' build
            expanded = ''.join(chr(x) for x in
                               range(ord(begin_chr), ord(end_chr) + 1))
            charset = charset.replace(begin_chr + '-' + end_chr, expanded)
        # drop duplicate characters, keeping first-seen order
        ret = ''.join(x for i, x in enumerate(charset) if charset.index(x) == i)
        LOG.debug('charset: %s' % ret)
        return ret
    # fixed Python-2-only 'except Exception, e' syntax; binding unused
    except Exception:
        LOG.critical('invalid subdomain name charset, or format error')
        raise
def _get_do_task_proc(self):
    """Worker loop: pull ':'-separated tasks from the manager's queue,
    run the matching scan, and push the tagged result back."""
    self._manager = SwarmManager(address=(self._args['m_addr'],
                                          self._args['m_port']),
                                 authkey=self._args['authkey'])
    self._manager.connect()
    self._task_queue = self._manager.get_task_queue()
    self._result_queue = self._manager.get_result_queue()
    # init scanners and other modules
    self._init_module()
    LOG.debug('begin to get and do task...')
    while True:
        task = self._task_queue.get()
        LOG.debug('get task:%s' % task)
        taskl = task.split(':')
        task_flag, task_index = taskl[0], taskl[1]
        if task_flag == '__off__':
            break
        if task_flag == '__doms__':
            result = self.do_domain_scan(taskl[2:])
        # NOTE(review): a flag other than '__doms__'/'__off__' would leave
        # 'result' unbound here (NameError) — presumably the master only
        # ever sends those two; confirm.
        result = ":".join([task_flag, task_index, result])
        LOG.debug('put result:%s' % result)
        self._result_queue.put(result)
def _get_do_task_proc(self):
    """Worker loop: pull ':'-separated tasks from the manager's queue,
    run the matching scan, and push the tagged result back."""
    self._manager = SwarmManager(address=(self._args['m_addr'],
                                          self._args['m_port']),
                                 authkey=self._args['authkey'])
    self._manager.connect()
    self._task_queue = self._manager.get_task_queue()
    self._result_queue = self._manager.get_result_queue()
    # init scanners and other modules
    self._init_module()
    LOG.debug('begin to get and do task...')
    while True:
        task = self._task_queue.get()
        LOG.debug('get task:%s' % task)
        taskl = task.split(':')
        task_flag, task_index = taskl[0], taskl[1]
        if task_flag == '__off__':
            break
        if task_flag == '__doms__':
            result = self.do_domain_scan(taskl[2:])
        # NOTE(review): a flag other than '__doms__'/'__off__' would leave
        # 'result' unbound here (NameError) — presumably the master only
        # ever sends those two; confirm.
        result = ":".join([task_flag, task_index, result])
        LOG.debug('put result:%s' % result)
        self._result_queue.put(result)
def _receive_master(self):
    """Accept one connection from the master and read an '__EOF__'
    terminated message.

    Returns the unescaped message body (escaped '__EOF___' sequences are
    restored to '__EOF__').
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # incase 'Address already in use error'
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind(('', self._s_port))
    LOG.debug('listen on port:%d' % self._s_port)
    s.listen(1)
    sock, addr = s.accept()
    LOG.debug('receive from master host...')
    buff = ''
    while True:
        d = sock.recv(4096)
        buff += d
        # BUG FIX: test the accumulated buffer, not just the last chunk,
        # so a terminator split across recv() calls is still seen; using
        # endswith() also avoids breaking early when an *escaped*
        # '__EOF___' appears in the middle of the message.
        # NOTE(review): a chunk boundary right before the final '_' of an
        # escaped marker could still end the loop early — inherent to
        # this escape scheme; confirm payloads never end that way.
        if buff.endswith('__EOF__'):
            break
    sock.send('ack')
    sock.close()
    s.close()
    # cut off last __EOF__
    buff = buff[:-7]
    # return to origin args
    buff = buff.replace('__EOF___', '__EOF__')
    return buff
def _get_do_task_proc(self):
    """Worker loop: connect to the master's manager, load the configured
    module, and run its Slave on tasks until '__off__' arrives."""
    self._manager = SSwarmManager(address=(self._args.m_addr,
                                           self._args.m_port),
                                  authkey=self._args.authkey)
    LOG.debug('load module: ' + self._args.mod)
    LOG.debug('begin to get and do task...')
    mod_path = 'modules.' + self._args.mod + '.' + self._args.mod
    try:
        module = importlib.import_module(mod_path)
    except ImportError as e:
        raise SwarmModuleException('an error occured when load module:' +
                                   self._args.mod)
    # create Slave class of this module
    mod_slave = getattr(module, 'Slave')(self._args)
    while True:
        flag, task = self._manager.get_task()
        if flag == '__off__':
            break
        # else use module to do task
        self._manager.put_result(mod_slave.do_task(task))
def _receive_master(self):
    """Accept one connection from the master and read an '__EOF__'
    terminated message.

    Returns the unescaped message body (escaped '__EOF___' sequences are
    restored to '__EOF__').
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # incase 'Address already in use error'
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind(('', self._s_port))
    LOG.debug('listen on port:%d' % self._s_port)
    s.listen(1)
    sock, addr = s.accept()
    LOG.debug('receive from master host...')
    buff = ''
    while True:
        d = sock.recv(4096)
        buff += d
        # BUG FIX: test the accumulated buffer, not just the last chunk,
        # so a terminator split across recv() calls is still seen; using
        # endswith() also avoids breaking early when an *escaped*
        # '__EOF___' appears in the middle of the message.
        # NOTE(review): a chunk boundary right before the final '_' of an
        # escaped marker could still end the loop early — inherent to
        # this escape scheme; confirm payloads never end that way.
        if buff.endswith('__EOF__'):
            break
    sock.send('ack')
    sock.close()
    s.close()
    # cut off last __EOF__
    buff = buff[:-7]
    # return to origin args
    buff = buff.replace('__EOF___', '__EOF__')
    return buff
def get_parse_args(self):
    """Receive the argument string from the master, parse it, and run
    the data-sync phase if the master requested one."""
    received = self._receive_master()
    # the last 8 characters carry the sync flag; the rest is the args
    sync_flag = received[-8:]
    self._parse_args(received[:-8])
    LOG.debug('complete parsing args')
    if sync_flag == '__SYNC__':
        # do data sync here
        LOG.debug('begin to synchronize data...')
        self._sync_data()
        LOG.debug('data synchronize completed')
def get_parse_args(self):
    """Receive the argument string from the master, parse it, and run
    the data-sync phase if the master requested one."""
    received = self._receive_master()
    # the last 8 characters carry the sync flag; the rest is the args
    sync_flag = received[-8:]
    self._parse_args(received[:-8])
    LOG.debug('complete parsing args')
    if sync_flag == '__SYNC__':
        # do data sync here
        LOG.debug('begin to synchronize data...')
        self._sync_data()
        LOG.debug('data synchronize completed')
def _scan_target(self, target):
    """Scan one target, dispatching to quick or normal mode."""
    LOG.debug('scan target: %s' % target)
    scan = self._scan_target_quick if self._quick_mode else self._scan_target_normal
    return scan(target)
def _parse_args(self, args):
    """Load a JSON object of options and set each pair on self._args.

    Args:
        args: JSON string encoding an object mapping option name -> value.
    """
    # renamed the local from 'dict', which shadowed the builtin
    options = json.loads(args)
    for key, value in options.items():
        LOG.debug('set self._args.' + key + ' => ' + str(value))
        setattr(self._args, key, value)
def put_result(self, result):
    """Prefix the result with the current task's flag and index, then
    push it onto the result queue."""
    packed = "|".join([self._cur_task_flag, self._cur_task_index, result])
    LOG.debug('put result:%s' % packed.replace('\n', ' '))
    self._result_queue.put(packed)
def _parse_args(self, args):
    """Load a JSON object of options and set each pair on self._args.

    Args:
        args: JSON string encoding an object mapping option name -> value.
    """
    # renamed the local from 'dict', which shadowed the builtin
    options = json.loads(args)
    for key, value in options.items():
        LOG.debug('set self._args.' + key + ' => ' + str(value))
        setattr(self._args, key, value)
def put_result(self, result):
    """Prefix the result with the current task's flag and index, then
    push it onto the result queue."""
    packed = "|".join([self._cur_task_flag, self._cur_task_index, result])
    LOG.debug('put result:%s' % packed)
    self._result_queue.put(packed)