def push_third_party_vulnerabilities(self, vulnerabilities_id):
    """
    Push one vulnerability to the third-party vulnerability management platform.

    Only pushes when the `third_party_vulnerabilities.status` config value is
    truthy; any failure is logged and swallowed so a push error never breaks
    the caller's scan flow.

    :param vulnerabilities_id: primary key of the vulnerability record to push
    :return: None
    """
    try:
        status = Config('third_party_vulnerabilities', 'status').value
        if int(status):
            q = Queue(self.project_name, self.third_party_vulnerabilities_name,
                      self.third_party_vulnerabilities_type, self.file_path,
                      self.line_number, self.code_content, vulnerabilities_id)
            q.push()
    except Exception as e:
        # traceback.print_exc() already writes to stderr and returns None,
        # so wrapping it in print() only emitted a stray "None" line.
        traceback.print_exc()
        # str(e) instead of `e.message`: BaseException.message was deprecated
        # in Python 2.6 and removed in Python 3.
        logging.critical(str(e))
def push_third_party_vulnerabilities(self, vulnerabilities_id):
    """
    Push one vulnerability to the third-party vulnerability management platform.

    Only pushes when the `third_party_vulnerabilities.status` config value is
    truthy; failures are logged via self.log and never propagated.

    :param vulnerabilities_id: primary key of the vulnerability record to push
    :return: None
    """
    try:
        status = Config('third_party_vulnerabilities', 'status').value
        if int(status):
            q = Queue(self.project_name, self.third_party_vulnerabilities_name,
                      self.third_party_vulnerabilities_type, self.file_path,
                      self.line_number, self.code_content, vulnerabilities_id)
            q.push()
    except Exception as e:
        traceback.print_exc()
        # str(e) instead of `e.message`: BaseException.message was deprecated
        # in Python 2.6 and removed in Python 3.
        self.log('critical', str(e))
def queue():
    """
    Push all un-pushed vulnerabilities of one rule/project to a third-party
    vulnerability management platform.

    Start the worker first: celery -A daemon worker --loglevel=info

    :return: JSON response with a result code and human-readable message.
    """
    # The docstring now precedes the import so it is an actual docstring;
    # in the original it followed the import and was a dead string statement.
    from utils.queue import Queue
    # Both the project ID and the rule ID are required.
    project_id = request.json.get('project_id')
    rule_id = request.json.get('rule_id')
    if project_id is None or rule_id is None:
        return jsonify(code=1002, result='Project ID and Rule ID can\'t empty!')
    # Project Info
    project_info = CobraProjects.query.filter_by(id=project_id).first()
    # Vulnerability/rule pairs not yet pushed (status == 0).
    result_all = db.session().query(CobraRules, CobraResults).join(
        CobraResults, CobraResults.rule_id == CobraRules.id).filter(
        CobraResults.project_id == project_id, CobraResults.status == 0,
        CobraResults.rule_id == rule_id).all()
    if len(result_all) == 0:
        return jsonify(code=1001, result="There are no unpacked vulnerabilities")
    # Push each vulnerability; best effort — one failure must not stop the rest.
    for rule, result in result_all:
        try:
            # Vulnerability type information for this rule.
            vul_info = CobraVuls.query.filter(CobraVuls.id == rule.vul_id).first()
            q = Queue(project_info.name, vul_info.name, vul_info.third_v_id,
                      result.file, result.line, result.code, result.id)
            q.push()
        except Exception:
            # Narrowed from a bare `except:` (which also caught SystemExit/
            # KeyboardInterrupt); print_exc() writes the traceback itself,
            # so the former print(...) wrapper only emitted "None".
            traceback.print_exc()
    return jsonify(
        code=1001,
        result=
        "Successfully pushed {0} vulnerabilities to a third-party vulnerability management platform"
        .format(len(result_all)))
def queue():
    """
    Push all un-pushed vulnerabilities of one rule/project to a third-party
    vulnerability management platform.

    Start the worker first: celery -A daemon worker --loglevel=info

    :return: JSON response with a result code and human-readable message.
    """
    # Docstring moved before the import so it is an actual docstring;
    # originally it followed the import and was a dead string statement.
    from utils.queue import Queue
    # Both the project ID and the rule ID are required.
    project_id = request.json.get('project_id')
    rule_id = request.json.get('rule_id')
    if project_id is None or rule_id is None:
        return jsonify(code=1002, result='项目ID和规则ID不能为空')
    # Project info
    project_info = CobraProjects.query.filter_by(id=project_id).first()
    # Vulnerability/rule pairs not yet pushed (status == 0).
    result_all = db.session().query(CobraRules, CobraResults).join(
        CobraResults, CobraResults.rule_id == CobraRules.id).filter(
        CobraResults.project_id == project_id, CobraResults.status == 0,
        CobraResults.rule_id == rule_id).all()
    if len(result_all) == 0:
        return jsonify(code=1001, result="没有未推送的漏洞")
    # Push each vulnerability; best effort — one failure must not stop the rest.
    for rule, result in result_all:
        try:
            # Vulnerability type information for this rule.
            vul_info = CobraVuls.query.filter(CobraVuls.id == rule.vul_id).first()
            q = Queue(project_info.name, vul_info.name, vul_info.third_v_id,
                      result.file, result.line, result.code, result.id)
            q.push()
        except Exception:
            # Narrowed from a bare `except:` (which also caught SystemExit/
            # KeyboardInterrupt); print_exc() writes the traceback itself,
            # so the former print(...) wrapper only emitted "None".
            traceback.print_exc()
    return jsonify(code=1001, result="成功推送{0}个漏洞到第三方漏洞管理平台".format(len(result_all)))
def getAiPath(self, start, goal):
    """
    Search from `start` towards `goal` and return the move list of the last
    state examined (the path to the goal when one was reached).

    NOTE(review): correctness as breadth-first search assumes `Queue` is
    FIFO — confirm against the Queue implementation in use.
    """
    bfsQueue = Queue()
    used = set()
    # State triple: (position, path-so-far, cost); the cost slot is always 1
    # and never read here.
    currentState = (start, [], 1)
    bfsQueue.push(currentState)
    # Stop when the frontier is exhausted or the last-popped state is the goal.
    while (not bfsQueue.isEmpty() and not self.isGoalState(currentState[0], goal)):
        currentState = bfsQueue.pop()
        if (currentState[0] not in used and not self.isGoalState(currentState[0], goal)):
            used.add(currentState[0])
            # Expand successors; presumably getDirections yields
            # (next_position, move) pairs — TODO confirm.
            for state in self.getDirections(currentState):
                newState = (state[0], [], 1)
                # Copy the parent's path, then append the new move.
                for step in currentState[1]:
                    newState[1].append(step)
                newState[1].append(state[1])
                bfsQueue.push(newState)
    # NOTE(review): Python 2 print statement; debug output left in place.
    print start, goal, currentState[1]
    return currentState[1]
def queue():
    """
    Push all un-pushed vulnerabilities of one rule/project to a third-party
    vulnerability management platform.

    Start the worker first: celery -A daemon worker --loglevel=info

    :return: JSON response with a result code and human-readable message.
    """
    # Docstring moved before the import so it is an actual docstring;
    # originally it followed the import and was a dead string statement.
    from utils.queue import Queue
    # Both the project ID and the rule ID are required.
    project_id = request.json.get('project_id')
    rule_id = request.json.get('rule_id')
    if project_id is None or rule_id is None:
        return jsonify(code=1002, result='Project ID and Rule ID can\'t empty!')
    # Project Info
    project_info = CobraProjects.query.filter_by(id=project_id).first()
    # Vulnerability/rule pairs not yet pushed (status == 0).
    result_all = db.session().query(CobraRules, CobraResults).join(CobraResults, CobraResults.rule_id == CobraRules.id).filter(
        CobraResults.project_id == project_id,
        CobraResults.status == 0,
        CobraResults.rule_id == rule_id
    ).all()
    if len(result_all) == 0:
        return jsonify(code=1001, result="There are no unpacked vulnerabilities")
    # Push each vulnerability; best effort — one failure must not stop the rest.
    for rule, result in result_all:
        try:
            # Vulnerability type information for this rule.
            vul_info = CobraVuls.query.filter(CobraVuls.id == rule.vul_id).first()
            q = Queue(project_info.name, vul_info.name, vul_info.third_v_id, result.file, result.line, result.code, result.id)
            q.push()
        except Exception:
            # Narrowed from a bare `except:` (which also caught SystemExit/
            # KeyboardInterrupt); print_exc() writes the traceback itself,
            # so the former print(...) wrapper only emitted "None".
            traceback.print_exc()
    return jsonify(code=1001, result="Successfully pushed {0} vulnerabilities to a third-party vulnerability management platform".format(len(result_all)))
def analyse(self):
    """
    Run the static code analysis for this task.

    Collects the project files, then for every enabled rule either lists
    files by extension (empty `regex_location`) with `find`, or greps the
    tree with a Perl regex; each hit is filtered (white list, annotations,
    optional PHP repair parsing), stored as a CobraResults row and pushed
    to the third-party platform queue. Finally marks the task finished.

    Exits the process (sys.exit) on unrecoverable configuration problems.
    """
    if self.directory is None:
        logging.critical("Please set directory")
        sys.exit()
    logging.info('Start code static analyse...')
    d = directory.Directory(self.directory)
    files = d.collect_files(self.task_id)
    logging.info('Scan Files: {0}, Total Time: {1}s'.format(
        files['file_nums'], files['collect_time']))
    # Extensions that are not source code; only used for nicer log output below.
    ext_language = {
        # Image
        '.jpg': 'image',
        '.png': 'image',
        '.bmp': 'image',
        '.gif': 'image',
        '.ico': 'image',
        '.cur': 'image',
        # Font
        '.eot': 'font',
        '.otf': 'font',
        '.svg': 'font',
        '.ttf': 'font',
        '.woff': 'font',
        # CSS
        '.css': 'css',
        '.less': 'css',
        '.scss': 'css',
        '.styl': 'css',
        # Media
        '.mp3': 'media',
        '.swf': 'media',
        # Execute
        '.exe': 'execute',
        '.sh': 'execute',
        '.dll': 'execute',
        '.so': 'execute',
        '.bat': 'execute',
        '.pl': 'execute',
        # Edit
        '.swp': 'tmp',
        # Cert
        '.crt': 'cert',
        # Text
        '.txt': 'text',
        '.csv': 'text',
        '.md': 'markdown',
        # Backup
        '.zip': 'backup',
        '.bak': 'backup',
        '.tar': 'backup',
        '.rar': 'backup',
        '.tar.gz': 'backup',
        '.db': 'backup',
        # Config
        '.xml': 'config',
        '.yml': 'config',
        '.spf': 'config',
        '.iml': 'config',
        '.manifest': 'config',
        # Source
        '.psd': 'source',
        '.as': 'source',
        # Log
        '.log': 'log',
        # Template
        '.template': 'template',
        '.tpl': 'template',
    }
    # Log the per-extension file statistics collected above.
    for ext in files:
        if ext in ext_language:
            logging.info('{0} - {1}'.format(ext, files[ext]))
            continue
        else:
            logging.info(ext)
    languages = CobraLanguages.query.all()
    rules = CobraRules.query.filter_by(status=1).all()
    extensions = None
    # `grep` (`ggrep` on Mac)
    grep = '/bin/grep'
    # `find` (`gfind` on Mac)
    find = '/bin/find'
    # On macOS the BSD grep/find lack the GNU options used below, so locate
    # the Homebrew GNU versions (ggrep / gfind) under /usr/local/Cellar.
    if 'darwin' == sys.platform:
        ggrep = ''
        gfind = ''
        for root, dir_names, file_names in os.walk(
                '/usr/local/Cellar/grep'):
            for filename in file_names:
                if 'ggrep' == filename or 'grep' == filename:
                    ggrep = os.path.join(root, filename)
        for root, dir_names, file_names in os.walk(
                '/usr/local/Cellar/findutils'):
            for filename in file_names:
                if 'gfind' == filename:
                    gfind = os.path.join(root, filename)
        if ggrep == '':
            logging.critical("brew install ggrep pleases!")
            sys.exit(0)
        else:
            grep = ggrep
        if gfind == '':
            logging.critical("brew install findutils pleases!")
            sys.exit(0)
        else:
            find = gfind
    """
    all vulnerabilities
    vulnerabilities_all[vuln_id] = {'name': 'vuln_name', 'third_v_id': 'third_v_id'}
    """
    vulnerabilities_all = {}
    vulnerabilities = CobraVuls.query.all()
    for v in vulnerabilities:
        vulnerabilities_all[v.id] = {
            'name': v.name,
            'third_v_id': v.third_v_id
        }
    for rule in rules:
        rule.regex_location = rule.regex_location.strip()
        rule.regex_repair = rule.regex_repair.strip()
        logging.info('Scan rule id: {0} {1} {2}'.format(
            self.project_id, rule.id, rule.description))
        # Filters: file extensions registered for this rule's language.
        # NOTE(review): `extensions` is not reset per rule, so a rule whose
        # language has no match silently reuses the previous rule's list.
        for language in languages:
            if language.id == rule.language:
                extensions = language.extensions.split('|')
        if extensions is None:
            logging.critical("Rule Language Error")
            sys.exit(0)
        # White list: paths excluded from results for this project + rule.
        white_list = []
        ws = CobraWhiteList.query.filter_by(project_id=self.project_id,
                                            rule_id=rule.id,
                                            status=1).all()
        if ws is not None:
            for w in ws:
                white_list.append(w.path)
        try:
            if rule.regex_location == "":
                # No regex: the rule matches files by extension alone.
                filters = []
                for index, e in enumerate(extensions):
                    # NOTE(review): `index > 1` looks like an off-by-one —
                    # the `-o` separator is skipped between the first two
                    # extensions; `index > 0` was probably intended.
                    if index > 1:
                        filters.append('-o')
                    filters.append('-name')
                    filters.append('*' + e)
                # Find Special Ext Files
                param = [find, self.directory, "-type", "f"] + filters
            else:
                filters = []
                for e in extensions:
                    filters.append('--include=*' + e)
                # explode dirs
                explode_dirs = ['.svn', '.cvs', '.hg', '.git', '.bzr']
                for explode_dir in explode_dirs:
                    filters.append('--exclude-dir={0}'.format(explode_dir))
                # -n Show Line number / -r Recursive / -P Perl regular expression
                param = [grep, "-n", "-r", "-P"] + filters + [
                    rule.regex_location, self.directory
                ]
            # logging.info(' '.join(param))
            p = subprocess.Popen(param, stdout=subprocess.PIPE)
            result = p.communicate()
            # Exists result
            if len(result[0]):
                lines = str(result[0]).strip().split("\n")
                for line in lines:
                    line = line.strip()
                    if line == '':
                        continue
                    if rule.regex_location == '':
                        # Find (special file): each line is just a file path.
                        file_path = line.strip().replace(
                            self.directory, '')
                        logging.debug('File: {0}'.format(file_path))
                        exist_result = CobraResults.query.filter_by(
                            project_id=self.project_id,
                            rule_id=rule.id,
                            file=file_path).first()
                        if exist_result is not None:
                            # Already recorded; re-push only if still unpushed.
                            if exist_result.status == 0:
                                try:
                                    q = Queue(
                                        self.project_name,
                                        vulnerabilities_all[rule.vul_id]
                                        ['name'], vulnerabilities_all[
                                            rule.vul_id]['third_v_id'],
                                        file_path, 0, 0, exist_result.id)
                                    q.push()
                                except Exception as e:
                                    # NOTE(review): print_exc() returns None
                                    # (print emits "None"); `e.message` is
                                    # Python-2-only.
                                    print(traceback.print_exc())
                                    logging.critical(e.message)
                            logging.warning("Exists Result")
                        else:
                            # New finding: record it, then push to the queue.
                            vul = CobraResults(self.task_id, self.project_id,
                                               rule.id, file_path, 0, '', 0)
                            db.session.add(vul)
                            try:
                                # push queue
                                q = Queue(
                                    self.project_name, vulnerabilities_all[
                                        rule.vul_id]['name'],
                                    vulnerabilities_all[
                                        rule.vul_id]['third_v_id'],
                                    file_path, 0, 0, vul.id)
                                q.push()
                            except Exception as e:
                                print(traceback.print_exc())
                                logging.critical(e.message)
                    else:
                        # Grep output: "<path>:<line>:<code>".
                        line_split = line.split(':', 1)
                        file_path = line_split[0].strip()
                        if len(line_split) < 2:
                            logging.info("Line len < 2 {0}".format(line))
                            continue
                        code_content = line_split[1].split(':', 1)[1].strip()
                        line_number = line_split[1].split(':', 1)[0].strip()
                        if file_path in white_list or ".min.js" in file_path:
                            logging.info("In white list or min.js")
                        else:
                            # Rule wrapped fully in parens => "only match":
                            # accept the raw hit without annotation/repair checks.
                            only_match = rule.regex_location[:1] == '(' and rule.regex_location[-1] == ')'
                            """
                            annotation (注释过滤)
                            # // /* *
                            Exclude:
                            - (rule_location)
                            - 当定位规则左右两边为括号时不过滤注释行,比如硬编码密码
                            """
                            # Skip hits that start like a comment (#, //, *, /*)
                            # unless the rule is an only-match rule.
                            match_result = re.match(
                                "(#)?(//)?(\*)?(/\*)?", code_content)
                            # NOTE(review): `is not ""` compares identity, not
                            # equality — works only by CPython interning; should
                            # be `!= ""`.
                            if match_result.group(
                                    0) is not None and match_result.group(
                                        0
                                    ) is not "" and only_match is not True:
                                logging.info("In Annotation")
                            else:
                                param_value = None
                                # parse file function structure
                                if only_match:
                                    found_vul = True
                                else:
                                    # PHP files with a repair regex get deeper
                                    # analysis: is the param controllable, and
                                    # has the issue already been repaired?
                                    if file_path[-3:] == 'php' and rule.regex_repair.strip() != '':
                                        try:
                                            parse_instance = parse.Parse(
                                                rule.regex_location,
                                                file_path, line_number,
                                                code_content)
                                            if parse_instance.is_controllable_param():
                                                if parse_instance.is_repair(
                                                        rule.regex_repair,
                                                        rule.block_repair):
                                                    logging.info(
                                                        "Static: repaired")
                                                    continue
                                                else:
                                                    if parse_instance.param_value is not None:
                                                        param_value = parse_instance.param_value
                                                    found_vul = True
                                            else:
                                                logging.info(
                                                    "Static: uncontrollable param"
                                                )
                                                continue
                                        except:
                                            print(traceback.print_exc())
                                            found_vul = False
                                    else:
                                        found_vul = True
                                # Store paths relative to the scanned directory.
                                file_path = file_path.replace(
                                    self.directory, '')
                                if found_vul:
                                    logging.info('In Insert')
                                    exist_result = CobraResults.query.filter_by(
                                        project_id=self.project_id,
                                        rule_id=rule.id,
                                        file=file_path,
                                        line=line_number).first()
                                    if exist_result is not None:
                                        logging.info("Exists Result")
                                        # Re-push only if still unpushed.
                                        if exist_result.status == 0:
                                            try:
                                                q = Queue(
                                                    self.project_name,
                                                    vulnerabilities_all[
                                                        rule.vul_id]
                                                    ['name'],
                                                    vulnerabilities_all[
                                                        rule.vul_id]
                                                    ['third_v_id'],
                                                    file_path, line_number,
                                                    code_content,
                                                    exist_result.id)
                                                q.push()
                                            except Exception as e:
                                                print(
                                                    traceback.print_exc())
                                                logging.critical(e.message)
                                    else:
                                        # Escape and truncate the snippet, then
                                        # prepend trigger/param markers for display.
                                        code_content = code_content.encode(
                                            'unicode_escape')
                                        if len(code_content) > 512:
                                            code_content = code_content[:500] + '...'
                                        code_content = '# Trigger\r' + code_content
                                        if param_value is not None:
                                            code_content = '# Param\r' + param_value + '\r//\r// ------ Continue... ------\r//\r' + code_content
                                        logging.debug(
                                            'File: {0}:{1} {2}'.format(
                                                file_path, line_number,
                                                code_content))
                                        vul = CobraResults(
                                            self.task_id, self.project_id,
                                            rule.id, file_path, line_number,
                                            code_content, 0)
                                        db.session.add(vul)
                                        db.session.commit()
                                        try:
                                            q = Queue(
                                                self.project_name,
                                                vulnerabilities_all[
                                                    rule.vul_id]['name'],
                                                vulnerabilities_all[
                                                    rule.vul_id]
                                                ['third_v_id'],
                                                file_path, line_number,
                                                code_content, vul.id)
                                            q.push()
                                        except Exception as e:
                                            print(traceback.print_exc())
                                            logging.critical(e.message)
                                        logging.info(
                                            'Insert Results Success')
                                else:
                                    logging.info('Not Found')
        except Exception as e:
            print(traceback.print_exc())
            logging.critical('Error calling grep: ' + str(e))
    # Set End Time For Task
    t = CobraTaskInfo.query.filter_by(id=self.task_id).first()
    t.status = 2
    t.file_count = files['file_nums']
    t.time_end = int(time.time())
    t.time_consume = t.time_end - t.time_start
    t.updated_at = time.strftime('%Y-%m-%d %X', time.localtime())
    try:
        db.session.add(t)
        db.session.commit()
    except Exception as e:
        # NOTE(review): message says "start time" but this sets the end time.
        logging.critical("Set start time failed:" + e.message)
    logging.info("Scan Done")
class Downloader(object):
    # Manages a queue of manga downloads over IRC XDCC / DCC.
    # State machine via `download_status`:
    #   'not_ready' -> 'ready' -> 'requesting_download' -> 'downloading' -> 'ready'
    # NOTE(review): Python 2 code (print statements); the bare triple-quoted
    # strings before each method are no-op string statements, not docstrings,
    # and are left byte-identical.
    """constructor"""
    def __init__(self, connection):
        # FIFO of manga waiting to be requested.
        self.download_queue = Queue()
        # DownloadItem currently being requested/downloaded, if any.
        self.current_download = None
        self.download_status = 'not_ready'
        # IRC server connection used to send XDCC requests.
        self.connection = connection
        # Direct DCC connection for the active transfer, if any.
        self.dcc_connection = None
    """start downloader"""
    def start(self):
        self.download_status = 'ready'
    """stop downloader"""
    def stop(self):
        self.download_status = 'not_ready'
    """is it ready for download?"""
    def is_ready(self):
        return (self.download_status == 'ready')
    """add manga to download_queue"""
    def download(self, manga):
        print "Adding %s to download queue" % manga.title
        self.download_queue.push(manga)
    """check for download status and determine action to take"""
    def check(self):
        # Periodic tick: start the next download when idle, or give up on a
        # request whose deadline has passed.
        if self.is_ready():
            self.download_next()
        elif (
            self.download_status == 'requesting_download'
            and self.current_download.expired_request_time < time.time()
        ):
            self.finish_download()
    """pop an item from download_queue and request download"""
    def download_next(self):
        assert(self.is_ready())
        if self.current_download is not None and not self.current_download.isDownloaded():
            # restart download
            manga = self.current_download.manga
        else:
            manga = self.download_queue.pop()
        # Nothing queued — stay ready and wait for the next check().
        if (manga is None):
            return
        print "Requesting download of %s" % manga.title
        self.current_download = DownloadItem(manga)
        # Ask the bot for the pack over XDCC; transfer arrives via DCC later.
        self.connection.privmsg(manga.bot, "XDCC SEND #%d" % manga.pack_id)
        self.download_status = 'requesting_download'
    """initiate a manga download connection"""
    def initiate_download(self, file_name, file_size, dcc_connection):
        print "Downloading %s" % self.current_download.manga.title
        self.download_status = 'downloading'
        self.dcc_connection = dcc_connection
        self.current_download.initiateDownload(file_name, file_size)
    """receive download data"""
    def receive_data(self, data):
        self.current_download.appendData(data)
        # DCC acknowledgement: total bytes received as a network-order uint32.
        self.dcc_connection.privmsg(struct.pack("!I", self.current_download.received_bytes))
    """clean up dcc connection and its state"""
    def finish_download(self):
        if (self.current_download.isDownloaded()):
            print "Finished downloading %s\n" % self.current_download.manga.title
        else:
            print "Failed downloading %s" % self.current_download.manga.title
        self.current_download.finishDownload()
        # Back to 'ready'; an incomplete current_download will be retried by
        # download_next().
        self.download_status = 'ready'
        self.dcc_connection = None