def download_cpe_match_file(self):
    """Download the NVD CPE-match feed and unpack it to ``self.cpe_file``.

    Streams ``nvdcpematch-1.0.json.gz`` to disk in 8 KiB chunks, extracts
    the JSON next to it, then removes the archive.

    :raises requests.HTTPError: when the server answers a non-2xx status.
    :return: None
    """
    try:
        url = 'https://nvd.nist.gov/feeds/json/cpematch/1.0/nvdcpematch-1.0.json.gz'
        gz_file = '{0}.gz'.format(self.cpe_file)
        logger.info('[DOWNLOAD] {0}'.format(url))
        # NOTE: verify=False disables TLS certificate checking — kept for
        # proxy compatibility, but worth revisiting.
        with requests.get(
                url,
                headers=self.headers,
                stream=True,
                proxies=self.proxies,
                timeout=self.http_timeout,
                verify=False) as r:
            r.raise_for_status()
            with open(gz_file, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    if chunk:
                        f.write(chunk)
        # BUG FIX: the original logged self.cve_path (the directory) here;
        # log the archive actually being extracted, matching download_file().
        logger.info("Start extracting '{0}' files...".format(gz_file))
        with gzip.open(gz_file, 'rb') as f_in:
            with open(self.cpe_file, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)
        os.unlink(gz_file)
    except Exception:
        # BUG FIX: bare `raise` preserves the original traceback
        # (`raise ex` added a redundant frame).
        raise
def download_file(year):
    """Download and extract the yearly NVD CVE 1.1 feed for *year*.

    Relies on ``self`` captured from the enclosing scope (closure):
    uses its headers, proxies, timeout and ``cve_path``.

    :param year: feed year, interpolated into the NVD URL and file names.
    :return: None
    """
    try:
        cve_file = os.path.join(self.cve_path,
                                'nvdcve-1.1-{0}.json.gz'.format(year))
        url = 'https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-{0}.json.gz'.format(year)
        logger.info('[DOWNLOAD] {0}'.format(url))
        response = requests.get(
            url,
            headers=self.headers,
            stream=True,
            proxies=self.proxies,
            timeout=self.http_timeout,
            verify=False)
        with response as r:
            r.raise_for_status()
            # Stream the archive to disk in 8 KiB chunks.
            with open(cve_file, 'wb') as out:
                for block in r.iter_content(chunk_size=8192):
                    if block:
                        out.write(block)
        logger.info("Start extracting '{0}' files...".format(cve_file))
        json_path = os.path.join(self.cve_path,
                                 'nvdcve-1.1-{0}.json'.format(year))
        with gzip.open(cve_file, 'rb') as packed:
            with open(json_path, 'wb') as unpacked:
                shutil.copyfileobj(packed, unpacked)
        # Drop the archive once the JSON has been written.
        os.unlink(cve_file)
    except Exception as ex:
        raise ex
def start(**kwargs):
    """Analyze Gradle build files under ``code_dir`` for product dependencies.

    Finds every ``build.gradle``, follows any files it `apply`s/includes,
    and extracts product/version entries from dependency blocks.

    :param kwargs: ``code_dir`` — root directory of the code to scan.
    :return: list of product-info dicts collected by ``find_product_info``.
    """
    code_dir = kwargs.get('code_dir', '')
    file_list = recursive_search_files(code_dir, '*/build.gradle')
    result = []
    for item in file_list:
        # Path relative to code_dir, used as the reported origin.
        origin_file = item[len(code_dir) + 1:]
        logger.info('[-] Start analysis "{0}" file...'.format(origin_file))
        with open(item, 'rb') as fp:
            content = fp.read().decode()
        include_file = find_include_file(content)
        if include_file:
            # Included files are resolved relative to the build.gradle dir.
            path, _ = os.path.split(item)
            for f in include_file:
                full_path = os.path.join(path, f)
                with open(full_path, 'rb') as fpi:
                    result.extend(
                        find_product_info(fpi.read().decode(),
                                          full_path[len(code_dir) + 1:]))
        dependencies = find_keyword_block(content)
        # FIX: the block keys were never used, so iterate the values only
        # (was `for key, value in dependencies.items()`).
        for value in dependencies.values():
            result.extend(find_product_info(value, origin_file))
    return result
def __init__(self, proxies=None, upgrade_interval_day='7d', http_timeout=15):
    """Prepare paths, upgrade interval, HTTP headers and worker pool.

    :param proxies: optional proxy mapping handed to requests.
    :param upgrade_interval_day: interval spec like '7d' or '12h';
        anything unparseable falls back to 7 days.
    :param http_timeout: per-request timeout in seconds.
    """
    self.http_timeout = int(http_timeout)
    self.cve_path = paths.CVE_PATH
    self.cve_cpe_db = paths.DB_FILE
    self.cpe_file = os.path.join(self.cve_path, 'nvdcpematch-1.0.json')
    # Parse "<number><unit>" where unit is 'd' (days) or 'h' (hours).
    matched = re.search(r'(\d+)(\w)', upgrade_interval_day)
    unit = matched.group(2) if matched else None
    if unit == 'd':
        self.upgrade_interval = 60 * 60 * 24 * int(matched.group(1))
    elif unit == 'h':
        self.upgrade_interval = 60 * 60 * int(matched.group(1))
    else:
        # Unknown or missing unit: default to one week.
        self.upgrade_interval = 60 * 60 * 24 * 7
    self.headers = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
        "accept-encoding": "gzip, deflate, br",
        "accept-language": "en;q=0.9",
        "connection": "keep-alive",
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108"
    }
    # User-configured headers override the defaults above.
    self.headers.update(conf['http']['headers'])
    self.pool = ThreadPool(10)
    logger.info('Proxies: {0}'.format(proxies))
    self.proxies = proxies
def start(self):
    """Run the CPE and CVE database upgrade steps, logging elapsed time.

    Download steps are commented out — only the local sync runs here
    (presumably a development/offline variant; the full flow exists in
    the other ``start`` implementation).

    :return: None — exceptions are logged, never propagated.
    """
    try:
        s_time = time.time()
        # self.download_cpe_match_file()
        # self.download_cve_file()
        self.cpe_upgrade()
        self.cve_upgrade()
        logger.info('total seconds: {0}'.format(time.time() - s_time))
    except Exception as ex:
        # FIX: logger.exception logs the message with the full traceback
        # in one call, replacing `import traceback; traceback.print_exc()`
        # followed by logger.error.
        logger.exception(ex)
def __send_data(self, url, data, method='POST'):
    """Send *data* to *url*, retrying on timeout/proxy/HTTP-status errors.

    :param url: target endpoint.
    :param data: payload — passed as ``data`` for GET, JSON body otherwise.
    :param method: 'GET' for a GET request; anything else means POST.
    :return: response body (bytes) on 200, '' on 404, or None when all
        retry attempts are exhausted.
    """

    def get_delay_s():
        # Randomized back-off (roughly 0–9 s) between attempts.
        return round(random.random(), 2) * random.randrange(3, 9)

    result = None
    try_index = 1
    # TLS verification is disabled below; silence urllib3's warnings.
    requests.packages.urllib3.disable_warnings()
    while True:
        try:
            if method == 'GET':
                resp = self.session.get(url, data=data, timeout=self.timeout, verify=False)
            else:
                resp = self.session.post(url, json=data, timeout=self.timeout, verify=False)
            logger.info('URL: {0}, Status:{1}'.format(url, resp.status_code))
            if resp.status_code == 200:
                result = resp.content
                break
            elif resp.status_code in (404, ):
                # A missing resource is a definitive answer: stop retrying.
                result = ''
                break
            else:
                logger.warning('status_code: {0}, reason: {1}'.format(resp.status_code, resp.reason))
                raise HTTPStatusCodeError(resp.reason)
        except (requests.exceptions.Timeout, requests.exceptions.ProxyError,
                HTTPStatusCodeError) as ex:
            # FIX: logger.warn() is a deprecated alias of warning().
            logger.warning(ex)
            time.sleep(get_delay_s())
            if try_index >= self.timeout_try + 1:
                break
            else:
                logger.warning('[-] Start {0} attempts to send data...'.format(try_index))
                try_index += 1
    return result
def start(self):
    """Run the full upgrade workflow: download feeds, then sync the DB.

    Skips everything (with a warning) when no upgrade is due. On any
    other failure the previous database is restored from its ``.bak``
    copy; the ``.bak`` file is always cleaned up at the end.

    :return: None — all exceptions are handled internally.
    """
    try:
        s_time = time.time()
        # Guard clause: bail out early when the interval has not elapsed.
        if not self.is_update:
            raise NoUpgradeRequiredError(
                "No upgrade required, last update time: {0}.".format(
                    self.last_update_time))
        self.download_cpe_match_file()
        self.download_cve_file()
        logger.info(
            'File download is complete and data synchronization begins...'
        )
        self.cpe_upgrade()
        self.cve_upgrade()
        logger.info('Rule upgrade succeeded.')
        logger.info('Total time consumption: {0}(s)'.format(
            round(time.time() - s_time, 2)))
    except NoUpgradeRequiredError as ex:
        logger.warning(ex)
    except Exception as ex:
        import traceback
        traceback.print_exc()
        logger.error(ex)
        backup = '{0}.bak'.format(self.cve_cpe_db)
        # Roll the previous database back into place on failure.
        if os.path.isfile(backup):
            shutil.move(backup, self.cve_cpe_db)
    finally:
        # Successful runs (or a restore via move) leave no .bak behind;
        # remove any leftover copy.
        backup = '{0}.bak'.format(self.cve_cpe_db)
        if os.path.isfile(backup):
            os.unlink(backup)
def start(self): """ :return: """ logger.info("%d analyzer plugin loaded." % len(self.getPluginNames)) try: logger.info('analysis statistics code ...') self.cloc.start(code_dir=self.code_dir, args=self.cloc_args) self._result['cloc'] = json.loads(self.cloc.result) except Exception as ex: import traceback traceback.print_exc() logger.warning(ex) for func, product in kb.pluginFunctions: try: logger.debug("test item depends on package using '%s'" % product) result = func(code_dir=self.code_dir, skipNewVerCheck=self.skip_check_new_version, timeout=self.timeout, tag_filter=self.tag_filter) if self.enable_vuln_scan: logger.info("Start using CPE rules for matching ...") for item in result: rule_list = kb.cpe_cache.get(item['product']) for rule in rule_list: if rule.compare(vendor=item['vendor'], product=item['product'], version=item['version']): if 'cve' not in item: item['cve'] = {} if rule.cve_info: item['cve'][ rule.cve] = rule.cve_info.description except Exception as ex: import traceback traceback.print_exc() err = "exception occurred while running script for '%s' ('%s')" % ( product, str(ex)) logger.critical(err) result = None if result: self._result['depends'].append({product: result})
def main(): """ main function """ t1 = time.time() try: init() banner() cmdLineParser() # search if conf.search: result = kb.cpe_cache.get(conf.search) print("=" * 55) print(" cve | vendor | product | version | update ") for item in result: print("-" * 55) print(" {0} | {1} | {2} | {3} | {4} ".format( item.cve, item.vendor, item.product, item.version, item.update, )) print("=" * 55) print( "[*] cve count: {0}\n[*] cve list: {1} \n[*] affect version: {2}" .format( len(result), [_.cve for _ in result], ['{0}:{1}'.format(_.version, _.update) for _ in result])) sys.exit(1) # upgrade if conf.upgrade: up = Upgrade(proxies=conf['http']['proxies'], upgrade_interval=conf['upgrade_interval'], http_timeout=conf['http']['timeout']) up.start() sys.exit(1) if conf.code_dir: if not os.path.exists(conf.code_dir): msg = '[%s] path does not exist!' % conf.code_dir logger.critical(msg) raise IOError(msg) c = ClocDetector( code_dir=conf.code_dir, skip_check_new_version=conf.skip_check_new_version, enable_vuln_scan=conf.vuln_scan, cloc_args=conf.cloc['args']) c.start() if conf.output: with open(conf.output, 'wb') as fp: fp.write((json.dumps(c.getResult, indent=2)).encode()) logger.info( 'The scan is complete and the results have been saved to the "{0}" file.' .format(conf.output)) else: pprint.pprint(c.getResult) except UserQuitException: logger.error("user quit") except KeyboardInterrupt: logger.error("user aborted") except EOFError: logger.error("exit") except SystemExit: raise finally: logger.info('Total time consumption: {0}(s)'.format( round(time.time() - t1, 2)))
def start(**kwargs): """ :param kwargs: :return: """ code_dir = kwargs.get('code_dir', '') enable_online_recursive = kwargs.get('enable_online_recursive', False) deep_recursive = kwargs.get('deep_recursive', False) pom_file_list = recursive_search_files(code_dir, '*/pom.xml') pom_entity_list = [] result = [] for item in pom_file_list: logger.info('[-] Start analysis "{0}" file...'.format(item)) with open(item, 'rb') as fp: pom_content = fp.read() pom = PomEntity(origin_file_name=item, pom_content=pom_content.decode()) if not pom.parent: if pom.key not in kb.dependencies: kb.dependencies[pom.key] = pom pom_entity_list.append(pom) if enable_online_recursive: _result = [] for pom in pom_entity_list: for dep in pom.dependencies: if dep['version']: _url = '{0}{1}/{2}/{3}/{2}-{3}.pom'.format( conf.mvn['repo'][0], dep['group_id'].replace('.', '/'), dep['artifact_id'], dep['version'], ) _result.extend( recursive_online(url=_url, parent_file=pom.file_name, deep_recursive=deep_recursive)) pom_entity_list.extend(_result) for pom in pom_entity_list: parent_key = None parent_file = '' if pom.parent: parent_key = '{0}:{1}'.format(pom.parent['groupId'], pom.parent['artifactId']) if parent_key and parent_key in kb.dependencies: parent_file = kb.dependencies[parent_key].file_name for item in pom.dependencies: version = item['version'] ver = re.search(r'\$\{(.+?)\}', version, re.I) if ver and parent_key and parent_key in kb.dependencies and ver.group( 1) in kb.dependencies[parent_key].properties: version = kb.dependencies[parent_key].properties[ver.group(1)] result.append({ 'vendor': item['group_id'], 'product': item['artifact_id'], 'version': version, 'new_version': '', 'cve': {}, 'parent_file': parent_file if parent_file else pom.parent_file, 'origin_file': pom.file_name, }) return result