import logging

import requests
from psycopg2.extras import execute_values

# Repo-local names assumed to be in scope here: utils (providing
# utils.vmaas_post_request), vmaas_post_request, VMAAS_CVES_ENDPOINT,
# vmaas_vulnerabilities_endpoint and DatabaseHandler.
LOGGER = logging.getLogger(__name__)


def paging(endpoint, request_json) -> tuple:
    """Download all pages from a paginated VMaaS endpoint and merge them."""
    result_pages = {}
    session = requests.Session()
    success = True
    while True:
        r_json = utils.vmaas_post_request(endpoint, request_json, session)
        if r_json is None:
            LOGGER.error("Downloading failed.")
            success = False
            break
        # The response holds one data key (e.g. "cve_list") plus the paging
        # keys; pick the data key explicitly instead of relying on key order.
        data_index, data = next((key, value) for key, value in r_json.items()
                                if key not in ("page", "page_size", "pages"))
        result_pages.setdefault(data_index, {}).update(data)
        LOGGER.info(
            "Downloading CVE/REPOs metadata (page: %s, page_size: %s, pages: %s)",
            request_json['page'], r_json["page_size"], r_json['pages'])
        if request_json['page'] >= r_json['pages']:
            break
        request_json['page'] += 1
    session.close()
    if success:
        # Report the paging info of the last fetched page alongside the data.
        result_pages.update({
            "page": r_json["page"],
            "page_size": r_json["page_size"],
            "pages": r_json["pages"]
        })
    return (success, result_pages)
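# Usage sketch for paging() (illustrative, not taken from the source): the
# payload below is an assumption; any paginated VMaaS POST endpoint that
# echoes "page"/"page_size"/"pages" in its response works the same way.
def _example_fetch_all_cve_pages():
    """Hypothetical caller that pulls every CVE page through paging()."""
    request_json = {
        'cve_list': ['.*'],   # regex matching every CVE name
        'page': 1,            # paging() advances this field in place
        'page_size': 5000,
    }
    success, pages = paging(VMAAS_CVES_ENDPOINT, request_json)
    if success:
        # Per-page payloads were merged under the data key ("cve_list" here).
        return pages.get('cve_list', {})
    return {}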
def _vmaas_request_cves(self, vmaas_request_json):
    """Make a VMaaS request and return the set of CVEs affecting the system."""
    system_cves = set()
    vulnerabilities_response_json = vmaas_post_request(vmaas_vulnerabilities_endpoint,
                                                       vmaas_request_json,
                                                       session=self.session)
    if vulnerabilities_response_json is not None:
        system_cves.update(vulnerabilities_response_json['cve_list'])
    return system_cves
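# Illustrative payload for _vmaas_request_cves() (an assumption based on the
# VMaaS /vulnerabilities API, not taken from the source): the caller builds it
# from a system's installed package NEVRAs and enabled repositories.
EXAMPLE_VMAAS_REQUEST = {
    'package_list': ['kernel-4.18.0-80.el8.x86_64', 'bash-4.4.19-7.el8.x86_64'],
    'repository_list': ['rhel-8-for-x86_64-baseos-rpms'],
}
# system_cves = evaluator._vmaas_request_cves(EXAMPLE_VMAAS_REQUEST)  # hypothetical instance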
def sync_cve_md(page_size=5000):
    """Sync all CVE metadata from VMaaS"""
    LOGGER.info('Syncing CVE metadata')
    conn = DatabaseHandler.get_connection()
    cur = conn.cursor()
    # Map impact names to their database ids for the rows built below.
    impact_id_map = {}
    cur.execute("select name, id from cve_impact")
    for impact_name, impact_id in cur.fetchall():
        impact_id_map[impact_name] = impact_id
    # CVEs already stored decide between insert and update; a set keeps the
    # membership test below O(1).
    cur.execute('select cve from cve_metadata')
    cves_in_db = {cve_tuple[0] for cve_tuple in cur.fetchall()}
    cve_list = [".*"]  # regex matching every CVE name
    success = True
    page = 1
    session = requests.Session()
    while True:
        cve_request = {
            'cve_list': cve_list,
            'page_size': page_size,
            'page': page,
            'rh_only': True
        }
        LOGGER.info('Downloading CVE metadata (page: %s, page_size: %s)', page, page_size)
        r_json = vmaas_post_request(VMAAS_CVES_ENDPOINT, cve_request, session=session)
        if r_json is None:
            success = False
            break
        LOGGER.info(
            'Importing CVE metadata (page: %s, page_size: %s, pages: %s)',
            page, page_size, r_json['pages'])
        cves = r_json['cve_list']
        to_insert = []
        to_update = []
        for cve in cves:
            description = cves[cve]['description']
            impact_id = impact_id_map[cves[cve]['impact']]
            public_date = cves[cve]['public_date'] or None
            modified_date = cves[cve]['modified_date'] or None
            cvss3_score = float(cves[cve]['cvss3_score']) if cves[cve].get(
                'cvss3_score') else None
            cvss3_metrics = cves[cve].get('cvss3_metrics')
            cvss2_score = float(cves[cve]['cvss2_score']) if cves[cve].get(
                'cvss2_score') else None
            cvss2_metrics = cves[cve].get('cvss2_metrics')
            row = (cve, description, impact_id, public_date, modified_date,
                   cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics)
            if cve not in cves_in_db:
                to_insert.append(row)
            else:
                to_update.append(row)
        if to_insert:
            execute_values(cur,
                           """insert into cve_metadata
                              (cve, description, impact_id, public_date, modified_date,
                               cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics)
                              values %s""",
                           to_insert, page_size=len(to_insert))
        if to_update:
            # Bulk update via a VALUES join; the casts restore the types lost
            # when the values travel as text literals.
            execute_values(cur,
                           """update cve_metadata set description = data.description,
                              impact_id = data.impact_id,
                              public_date = cast(data.public_date as timestamp with time zone),
                              modified_date = cast(data.modified_date as timestamp with time zone),
                              cvss3_score = cast(data.cvss3_score as numeric),
                              cvss3_metrics = data.cvss3_metrics,
                              cvss2_score = cast(data.cvss2_score as numeric),
                              cvss2_metrics = data.cvss2_metrics
                              from (values %s) as data (cve, description, impact_id,
                              public_date, modified_date, cvss3_score, cvss3_metrics,
                              cvss2_score, cvss2_metrics)
                              where cve_metadata.cve = data.cve""",
                           to_update, page_size=len(to_update))
        LOGGER.info(
            'Finished importing CVE metadata (page: %s, page_size: %s, pages: %s)',
            page, page_size, r_json['pages'])
        if page >= r_json['pages']:
            break
        page += 1
    cur.close()
    conn.commit()  # pages imported before a failure stay committed
    session.close()
    LOGGER.info('Finished syncing CVE metadata')
    return success
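# Minimal driver sketch (assumed, not part of the source): wiring sync_cve_md()
# into a one-shot sync job.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    if not sync_cve_md(page_size=5000):
        # A failed page download aborts the loop, but pages imported before the
        # failure are already committed, so the sync can simply be re-run.
        raise SystemExit(1)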