Example no. 1
def _vmaas_repos_modified_since(modified_since: str) -> list:
    """Get list of repos modified since `modified_since`"""
    repos_json = {"repository_list": [".*"], "page": 1, "page_size": DEFAULT_PAGE_SIZE,
                  "modified_since": modified_since}
    repos_pages = paging(VMAAS_REPOS_ENDPOINT, repos_json)[1]
    repos = [repo_name_key for repo_name_key in repos_pages]
    LOGGER.info("%d repos found updated since %s", len(repos), modified_since)
    return repos

def test_cves_paging(self, caplog):  # pylint: disable=unused-argument
    """Test downloading all cves pages"""
    cves_json = {"cve_list": [".*"], "page": DEFAULT_PAGE, "page_size": DEFAULT_PAGE_SIZE, "rh_only": True}
    with caplog.at_level(logging.INFO):
        success, cves_pages = paging(VMAAS_CVES_ENDPOINT, cves_json)
        assert success
        assert cves_pages['page'] == 10
    caplog.clear()

def test_repos_paging(self, caplog):  # pylint: disable=unused-argument
    """Test downloading all repos pages"""
    repos_json = {"repository_list": [".*"], "page": DEFAULT_PAGE, "page_size": DEFAULT_PAGE_SIZE}
    with caplog.at_level(logging.INFO):
        success, repos_pages = paging(VMAAS_REPOS_ENDPOINT, repos_json)
        assert success
        assert repos_pages['page'] == 2
    caplog.clear()
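
All of the snippets in these examples call a shared `paging()` helper that is not included in this listing. What follows is only a hypothetical sketch of such a helper, assuming a requests-based POST loop that increments 'page' until every page is fetched and merges the per-page payloads; the parameter names (e.g. `list_keys`) are illustrative and the project's real implementation may differ.

import requests

def paging(endpoint, request_json, list_keys=("cve_list", "repository_list")):
    """Hypothetical sketch of the paging helper used in these examples:
    POST `request_json` to `endpoint`, incrementing 'page' until all pages
    are fetched, and merge the per-page list payloads.
    Returns (success, merged_response)."""
    merged = {}
    page = request_json.get("page", 1)
    while True:
        response = requests.post(endpoint, json=dict(request_json, page=page))
        if response.status_code != 200:
            return False, merged
        payload = response.json()
        if not merged:
            merged = payload
        else:
            # The paged lists are dicts keyed by CVE/repo name, so update() merges them.
            for key in list_keys:
                if key in payload:
                    merged[key].update(payload[key])
            merged["page"] = payload["page"]
        if page >= payload.get("pages", page):
            return True, merged
        page += 1
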
Example no. 4
def _vmaas_repos_modified_since(modified_since: str) -> list:
    """Get list of repos modified since `modified_since`"""
    repos_json = {"repository_list": [".*"], "page": 1, "page_size": DEFAULT_PAGE_SIZE,
                  "modified_since": modified_since}
    success, repos_pages = paging(VMAAS_REPOS_ENDPOINT, repos_json)
    if not success:
        return []
    repos = list(repos_pages["repository_list"])
    LOGGER.info("%d repos found updated since %s", len(repos), modified_since)
    return repos
Example no. 5
def sync_cve_md():
    """Sync all CVE metadata from VMaaS"""
    LOGGER.info('Syncing CVE metadata')
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
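            # Map CVE impact names to their database ids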
            impact_id_map = {}
            cur.execute("select name, id from cve_impact")
            for impact_name, impact_id in cur.fetchall():
                impact_id_map[impact_name] = impact_id
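            # Map CVE names already in the database to their ids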
            cur.execute('select id, cve from cve_metadata')
            cves_in_db = {}
            for cve_tuple in cur.fetchall():
                cves_in_db[cve_tuple[1]] = cve_tuple[0]
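            # Fetch all Red Hat, errata-associated CVEs from VMaaS (the paged response is treated as the full CVE set below)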
            cve_json = {'cve_list': [".*"], 'page': 1, 'page_size': DEFAULT_PAGE_SIZE, 'rh_only': True, 'errata_associated': True}
            success, cve_pages = paging(VMAAS_CVES_ENDPOINT, cve_json)
            if not success:
                return success
            cves = cve_pages['cve_list']
            LOGGER.info("Importing CVE metadata")

            to_insert, to_update, to_delete = process_cve_list(cves, cves_in_db, impact_id_map)

            insert_cves(cur, to_insert)
            update_cves(cur, to_update)

            if to_delete:
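                # Collect CVE ids still referenced from systems or rules; those must not be deleted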
                associated_cves = set()
                LOGGER.info("Deleting %s unnecessary CVE metadata", len(to_delete))
                cur.execute("""select distinct cve_id from system_vulnerabilities""")
                for row in cur.fetchall():
                    associated_cves.add(row[0])
                cur.execute("""select distinct cve_id from cve_rule_mapping""")
                for row in cur.fetchall():
                    associated_cves.add(row[0])
                safety_delete = []
                unable_to_delete = []
                for cve_to_delete in to_delete:
                    cve_id = cves_in_db[cve_to_delete[0]]
                    if cve_id in associated_cves:
                        unable_to_delete.append(cve_to_delete[0])
                    else:
                        safety_delete.append(cve_id)
                if safety_delete:
                    execute_values(cur, """delete from cve_account_data
                                           where cve_id in (%s)""",
                                   list(zip(safety_delete)), page_size=len(safety_delete))
                    execute_values(cur, """delete from cve_metadata where id in (%s)""",
                                   list(zip(safety_delete)), page_size=len(safety_delete))
                    LOGGER.info('Finished deleting unnecessary CVE metadata')
                if unable_to_delete:
                    LOGGER.warning(
                        'Unable to delete %s cves (still referenced from system_vulnerabilities table or have rules): %s',
                        len(unable_to_delete), str(unable_to_delete))
                    LOGGER.debug('Attempting to update information about %s', str(unable_to_delete))
                    cve_json = {'cve_list': unable_to_delete, 'page': 1, 'page_size': DEFAULT_PAGE_SIZE, 'rh_only': True}
                    success, cve_pages = paging(VMAAS_CVES_ENDPOINT, cve_json)
                    if not success:
                        return success
                    cves = cve_pages['cve_list']
                    _, to_update, _ = process_cve_list(cves, cves_in_db, impact_id_map)
                    update_cves(cur, to_update)

            conn.commit()
            LOGGER.info('Finished syncing CVE metadata')
            return success
def sync_cve_md(webhook_queue):  # pylint: disable=too-many-branches, too-many-statements
    """Sync all CVE metadata from VMaaS"""
    LOGGER.info('Syncing CVE metadata')
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            impact_id_map = {}
            cur.execute("select name, id from cve_impact")
            for impact_name, impact_id in cur.fetchall():
                impact_id_map[impact_name] = impact_id
            cur.execute('select cve from cve_metadata')
            cves_in_db = []
            for cve_tuple in cur.fetchall():
                cves_in_db.append(cve_tuple[0])
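            # Count newly discovered CVEs per CVSS score range; used below for webhook notifications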
            cvss_buckets = {'0to3': 0, '3to7': 0, '7to10': 0}
            cve_json = {'cve_list': [".*"], 'page': 1, 'page_size': DEFAULT_PAGE_SIZE, 'rh_only': True}
            success, cve_pages = paging(VMAAS_CVES_ENDPOINT, cve_json)
            if not success:
                return success
            cves = cve_pages['cve_list']
            LOGGER.info("Importing CVE metadata")
            to_insert = []
            to_update = []
            to_delete = []
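            # Build metadata rows: unknown CVEs are queued for insert, known ones for update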
            for cve in cves:
                description = cves[cve]['description']
                impact_id = impact_id_map[cves[cve]['impact']]
                public_date = cves[cve]['public_date'] or None
                modified_date = cves[cve]['modified_date'] or None
                cvss3_score = float(cves[cve]['cvss3_score']) if cves[cve].get('cvss3_score') else None
                cvss3_metrics = cves[cve].get('cvss3_metrics')
                cvss2_score = float(cves[cve]['cvss2_score']) if cves[cve].get('cvss2_score') else None
                cvss2_metrics = cves[cve].get('cvss2_metrics')
                redhat_url = cves[cve].get('redhat_url', None)
                secondary_url = cves[cve].get('secondary_url', None)
                row = (cve, description, impact_id, public_date, modified_date, cvss3_score, cvss3_metrics,
                       cvss2_score, cvss2_metrics, redhat_url, secondary_url)
                if cve not in cves_in_db:
                    to_insert.append(row)
                    cvss_score = cvss3_score if cvss3_score is not None else cvss2_score
                    if cvss_score is None:
                        pass
                    elif cvss_score < 3:
                        cvss_buckets['0to3'] += 1
                    elif 3 <= cvss_score < 7:
                        cvss_buckets['3to7'] += 1
                    elif 7 <= cvss_score <= 10:
                        cvss_buckets['7to10'] += 1
                else:
                    to_update.append(row)
            to_delete = [(cve,) for cve in cves_in_db if cve not in cves]
            if to_insert:
                execute_values(cur, """insert into cve_metadata
                                        (cve, description, impact_id, public_date, modified_date,
                                        cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics, redhat_url,
                                        secondary_url)
                                        values %s""", to_insert, page_size=len(to_insert))
            if to_update:
                execute_values(cur, """update cve_metadata set description = data.description,
                                        impact_id = data.impact_id,
                                        public_date = cast(data.public_date as timestamp with time zone),
                                        modified_date = cast(data.modified_date as timestamp with time zone),
                                        cvss3_score = cast(data.cvss3_score as numeric),
                                        cvss3_metrics = data.cvss3_metrics,
                                        cvss2_score = cast(data.cvss2_score as numeric),
                                        cvss2_metrics = data.cvss2_metrics,
                                        redhat_url = data.redhat_url,
                                        secondary_url = data.secondary_url
                                        from (values %s) as data
                                        (cve, description, impact_id, public_date, modified_date,
                                        cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics, redhat_url,
                                        secondary_url)
                                        where cve_metadata.cve = data.cve""",
                               to_update, page_size=len(to_update))
            if to_delete:
                LOGGER.info("Deleting %s unnecessary CVE metadata", len(to_delete))
                execute_values(cur, """select cm.cve from cve_metadata cm
                                       where not exists(select sv.cve_id from system_vulnerabilities sv
                                       where cm.id = sv.cve_id)
                                       and cm.cve in (%s)""",
                               to_delete, page_size=len(to_delete))
                safety_delete = [cve_name_tuple[0] for cve_name_tuple in cur.fetchall()]
                unable_to_delete = [cve for cve in next(zip(*to_delete)) if cve not in safety_delete]
                if safety_delete:
                    execute_values(cur, """delete from cve_account_data
                                           where cve_id in (select id from cve_metadata
                                           where cve in (%s))""",
                                   [i for i in zip(safety_delete)], page_size=len(safety_delete))
                    execute_values(cur, """delete from cve_metadata where cve in (%s)""",
                                   [i for i in zip(safety_delete)], page_size=len(safety_delete))
                    LOGGER.info('Finished deleting unnecessary CVE metadata')
                if unable_to_delete:
                    LOGGER.warning(
                        'Unable to delete %s cves (still referenced from system_vulnerabilities table): %s',
                        len(unable_to_delete), str(unable_to_delete))

            LOGGER.info("Finished importing CVE metadata (page: %s, page_size: %s, pages: %s)",
                        cve_pages['page'], cve_pages['page_size'], cve_pages['pages'])
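            # Build one notification per non-empty CVSS bucket and send it to the webhook queue for every account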
            msgs = []
            time = dt.datetime.utcnow().isoformat()
            for level in cvss_buckets:
                if cvss_buckets[level] > 0:
                    msg = {
                        'application': 'vulnerability',
                        'event_type': 'new-cve',
                        'level': level,
                        'message': 'Discovered %s new CVEs with cvss score within %s radius' % (cvss_buckets[level],
                                                                                                level),
                        'timestamp': time,
                    }
                    msgs.append(msg)
            if msgs:
                cur.execute('select name from rh_account')
                for rh_account in cur.fetchall():
                    for msg in msgs:
                        msg.update({'account_id': rh_account[0]})
                        webhook_queue.send(msg)
            conn.commit()
            LOGGER.info('Finished syncing CVE metadata')
            return success
def sync_cve_md():  # pylint: disable=too-many-branches, too-many-statements
    """Sync all CVE metadata from VMaaS"""
    LOGGER.info('Syncing CVE metadata')
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            impact_id_map = {}
            cur.execute("select name, id from cve_impact")
            for impact_name, impact_id in cur.fetchall():
                impact_id_map[impact_name] = impact_id
            cur.execute('select cve from cve_metadata')
            cves_in_db = []
            for cve_tuple in cur.fetchall():
                cves_in_db.append(cve_tuple[0])
            cve_json = {
                'cve_list': [".*"],
                'page': 1,
                'page_size': DEFAULT_PAGE_SIZE,
                'rh_only': True
            }
            success, cve_pages = paging(VMAAS_CVES_ENDPOINT, cve_json)
            if not success:
                return success
            cves = cve_pages['cve_list']
            LOGGER.info("Importing CVE metadata")
            to_insert = []
            to_update = []
            to_delete = []
            for cve in cves:
                description = cves[cve]['description']
                impact_id = impact_id_map[cves[cve]['impact']]
                public_date = cves[cve]['public_date'] or None
                modified_date = cves[cve]['modified_date'] or None
                cvss3_score = float(cves[cve]['cvss3_score']) if cves[cve].get(
                    'cvss3_score') else None
                cvss3_metrics = cves[cve].get('cvss3_metrics')
                cvss2_score = float(cves[cve]['cvss2_score']) if cves[cve].get(
                    'cvss2_score') else None
                cvss2_metrics = cves[cve].get('cvss2_metrics')
                redhat_url = cves[cve].get('redhat_url', None)
                secondary_url = cves[cve].get('secondary_url', None)
                row = (cve, description, impact_id, public_date, modified_date,
                       cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics,
                       redhat_url, secondary_url)
                if cve not in cves_in_db:
                    to_insert.append(row)
                else:
                    to_update.append(row)
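            # CVEs present in the database but missing from the VMaaS response become deletion candidates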
            to_delete = [(cve, ) for cve in cves_in_db if cve not in cves]
            if to_insert:
                execute_values(cur,
                               """insert into cve_metadata
                                        (cve, description, impact_id, public_date, modified_date,
                                        cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics, redhat_url,
                                        secondary_url)
                                        values %s""",
                               to_insert,
                               page_size=len(to_insert))
            if to_update:
                execute_values(
                    cur,
                    """update cve_metadata set description = data.description,
                                        impact_id = data.impact_id,
                                        public_date = cast(data.public_date as timestamp with time zone),
                                        modified_date = cast(data.modified_date as timestamp with time zone),
                                        cvss3_score = cast(data.cvss3_score as numeric),
                                        cvss3_metrics = data.cvss3_metrics,
                                        cvss2_score = cast(data.cvss2_score as numeric),
                                        cvss2_metrics = data.cvss2_metrics,
                                        redhat_url = data.redhat_url,
                                        secondary_url = data.secondary_url
                                        from (values %s) as data
                                        (cve, description, impact_id, public_date, modified_date,
                                        cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics, redhat_url,
                                        secondary_url)
                                        where cve_metadata.cve = data.cve""",
                    to_update,
                    page_size=len(to_update))
            if to_delete:
                LOGGER.info("Deleting %s unnecessary CVE metadata",
                            len(to_delete))
                execute_values(cur,
                               """select cm.cve from cve_metadata cm
                                       where not exists(select sv.cve_id from system_vulnerabilities sv
                                       where cm.id = sv.cve_id)
                                       and cm.cve in (%s)""",
                               to_delete,
                               page_size=len(to_delete))
                safety_delete = [
                    cve_name_tuple[0] for cve_name_tuple in cur.fetchall()
                ]
                unable_to_delete = [
                    cve for cve in next(zip(*to_delete))
                    if cve not in safety_delete
                ]
                if safety_delete:
                    execute_values(cur,
                                   """delete from cve_account_data
                                           where cve_id in (select id from cve_metadata
                                           where cve in (%s))""",
                                   [i for i in zip(safety_delete)],
                                   page_size=len(safety_delete))
                    execute_values(
                        cur,
                        """delete from cve_metadata where cve in (%s)""",
                        [i for i in zip(safety_delete)],
                        page_size=len(safety_delete))
                    LOGGER.info('Finished deleting unnecessary CVE metadata')
                if unable_to_delete:
                    LOGGER.warning(
                        'Unable to delete %s cves (still referenced from system_vulnerabilities table): %s',
                        len(unable_to_delete), str(unable_to_delete))

            LOGGER.info(
                "Finished importing CVE metadata (page: %s, page_size: %s, pages: %s)",
                cve_pages['page'], cve_pages['page_size'], cve_pages['pages'])
            conn.commit()
            LOGGER.info('Finished syncing CVE metadata')
            return success