def db_import_system(upload_data, vmaas_json: str, repo_list: list):
    """Import initial system record to the DB, report back on what we did."""
    status = ImportStatus.FAILED
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                host = upload_data["host"]
                import_status, system_id = db_import_system_platform(
                    cur, host['id'], host['account'], upload_data['platform_metadata']['url'],
                    host.get("display_name"), host.get('stale_timestamp'),
                    host.get('stale_warning_timestamp'), host.get('culled_timestamp'), vmaas_json)
                if import_status is None:
                    return status
                status |= import_status
                db_import_repos(cur, repo_list)
                db_import_system_repos(cur, repo_list, system_id)
                db_delete_other_system_repos(cur, repo_list, system_id)
                conn.commit()
                status -= ImportStatus.FAILED
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error importing system: ")
                FailedCache.push(FailedCache.upload_cache, upload_data)
                LOGGER.info("Remembered upload %s", str(upload_data))
                conn.rollback()
    return status

def delete_cve(cve_names: str) -> bool:
    """Delete CVEs"""
    if not cve_names:
        LOGGER.info('Need to specify CVE')
        return False
    cve_list = cve_names.split(',')
    LOGGER.info('Deleting %s CVE metadata', len(cve_list))
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            success = True
            execute_values(cur, """select id from cve_metadata where cve in (%s)""",
                           [(cve,) for cve in cve_list], page_size=len(cve_list))
            cve_ids_to_delete = cur.fetchall()
            if cve_ids_to_delete:
                execute_values(cur, """delete from cve_account_data where cve_id in (%s)""",
                               cve_ids_to_delete, page_size=len(cve_ids_to_delete))
                execute_values(cur, """delete from system_vulnerabilities where cve_id in (%s)""",
                               cve_ids_to_delete, page_size=len(cve_ids_to_delete))
                execute_values(cur, """delete from cve_metadata where id in (%s)""",
                               cve_ids_to_delete, page_size=len(cve_ids_to_delete))
            conn.commit()
    LOGGER.info('Finished deleting CVE metadata')
    return success

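# Illustrative only (not from the original source): delete_cve takes a single
# comma-separated string of CVE names, as implied by the split(',') above.
# The CVE names below are placeholders.
#
#   delete_cve("CVE-2021-1111,CVE-2021-2222")
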
async def re_evaluate_systems(self, repo_based: bool):
    """Schedule re-evaluation for all systems in DB."""
    with DatabasePoolConnection() as conn:
        if repo_based:
            updated_repos = self._get_updated_repos(conn)
        with NamedCursor(conn) as cur:
            if repo_based:
                LOGGER.info("Re-evaluating in repo-based mode")
                self.select_repo_based_inventory_ids(cur, updated_repos)
            else:
                LOGGER.info("Re-evaluating all systems")
                self.select_all_inventory_ids(cur)
            total_scheduled = 0
            while True:
                await RE_EVALUATION_KAFKA_BATCH_SEMAPHORE.acquire()
                rows = cur.fetchmany(size=RE_EVALUATION_KAFKA_BATCH_SIZE)
                if not rows:
                    RE_EVALUATION_KAFKA_BATCH_SEMAPHORE.release()
                    break
                msgs = [{"type": "re-evaluate_system", "host": {"id": inventory_id}}
                        for inventory_id, in rows]
                total_scheduled += len(msgs)
                future = self.evaluator_queue.send_list(msgs)
                future.add_done_callback(lambda x: RE_EVALUATION_KAFKA_BATCH_SEMAPHORE.release())
            LOGGER.info("%s systems scheduled for re-evaluation", total_scheduled)
        conn.commit()

def db_import_system(inventory_id: str, rh_account: str, s3_url: str, vmaas_json: str, repo_list: list):
    """Import initial system record to the DB, report back on what we did."""
    status = ImportStatus.FAILED
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                import_status, system_id = db_import_system_platform(
                    cur, inventory_id, rh_account, s3_url, vmaas_json)
                if import_status is None:
                    return status
                status |= import_status
                db_import_repos(cur, repo_list)
                db_import_system_repos(cur, repo_list, system_id)
                db_delete_other_system_repos(cur, repo_list, system_id)
                conn.commit()
                status -= ImportStatus.FAILED
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error importing system: ")
                conn.rollback()
    return status

def _mark_evaluated(inventory_id: str):
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            cur.execute("""update system_platform set last_evaluation = CURRENT_TIMESTAMP
                           where inventory_id = %s""", (inventory_id,))
        conn.commit()

def test_rule_not_changing_cves(self, pg_db_conn, cleanup, inv_id, cve_id, rule_id, cve_name):  # pylint: disable=unused-argument
    """Inserts CVE which is tied to inactive rule, nothing shall happen"""
    with DatabasePool(2):
        with DatabasePoolConnection() as conn:
            with conn.cursor() as cur:
                system = deepcopy(SYSTEM_DICT)
                system['inventory_id'] = inv_id
                rule_hits = {cve_id: {'id': rule_id,
                                      'details': '{"detail_key": "detail_value"}',
                                      'cve_name': cve_name}}
                orig_cve_count_cache = self._system_cache(cur, inv_id)
                orig_caches = self._account_caches(cur)
                db_import_system(system, rule_hits)
                new_cve_count_cache = self._system_cache(cur, inv_id)
                new_caches = self._account_caches(cur)
                assert orig_cve_count_cache == new_cve_count_cache
                assert orig_caches == new_caches
                assert all([self._cache_check(cur, account_id) for account_id in ('0', '1', '2')])

def db_import_rule(rule_id: str, rule_cves: list):
    """Import single error key into database"""
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                cur.execute("""INSERT INTO insights_rule (name) VALUES (%s)
                               ON CONFLICT (name) DO UPDATE SET name = %s
                               RETURNING id AS inserted""",
                            ((rule_id,), rule_id))
                inserted = cur.fetchone()
                RULES_CACHE[rule_id] = inserted
                to_insert = []
                for rule_cve in rule_cves:
                    if rule_cve not in CVES_CACHE:
                        db_import_cve(rule_cve)
                    to_insert.append((inserted, CVES_CACHE[rule_cve]))
                execute_values(cur, """INSERT INTO cve_rule_mapping (rule_id, cve_id) VALUES %s
                                       ON CONFLICT DO NOTHING""",
                               to_insert, page_size=len(to_insert))
                conn.commit()
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error updating rules cache: ")
                conn.rollback()

def process_upload_or_re_evaluate(self, msg_dict: dict, loop=None):
    """Process function to upload new file or re-evaluate system"""
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                LOGGER.info("Received message type: %s", msg_dict['type'])
                # Lock the system for processing
                cur.execute("""SELECT id, inventory_id, vmaas_json, rh_account_id, opt_out
                               FROM system_platform
                               WHERE inventory_id = %s
                               FOR UPDATE""", (msg_dict['host']['id'],))
                system_platform = cur.fetchone()
                if system_platform is not None:
                    self.evaluate_vmaas(system_platform, cur, loop=loop)
                    conn.commit()
                    if msg_dict['type'] == 'upload_new_file':
                        send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'success', loop=loop)
                else:
                    INV_ID_NOT_FOUND.inc()
                    LOGGER.error("System with inventory_id not found in DB: %s", msg_dict['host']['id'])
                    send_msg_to_payload_tracker(PAYLOAD_TRACKER_PRODUCER, msg_dict, 'error',
                                                status_msg='System with inventory_id not found in DB: %s'
                                                           % msg_dict['host']['id'],
                                                loop=loop)
            except DatabaseError:
                LOGGER.exception("Unable to store data: ")
                FailedCache.push(FailedCache.upload_cache, msg_dict)
                LOGGER.info("Remembered failed upload: %s", str(msg_dict))
                conn.rollback()

def db_update_system(msg_dict):
    """Update system with inventory ID."""
    rtrn = {'updated': False, 'failed': True}
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                cur.execute("""UPDATE system_platform SET display_name = %s
                               WHERE inventory_id = %s
                               RETURNING id AS updated""",
                            (msg_dict['host']['display_name'], msg_dict['host']['id'],))
                updated = cur.fetchone()
                rtrn['updated'] = bool(updated)
                conn.commit()
                rtrn['failed'] = False
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error updating system: ")
                conn.rollback()
    return rtrn

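# Illustrative only: the message shape db_update_system reads, inferred from the
# keys accessed above ('host' -> 'id' / 'display_name'). The values are placeholders.
#
#   db_update_system({"host": {"id": "00000000-0000-0000-0000-000000000001",
#                              "display_name": "host01.example.com"}})
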
def test_rule_adding_cve(self, pg_db_conn, cleanup, cve_id, rule_id, cve_name):  # pylint: disable=unused-argument
    """Test adding a rule which adds one CVE to a system.

    In some cases we are removing other rules from a system, but the rules in this test
    were chosen so they're either not active or not VMaaS mitigated.
    """
    with DatabasePool(2):
        with DatabasePoolConnection() as conn:
            with conn.cursor() as cur:
                system = deepcopy(SYSTEM_DICT)
                system['inventory_id'] = 'INV-4'
                rule_hits = {cve_id: {'id': rule_id,
                                      'details': '{"detail_key": "detail_value"}',
                                      'cve_name': cve_name}}
                orig_cve_count_cache = self._system_cache(cur, 'INV-4')
                orig_caches = self._account_caches(cur)
                db_import_system(system, rule_hits)
                new_cve_count_cache = self._system_cache(cur, 'INV-4')
                new_caches = self._account_caches(cur)
                self._test_counts(orig_cve_count_cache, new_cve_count_cache, orig_caches, new_caches, 1, cve_id)
                assert all([self._cache_check(cur, account_id) for account_id in ('0', '1', '2')])

def db_delete_system(inventory_id):
    """Delete system with inventory ID."""
    rtrn = {'deleted': False, 'failed': True}
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                curr_time = datetime.now(tz=pytz.utc)
                cur.execute("""INSERT INTO deleted_systems (inventory_id, when_deleted)
                               VALUES (%s, %s)
                               ON CONFLICT (inventory_id)
                               DO UPDATE SET when_deleted = EXCLUDED.when_deleted""",
                            (inventory_id, curr_time,))
                cur.execute("""DELETE FROM deleted_systems WHERE when_deleted < %s""",
                            (curr_time - timedelta(hours=SYSTEM_DELETION_THRESHOLD),))
                cur.execute("""SELECT deleted_inventory_id FROM delete_system(%s)""",
                            (inventory_id,))
                system_platform = cur.fetchone()
                if system_platform is not None:
                    rtrn['deleted'] = True
                conn.commit()
                rtrn['failed'] = False
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error deleting system: ")
                conn.rollback()
    return rtrn

def upgrade(self):
    """perform database upgrade"""
    with DatabasePool(1):
        with DatabasePoolConnection() as conn:
            try:
                self._get_db_lock(conn)
                db_version = self._get_current_db_version(conn)
                if db_version == self.version_max:
                    LOGGER.info('Database is up to date at version: %d', db_version)
                    return
                if db_version > self.version_max:
                    msg = 'Database version %d is greater than upgrade version %d' % (db_version, self.version_max)
                    LOGGER.warning(msg)
                    return
                LOGGER.info('Database requires upgrade from version %d to %d', db_version, self.version_max)
                upgrades_to_apply = self._get_upgrades_to_apply(db_version, self.version_max)
                for upgrade in upgrades_to_apply:
                    self._apply_upgrade(upgrade['ver'], upgrade['script'], conn)
            finally:
                self._release_db_lock(conn)

def test_mitigation_to_hit(self, pg_db_conn, cleanup):  # pylint: disable=unused-argument
    """Test replacing a rule which has mitigation and is active with another one
    which is inactive but has mitigation."""
    with DatabasePool(2):
        with DatabasePoolConnection() as conn:
            with conn.cursor() as cur:
                inv_id = 'INV-17'
                system = deepcopy(SYSTEM_DICT)
                system['inventory_id'] = inv_id
                orig_cve_count_cache = self._system_cache(cur, inv_id)
                orig_caches = self._account_caches(cur)
                db_import_system(system, {
                    1: {'id': 1, 'mitigation_reason': 'SELinux mitigates', 'cve_name': 'CVE-2014-1'},
                    9: {'id': 4, 'details': '{"detail_key": "detail_value"}', 'cve_name': 'CVE-2018-1'}
                })
                new_cve_count_cache = self._system_cache(cur, inv_id)
                new_caches = self._account_caches(cur)
                self._test_counts(orig_cve_count_cache, new_cve_count_cache, orig_caches, new_caches, 1, 9)
                assert all([self._cache_check(cur, account_id) for account_id in ('0', '1', '2')])

def db_delete_system(msg_dict):
    """Delete system with inventory ID."""
    rtrn = {'deleted': False, 'failed': True}
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                rh_account_id = db_account_lookup(cur, msg_dict['account'])
                # xmax = 0 means the row was freshly inserted rather than updated
                cur.execute("""INSERT INTO system_platform
                               (inventory_id, rh_account_id, opt_out, stale, when_deleted)
                               VALUES (%s, %s, true, true, now())
                               ON CONFLICT (inventory_id)
                               DO UPDATE SET opt_out = EXCLUDED.opt_out,
                                             stale = EXCLUDED.stale,
                                             when_deleted = EXCLUDED.when_deleted
                               RETURNING (xmax = 0) AS inserted""",
                            (msg_dict['id'], rh_account_id,))
                inserted, = cur.fetchone()
                rtrn['deleted'] = not inserted
                conn.commit()
                rtrn['failed'] = False
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error deleting system: ")
                FailedCache.push(FailedCache.delete_cache, msg_dict)
                LOGGER.info("Remembered deleting %s", str(msg_dict))
                conn.rollback()
    return rtrn

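# Illustrative only: the delete-message shape this variant reads, inferred from the
# keys accessed above ('id' and 'account'); the values are placeholders.
#
#   db_delete_system({"id": "00000000-0000-0000-0000-000000000002", "account": "123456"})
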
def process_upload_or_re_evaluate(self, msg_dict: dict, loop=None):
    """Process function to upload new file or re-evaluate system"""
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                LOGGER.info("Received message type: %s", msg_dict['type'])
                # Lock the system for processing
                cur.execute("""SELECT id, inventory_id, vmaas_json, rh_account_id, opt_out
                               FROM system_platform
                               WHERE inventory_id = %s
                               FOR UPDATE""", (msg_dict['inventory_id'],))
                system_platform = cur.fetchone()
                if system_platform is not None:
                    self.evaluate_vmaas(system_platform, cur, loop=loop)
                    conn.commit()
                else:
                    INV_ID_NOT_FOUND.inc()
                    LOGGER.error("System with inventory_id not found in DB: %s", msg_dict['inventory_id'])
            except DatabaseError:
                LOGGER.exception("Unable to store data: ")
                conn.rollback()

def _materialize_account_cache(account_id, account_name, current_cache):
    """Materialize the per-account CVE cache (cve_account_cache) for a single account."""
    LOGGER.debug("Materializing cache for account '%s' started.", account_name)
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            cur.execute("""SELECT sv.cve_id, COUNT(*) AS systems_affected,
                                  SUM(CASE WHEN sv.status_id != cad.status_id THEN 1 ELSE 0 END) AS systems_status_divergent
                           FROM system_vulnerabilities_active sv
                           INNER JOIN system_platform sp ON
                               (sv.system_id = sp.id AND sp.rh_account_id = %s AND sp.opt_out = false
                                AND sp.stale = false AND sp.when_deleted IS NULL)
                           INNER JOIN inventory.hosts ih ON sp.inventory_id = ih.id
                           LEFT JOIN cve_account_data cad ON
                               (sv.cve_id = cad.cve_id AND cad.rh_account_id = %s)
                           WHERE sv.rh_account_id = %s
                             AND (sv.mitigation_reason IS NULL OR sv.rule_id IN
                                  (SELECT id FROM insights_rule WHERE active = false AND NOT rule_only))
                             AND (sv.when_mitigated IS NULL OR sv.rule_id IN
                                  (SELECT id FROM insights_rule WHERE active = true AND NOT rule_only))
                           GROUP BY sv.cve_id""",
                        (account_id, account_id, account_id,))
            to_insert = []
            to_update = []
            for cve_id, systems_affected, systems_status_divergent in cur.fetchall():
                cve_cache = current_cache.get(account_id, {}).get(cve_id)
                if not cve_cache:
                    to_insert.append((account_id, cve_id, systems_affected, systems_status_divergent))
                else:
                    if cve_cache[0] != systems_affected or cve_cache[1] != systems_status_divergent:
                        to_update.append((account_id, cve_id, systems_affected, systems_status_divergent))
                    current_cache[account_id].pop(cve_id)
            to_delete = list(current_cache.get(account_id, {}))
            LOGGER.debug("Account '%s' - to_insert: %s", account_name, len(to_insert))
            if to_insert:
                execute_values(cur, """INSERT INTO cve_account_cache
                                       (rh_account_id, cve_id, systems_affected, systems_status_divergent)
                                       VALUES %s""",
                               to_insert, page_size=len(to_insert))
            LOGGER.debug("Account '%s' - to_update: %s", account_name, len(to_update))
            if to_update:
                execute_values(cur, """UPDATE cve_account_cache AS cac
                                       SET systems_affected = v.systems_affected,
                                           systems_status_divergent = v.systems_status_divergent
                                       FROM (VALUES %s) AS v(rh_account_id, cve_id, systems_affected, systems_status_divergent)
                                       WHERE v.rh_account_id = cac.rh_account_id AND v.cve_id = cac.cve_id""",
                               to_update, page_size=len(to_update))
            LOGGER.debug("Account '%s' - to_delete: %s", account_name, len(to_delete))
            if to_delete:
                cur.execute("""DELETE FROM cve_account_cache WHERE rh_account_id = %s AND cve_id IN %s""",
                            (account_id, tuple(to_delete)))
            cur.execute("""UPDATE rh_account SET cve_cache_from = now() WHERE id = %s""", (account_id,))
        conn.commit()
    LOGGER.info("Materializing cache for account '%s' finished.", account_name)

def db_import_system(system_data: dict, rule_hits: dict):
    """Import results from advisor into DB"""
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            rh_account_id, system_id, opt_out = db_import_system_platform(cur, system_data)
            if system_id is None:
                return
            db_import_rule_hits(cur, rh_account_id, system_id, opt_out, rule_hits)
            conn.commit()

def test_import_system_platform(self, pg_db_conn):  # pylint: disable=unused-argument
    """Test insertion of system data"""
    with DatabasePool(1):
        with DatabasePoolConnection() as conn:
            with conn.cursor() as cur:
                system = deepcopy(SYSTEM_DICT)
                system['inventory_id'] = 'INV-111'
                db_import_system_platform(cur, system)  # import new system
                db_import_system_platform(cur, system)  # update recently imported system

def db_import_system(inventory_id, rh_account, s3_url, vmaas_json, satellite_managed):
    """Import initial system record to the DB, report back on what we did."""
    rtrn = {'inserted': False, 'updated': False, 'changed': False, 'failed': True}
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                unchanged_since = None
                json_checksum = hashlib.sha256(vmaas_json.encode('utf-8')).hexdigest()
                curr_time = datetime.now(tz=pytz.utc)
                # xmax is a PG system column used to find out if the row was inserted or updated
                cur.execute("""INSERT INTO system_platform
                               (inventory_id, rh_account, s3_url, vmaas_json, json_checksum, satellite_managed)
                               VALUES (%s, %s, %s, %s, %s, %s)
                               ON CONFLICT (inventory_id)
                               DO UPDATE SET rh_account = %s, s3_url = %s, vmaas_json = %s,
                                             json_checksum = %s, satellite_managed = %s
                               RETURNING (xmax = 0) AS inserted, unchanged_since""",
                            (inventory_id, rh_account, s3_url, vmaas_json, json_checksum, satellite_managed,
                             rh_account, s3_url, vmaas_json, json_checksum, satellite_managed,))
                rtrn['inserted'], unchanged_since = cur.fetchone()
                conn.commit()
                # If inserting, or if unchanged_since is newer than 'now', we want to evaluate this upload
                rtrn['changed'] = rtrn['inserted'] or (unchanged_since > curr_time)
                if rtrn['inserted']:
                    NEW_SYSTEM.inc()
                else:
                    rtrn['updated'] = True
                    UPDATE_SYSTEM.inc()
                rtrn['failed'] = False
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error importing system: ")
                conn.rollback()
    return rtrn

def run(self):
    """Run the usage reports."""
    if self.debug:
        print("running...\n")
    with DatabasePoolConnection() as conn:
        self._report_cve_status_usage(conn)
        print(" ")
        self._report_system_cve_status_usage(conn)
        print(" ")
        self._report_cve_business_risk_usage(conn)
        conn.commit()

def db_import_system(system_data: dict, rule_hits: dict, loop=None):
    """Import results from advisor into DB"""
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            rh_account_id, system_id, opt_out, stale = db_import_system_platform(cur, system_data)
            if system_id is None:
                conn.rollback()
                return
            db_import_rule_hits(cur, rh_account_id, system_data['inventory_id'], system_id,
                                opt_out, stale, rule_hits, loop)
            conn.commit()

def run(self):
    """Run the metrics gathering."""
    if self.debug:
        print("running...\n")
    with DatabasePoolConnection() as conn:
        cve_status_usage = self.query_cve_status_usage(conn)
        system_cve_status_usage = self.query_system_cve_status_usage(conn)
        cve_business_risk_usage = self.query_cve_business_risk_usage(conn)
        conn.commit()
    raw_data = {
        "cve_status_usage": cve_status_usage,
        "system_cve_status_usage": system_cve_status_usage,
        "cve_business_risk_usage": cve_business_risk_usage
    }
    today_key = self.date_to_key(self.today)
    key_list = self.bucket_mgr.list_keys()
    if today_key in key_list:
        # Replace the existing day's data with that just gathered,
        # in case something went wrong the first time around.
        print("Replacing %s data" % today_key)
        self.bucket_mgr.delete_by_key(today_key)
        key_list.remove(today_key)
    self.bucket_mgr.upload_data(today_key, raw_data)
    if self.today.day == 1:
        # It's day one, so do the monthly report.
        # Compare to the first day of the previous month.
        if self.today.month == 1:
            previous_date = datetime.date(self.today.year - 1, 12, 1)
        else:
            previous_date = datetime.date(self.today.year, self.today.month - 1, 1)
    else:
        # Not a monthly report, so compare with the first of the
        # current month to see how this month is doing.
        previous_date = datetime.date(self.today.year, self.today.month, 1)
    previous_key = self.date_to_key(previous_date)
    if previous_key in key_list:
        previous_data = self.bucket_mgr.retrieve_data(previous_key)
    else:
        previous_data = {
            "cve_status_usage": [],
            "system_cve_status_usage": [],
            "cve_business_risk_usage": []
        }
    results = self.compare_data(previous_data, raw_data)
    self.send_results(results)
    keys_to_delete = []
    for key in key_list:
        if self.key_to_date(key).day != 1:
            keys_to_delete.append(key)
    if keys_to_delete:
        if self.debug:
            print("Clearing unnecessary data from bucket")
        self.bucket_mgr.delete_by_key_list(keys_to_delete)

def _materialize_caches(account_id, account_name, current_cache):
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            _materialize_cve_cache(cur, account_id, account_name, current_cache)
            _materialize_rule_cache(cur, account_id, account_name, current_cache)
            cur.execute("""UPDATE rh_account SET cve_cache_from = now() WHERE id = %s""", (account_id,))
        conn.commit()

def test_db_metrics(pg_db_conn, monkeypatch):  # pylint: disable=unused-argument
    """Test gathering of DB metrics"""
    with DatabasePoolConnection() as conn:
        monkeypatch.setattr(dm, 'get_conn', lambda: conn)
        dm.run()
        assert dm.METRIC_SYSTEMS.collect()[0].samples[0].value == 20  # there are 20 systems in DB
        assert dm.METRIC_CYNDI_SYSTEMS.collect()[0].samples[0].value == 20  # there are also 20 systems syndicated
        assert len(dm.METRIC_TABLE_SIZE.collect()[0].samples) == 277

def db_init_repo_cache():
    """Populate initial repo cache"""
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                cur.execute("""SELECT id, name FROM repo""")
                for repo_id, repo_name in cur.fetchall():
                    REPO_ID_CACHE[repo_name] = repo_id
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error caching repos: ")
                conn.rollback()

def db_import_cve(cve: str):
    """Import missing CVE metadata into database"""
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                cur.execute("""INSERT INTO cve_metadata (cve, description, impact_id) VALUES %s
                               ON CONFLICT (cve) DO UPDATE SET cve = %s
                               RETURNING id AS inserted""",
                            ((cve, 'unknown', 0,), cve))
                conn.commit()
                inserted = cur.fetchone()
                CVES_CACHE[cve] = inserted[0]
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error during inserting CVE: ")
                conn.rollback()

def validate_system_inventory(inventory_id: str, timestamp: str) -> bool:
    """Check if the system is still valid in inventory, since messages also come from
    insights-engine. Validity is only checked for systems whose message is older than
    CFG.listeners_valid_system_sec seconds."""
    if _is_system_msg_recent(inventory_id, timestamp):
        return True
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            cur.execute("""SELECT true FROM inventory.hosts AS ih WHERE ih.id = %s LIMIT 1""",
                        (inventory_id,))
            res = bool(cur.fetchone())
    return res

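# Illustrative only: a listener-side guard built on validate_system_inventory, with
# placeholder identifiers. The function returns True for recent messages or for
# systems still present in inventory.hosts, so the skip branch handles deleted systems.
#
#   if not validate_system_inventory(host_id, msg_timestamp):
#       LOGGER.info("Skipping message for system no longer in inventory: %s", host_id)
#       return
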
def ensure_minimal_schema_version():
    """Ensure that database schema is up-to-date, wait if it's not."""
    with DatabasePool(1):
        with DatabasePoolConnection() as conn:
            with conn.cursor() as cur:
                while True:
                    cur.execute("SELECT version FROM db_version WHERE name = 'schema_version'")
                    current_schema = int(cur.fetchone()[0])
                    if current_schema >= MINIMAL_SCHEMA:
                        LOGGER.info("Current schema version: %s, minimal required: %s, OK",
                                    current_schema, MINIMAL_SCHEMA)
                        return
                    LOGGER.warning("Current schema version: %s, minimal required: %s, waiting...",
                                   current_schema, MINIMAL_SCHEMA)
                    sleep(10)

def test_sync(pg_db_conn, monkeypatch):  # pylint: disable=unused-argument
    """Test rules sync"""
    tmpdir = path.join(Path(__file__).resolve().parent, 'data')
    monkeypatch.setattr(rgs, 'CONTENT_GIT_NAME', 'insights-content-vulnerability')
    monkeypatch.setattr(rgs, 'PLAYBOOKS_GIT_NAME', 'insights-playbooks')
    with DatabasePoolConnection() as conn:
        monkeypatch.setattr(rgs, 'get_conn', lambda: conn)
        rgs.sync(tmpdir, 'xxee', 'eexx')
        cur = pg_db_conn.cursor()
        cur.execute("""SELECT id, name, active, rule_only FROM insights_rule
                       WHERE name LIKE 'CVE_123_456%' ORDER BY name""")
        rows = cur.fetchall()
        assert len(rows) == 3
        assert rows[0][1] == 'CVE_123_456'
        assert rows[0][2] == True  # pylint: disable=singleton-comparison
        assert rows[0][3] == True  # pylint: disable=singleton-comparison
        assert rows[1][1] == 'CVE_123_456|CVE_123_456_DISABLED'
        assert rows[1][2] == False  # pylint: disable=singleton-comparison
        assert rows[1][3] == False  # pylint: disable=singleton-comparison
        assert rows[2][1] == 'CVE_123_456|CVE_123_456_ENABLED'
        assert rows[2][2] == True  # pylint: disable=singleton-comparison
        assert rows[2][3] == False  # pylint: disable=singleton-comparison
        cur.execute("""SELECT cve, celebrity_name FROM cve_metadata WHERE cve = 'CVE-123-456'""")
        row = cur.fetchone()
        assert row is not None
        assert row[1] == 'test1'
        cur.execute("""SELECT cve, celebrity_name FROM cve_metadata WHERE cve = 'CVE-2018-6'""")
        row = cur.fetchone()
        assert row is not None
        assert row[1] is None
        cur.close()

def db_import_rule(rule_id: str):
    """Import single error key into database"""
    with DatabasePoolConnection() as conn:
        with conn.cursor() as cur:
            try:
                cur.execute("""INSERT INTO insights_rule (name) VALUES (%s)
                               ON CONFLICT (name) DO UPDATE SET name = %s
                               RETURNING id AS inserted""",
                            ((rule_id,), rule_id))
                conn.commit()
                inserted = cur.fetchone()
                RULES_CACHE[rule_id] = inserted
            except DatabaseError:
                DATABASE_ERROR.inc()
                LOGGER.exception("Error updating rules cache: ")
                conn.rollback()