Example #1
    def __init__(self, kafka_topics_in):
        # connect to the Messaging Service
        self.consumer = mqueue.MQReader(kafka_topics_in) # [kafka_evaluator_topic]

        LOGGER.info("Using BOOTSTRAP_SERVERS: %s", mqueue.BOOTSTRAP_SERVERS)
        LOGGER.info("Using GROUP_ID: %s", mqueue.GROUP_ID)
        LOGGER.info("Using TOPICS: %s", ", ".join(kafka_topics_in))

        self.producer = mqueue.MQWriter(kafka_evaluator_topic)

        # get DB connection
        init_db()
        self.conn = DatabaseHandler.get_connection()
        self.session = requests.Session()
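The requests.Session created in this constructor is presumably reused for repeated HTTP calls (for example to VMaaS); a minimal sketch of that pattern, in which the endpoint URL and request body are illustrative assumptions and not part of the excerpt:

import requests

# Sketch only: one shared Session reuses the underlying TCP connection across
# calls. The URL and payload below are assumed for illustration.
session = requests.Session()
response = session.post("http://vmaas:8080/api/v1/updates",
                        json={"package_list": ["bash-4.4.19-10.el8.x86_64"]},
                        timeout=30)
response.raise_for_status()
updates = response.json()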
Example #2
MISSING_ID = Counter('ve_listener_upl_missing_inventory_id',
                     '# of upload-msgs missing inventory_id')
ARCHIVE_PARSE_FAILURE = Counter('ve_listener_upl_archive_exceptions',
                                '# of exceptions during archive-processing')
ARCHIVE_RETRIEVE_ATTEMPT = Counter('ve_listener_upl_archive_tgz_attempt',
                                   '# of times retried archive-retrieval')
ARCHIVE_RETRIEVE_FAILURE = Counter('ve_listener_upl_archive_tgz_failure',
                                   '# of times gave up on archive retrieval')
ARCHIVE_RETRIEVE_INVALID_HTTP = Counter(
    've_listener_upl_archive_tgz_invalid_http',
    '# archive downloaded with invalid http code')
ARCHIVE_NO_RPMDB = Counter('ve_listener_upl_no_rpmdb',
                           '# of systems ignored due to missing rpmdb')

# kafka clients
UPLOAD_QUEUE = mqueue.MQReader(mqueue.UPLOAD_TOPIC)
EVALUATOR_QUEUE = mqueue.MQWriter(mqueue.EVALUATOR_TOPIC)


async def terminate(_, loop):
    """Trigger shutdown."""
    LOGGER.info("Signal received, stopping kafka consumers.")
    await UPLOAD_QUEUE.stop()
    await EVALUATOR_QUEUE.stop()
    loop.stop()


def on_thread_done(future):
    """Callback to call after ThreadPoolExecutor worker finishes."""
    try:
        future.result()
    except Exception:  # pylint: disable=broad-except
        LOGGER.exception("Future %s raised an exception", future)
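The terminate() coroutine above is meant to be hooked up as a signal handler on the event loop; a minimal sketch of that wiring using the standard asyncio API (the surrounding startup code is assumed, not taken from the excerpt):

import asyncio
import signal

loop = asyncio.get_event_loop()
# Sketch only: route SIGTERM/SIGINT into the terminate() coroutine defined above.
for sig in (signal.SIGTERM, signal.SIGINT):
    loop.add_signal_handler(sig, lambda s=sig: asyncio.ensure_future(terminate(s, loop)))
loop.run_forever()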

Example #3
MESSAGE_PARSE_ERROR = Counter('ve_listener_upl_message_parse_error',
                              '# of message parse errors')
NEW_REPO = Counter('ve_listener_upl_new_repo', '# of new repos inserted')
NEW_RH_ACCOUNT = Counter('ve_listener_upl_new_rh_account',
                         '# of new rh accounts inserted')
NEW_SYSTEM_REPO = Counter('ve_listener_upl_new_system_repo',
                          '# of new system_repo pairs inserted')
DELETED_SYSTEM_REPO = Counter('ve_listener_upl_system_repo_deleted',
                              '# deleted system_repo pairs')
INVALID_IDENTITY = Counter('ve_listener_upl_invalid_identity',
                           '# of skipped uploads because of invalid identity')
MISSING_SMART_MANAGEMENT = Counter(
    've_listener_upl_non_smart_management',
    '# of skipped uploads because of entitlement check')

# kafka clients
LISTENER_QUEUE = mqueue.MQReader([mqueue.UPLOAD_TOPIC, mqueue.EVENTS_TOPIC])
EVALUATOR_QUEUE = mqueue.MQWriter(mqueue.EVALUATOR_TOPIC)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(mqueue.PAYLOAD_TRACKER_TOPIC)

# caching repo names to id
REPO_ID_CACHE = {}

REQUIRED_UPLOAD_MESSAGE_FIELDS = {
    "host": ["id", "account"],
    "platform_metadata": ["b64_identity", "url"],
    "timestamp": [],
    "type": []
}
REQUIRED_EVENT_MESSAGE_FIELDS = {"id": [], "type": []}
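REQUIRED_UPLOAD_MESSAGE_FIELDS and REQUIRED_EVENT_MESSAGE_FIELDS map each required top-level key to the sub-keys that must be present beneath it; a hypothetical validator illustrating how such a structure could be checked (the function name and the handling of the (name, False) tuples seen in later examples are assumptions, not taken from the excerpt):

def validate_msg(msg: dict, required: dict) -> bool:
    # Hypothetical helper: every top-level key must exist, and every listed
    # sub-key must exist under it. A (name, False) tuple is treated here as
    # "must be present but may be null" -- an assumed convention.
    for key, subkeys in required.items():
        if key not in msg:
            return False
        for sub in subkeys:
            name = sub[0] if isinstance(sub, tuple) else sub
            if name not in msg[key]:
                return False
    return True

# e.g. validate_msg(upload_msg, REQUIRED_UPLOAD_MESSAGE_FIELDS)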

Example #4
DELETED_UPLOADED = Counter('ve_advisor_listener_deleted_uploaded',
                           '# of systems uploaded after being deleted')
NEW_RH_ACCOUNT = Counter('ve_advisor_listener_upl_new_rh_account',
                         '# of new rh accounts inserted')
INVALID_INSIGHTS_ACC = Counter('ve_advisor_listener_invalid_insights_acc',
                               '# of non-insights messages')
UPLOAD_NO_RESULTS = Counter(
    've_advisor_listener_upl_no_result',
    '# of systems ignored due to missing reports and passes')
UNCHANGED_SYSTEM = Counter(
    've_advisor_listener_upl_unchanged_system',
    '# of system-updates with same advisor results info')
DELETED_SYSTEM_FROM_INVENTORY = Counter(
    've_advisor_listener_deleted_from_inventory',
    '# of systems which are already deleted from inventory')

ADVISOR_QUEUE = mqueue.MQReader([CFG.advisor_results_topic])
REMEDIATIONS_PRODUCER = mqueue.MQWriter(CFG.remediation_updates_topic)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(CFG.payload_tracker_topic)

RULE_BLACKLIST = [
    'CVE_2017_5715_cpu_virt|VIRT_CVE_2017_5715_CPU_3_ONLYKERNEL',
    'CVE_2017_5715_cpu_virt'
]

RULES_CACHE = {}
CVES_CACHE = {}

REQUIRED_MESSAGE_FIELDS = {"input": [{"host": [("insights_id", False)]}]}

PAYLOAD_TRACKER_SERVICE = "vulnerability-rules"
DB_POOL: Optional[asyncpg.pool.Pool] = None
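DB_POOL is only declared here; a minimal sketch of how such a pool is typically created with asyncpg at service startup (the CFG attribute names and pool sizes are assumptions):

import asyncpg

async def init_db_pool():
    # Sketch only: create the shared connection pool once at startup.
    # The CFG attribute names below are assumed, not taken from the excerpt.
    global DB_POOL
    DB_POOL = await asyncpg.create_pool(host=CFG.db_host, port=CFG.db_port,
                                        user=CFG.db_user, password=CFG.db_passwd,
                                        database=CFG.db_name,
                                        min_size=2, max_size=10)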
Example #5
VMAAS_COUNT = Counter('ve_evaluator_vmaas_calls',
                      'Number of VMaaS-evaluations attempted')
INV_ID_NOT_FOUND = Counter(
    've_evaluator_inventory_not_found',
    'Number of times inventory-id not in SystemPlatform')
UNKNOWN_MSG = Counter('ve_evaluator_unknown_msg',
                      'Number of unrecognized messages delivered from queue')
UNKNOWN_TOPIC = Counter(
    've_evaluator_unknown_topic',
    'Number of times message delivered from unsupported topic')
MESSAGE_PARSE_ERROR = Counter('ve_evaluator_message_parse_error',
                              '# of message parse errors')
VMAAS_ERRORS_SKIP = Counter('ve_evaluator_vmaas_errors_skip',
                            '# of evaluations skipped due to VMaaS errors')

CONSUMER_QUEUE = mqueue.MQReader(CFG.evaluator_topics)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(CFG.payload_tracker_topic)
REMEDIATIONS_PRODUCER = mqueue.MQWriter(CFG.remediation_updates_topic)

MAIN_LOOP = asyncio.get_event_loop()
MAX_MESSAGES_SEMAPHORE = asyncio.BoundedSemaphore(
    CFG.max_loaded_evaluator_msgs)


async def _load_cves_for_inventory_id(rh_account_id, system_id, conn):
    system_cves_map = {}
    async for record in conn.cursor(
            """select cm.id, cm.cve, sv.when_mitigated, sv.mitigation_reason, ir.active, ir.playbook_count
                                         from system_vulnerabilities sv
                                         join cve_metadata cm on sv.cve_id = cm.id
                                         left outer join insights_rule ir on sv.rule_id = ir.id
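MAX_MESSAGES_SEMAPHORE, defined earlier in this example, bounds how many messages the evaluator keeps in flight at once; a minimal sketch of that backpressure pattern (the handler and evaluate() names are assumptions):

async def handle_message(msg):
    # Sketch only: acquire before processing, release when done, so at most
    # CFG.max_loaded_evaluator_msgs messages are loaded concurrently.
    await MAX_MESSAGES_SEMAPHORE.acquire()
    try:
        await evaluate(msg)  # hypothetical per-message evaluation step
    finally:
        MAX_MESSAGES_SEMAPHORE.release()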

Example #6
MESSAGE_PARSE_ERROR = Counter('ve_listener_upl_message_parse_error',
                              '# of message parse errors')
NEW_REPO = Counter('ve_listener_upl_new_repo', '# of new repos inserted')
NEW_RH_ACCOUNT = Counter('ve_listener_upl_new_rh_account',
                         '# of new rh accounts inserted')
NEW_SYSTEM_REPO = Counter('ve_listener_upl_new_system_repo',
                          '# of new system_repo pairs inserted')
DELETED_SYSTEM_REPO = Counter('ve_listener_upl_system_repo_deleted',
                              '# deleted system_repo pairs')
INVALID_IDENTITY = Counter('ve_listener_upl_invalid_identity',
                           '# of skipped uploads because of invalid identity')
MISSING_INSIGHTS_ENTITLEMENT = Counter(
    've_listener_upl_non_insights_entitlement',
    '# of skipped uploads because of entitlement check')

# kafka clients
LISTENER_QUEUE = mqueue.MQReader([CFG.events_topic])
EVALUATOR_QUEUE = mqueue.MQWriter(CFG.evaluator_upload_topic)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(CFG.payload_tracker_topic)

WORKER_THREADS = CFG.worker_threads or 30

# caching repo names to id
REPO_ID_CACHE = {}

REQUIRED_CREATED_UPDATED_MESSAGE_FIELDS = {
    "host": [
        "id", "account", "system_profile", "display_name",
        ("insights_id", False)
    ],
    "timestamp": [],
    "type": []

Example #7
UNCHANGED_SYSTEM = Counter('ve_advisor_listener_upl_unchanged_system',
                           '# of system-updates with same rules hit')
MESSAGE_PARSE_ERROR = Counter('ve_advisor_listener_message_parse_error',
                              '# of message parse errors')
INVALID_IDENTITY = Counter('ve_advisor_listener_invalid_identity',
                           '# of skipped uploads because of invalid identity')
MISSING_INSIGHTS_ENTITLEMENT = Counter(
    've_advisor_listener_non_insights_entitlement',
    '# of skipped uploads because of entitlement check')
DATABASE_ERROR = Counter('ve_advisor_listener_database_error',
                         '# of database errors')
DELETED_UPLOADED = Counter('ve_advisor_listener_deleted_uploaded',
                           '# of systems uploaded after being deleted')
NEW_RH_ACCOUNT = Counter('ve_advisor_listener_upl_new_rh_account',
                         '# of new rh accounts inserted')

ADVISOR_QUEUE = mqueue.MQReader([mqueue.ADVISOR_RESULTS_TOPIC])

RULE_BLACKLIST = ['CVE_2017_5715_cpu_virt|VIRT_CVE_2017_5715_CPU_3_ONLYKERNEL']

RULES_CACHE = {}
CVES_CACHE = {}


async def terminate(_, loop):
    """Trigger shutdown."""
    LOGGER.info('Signal received, stopping kafka consumers.')
    await ADVISOR_QUEUE.stop()
    loop.stop()


def db_import_cve(cve: str):
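db_import_cve above receives a CVE name; a hypothetical cache-first import consistent with the CVES_CACHE dict defined earlier might look like this (the cursor API and the cve_metadata column names are assumptions, not the excerpt's implementation):

def db_import_cve_sketch(cve: str, cursor):
    # Hypothetical illustration only: return a cached id if known, otherwise
    # insert the CVE and cache its new id.
    if cve in CVES_CACHE:
        return CVES_CACHE[cve]
    cursor.execute("insert into cve_metadata (cve) values (%s) returning id", (cve,))
    cve_id = cursor.fetchone()[0]
    CVES_CACHE[cve] = cve_id
    return cve_id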
Example #8
                            'Time spent checking a system for vmaas hits')
# counts
VMAAS_COUNT = Counter('ve_evaluator_vmaas_calls',
                      'Number of VMaaS-evaluations attempted')
INV_ID_NOT_FOUND = Counter(
    've_evaluator_inventory_not_found',
    'Number of times inventory-id not in SystemPlatform')
UNKNOWN_MSG = Counter('ve_evaluator_unknown_msg',
                      'Number of unrecognized messages delivered from queue')
UNKNOWN_TOPIC = Counter(
    've_evaluator_unknown_topic',
    'Number of times message delivered from unsupported topic')
MESSAGE_PARSE_ERROR = Counter('ve_evaluator_message_parse_error',
                              '# of message parse errors')

CONSUMER_QUEUE = mqueue.MQReader(kafka_evaluator_topic)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(mqueue.PAYLOAD_TRACKER_TOPIC)


async def terminate(_, loop):
    """Trigger shutdown."""
    LOGGER.info("Signal received, stopping kafka consumers.")
    await CONSUMER_QUEUE.stop()
    await PAYLOAD_TRACKER_PRODUCER.stop()
    loop.stop()


class QueueEvaluator:
    """ This class contains logic for the processing vulnerabilities using VMaaS.
    """
    def __init__(self):