def start(self):
    """Bring the websocket service up and run the IO loop.

    Creates the outgoing Kafka writers, performs an initial CVE
    metadata sync, connects the websocket, schedules periodic
    reconnect attempts, and finally starts the (blocking) IO loop.
    """
    # Outgoing Kafka producers used while the service runs.
    self.evaluator_queue = mqueue.MQWriter(mqueue.EVALUATOR_TOPIC)
    self.webhook_queue = mqueue.MQWriter(mqueue.WEBHOOKS_TOPIC)

    # CVE metadata is always synced once at startup.
    sync_cve_md(self.webhook_queue)

    # Connect now, then keep retrying on a fixed timer.
    self._websocket_reconnect()
    interval_ms = WEBSOCKET_RECONNECT_INTERVAL * 1000
    self.reconnect_callback = PeriodicCallback(self._websocket_reconnect, interval_ms)
    self.reconnect_callback.start()

    # Blocks here until the IO loop is stopped.
    self.instance.start()
 def __init__(self):
     """Build the tornado Application with upload/download/delete routes."""
     # URL regex -> handler class; patterns are matched in order.
     routes = [
         (r"/api/v1/upload/?", UploadHandler),
         (r"/api/v1/download/(.+)", DownloadHandler),
         (r"/api/v1/delete/(.+)", DeleteHandler),
     ]
     Application.__init__(self, routes)
     # IO loop instance the application will run on.
     self.instance = IOLoop.instance()
     # Kafka producers for upload and event messages.
     self.upload_queue = mqueue.MQWriter(mqueue.UPLOAD_TOPIC, bootstrap_servers="localhost:9092")
     self.events_queue = mqueue.MQWriter(mqueue.EVENTS_TOPIC, bootstrap_servers="localhost:9092")
     # In-memory map of archive -> facts; assumed keyed by archive id, TODO confirm.
     self.archive_to_facts_cache = {}
 def __init__(self):
     """Build the tornado Application, including mocked platform endpoints."""
     # Core upload/download/delete endpoints plus mocked RBAC,
     # inventory and insights-rules platform APIs.
     routes = [
         (r"/api/v1/upload/?", UploadHandler),
         (r"/api/v1/download/(.+)", DownloadHandler),
         (r"/api/v1/delete/(.+)", DeleteHandler),
         (r"/api/rbac/v1/access.+", RbacHandler),
         (r"/api/inventory/v1/hosts/(.+)/system_profile", InventoryHandler),
         (r"/api/insights/v1/rule/(.+)", InsightsRulesHandler),
     ]
     Application.__init__(self, routes)
     # IO loop instance the application will run on.
     self.instance = IOLoop.instance()
     # Kafka producers for upload and event messages.
     self.upload_queue = mqueue.MQWriter(mqueue.UPLOAD_TOPIC, bootstrap_servers="localhost:9092")
     self.events_queue = mqueue.MQWriter(mqueue.EVENTS_TOPIC, bootstrap_servers="localhost:9092")
     # In-memory caches; assumed keyed by archive id / inventory id, TODO confirm.
     self.archive_to_profile_cache = {}
     self.inventory_id_to_profile_cache = {}
Example #4
0
 async def start(self, _):
     """Initialise shared sync context and launch the websocket loop.

     Optionally performs a CVE metadata sync on startup (CFG.sync_on_start).
     """
     # Producer used to request CVE re-evaluation.
     VmaasSyncContext.evaluator_queue = mqueue.MQWriter(CFG.evaluator_recalc_topic)
     # Shared HTTP client session for outgoing requests.
     VmaasSyncContext.session = ClientSession()
     # Sync CVE metadata once at startup when configured to do so.
     if CFG.sync_on_start:
         await SyncHandler.a_sync_cve_md()
     # Run the websocket loop as a background task.
     VmaasSyncContext.websocket_task = asyncio.ensure_future(self._websocket_loop())
Example #5
0
    def __init__(self, kafka_topics_in):
        """Wire the evaluator to Kafka and the database.

        kafka_topics_in: iterable of Kafka topic names to consume from.
        """
        # connect to the Messaging Service
        self.consumer = mqueue.MQReader(kafka_topics_in) # [kafka_evaluator_topic]

        LOGGER.info("Using BOOTSTRAP_SERVERS: %s", mqueue.BOOTSTRAP_SERVERS)
        LOGGER.info("Using GROUP_ID: %s", mqueue.GROUP_ID)
        LOGGER.info("Using TOPICS: %s", ", ".join(kafka_topics_in))

        # Producer for evaluation results; kafka_evaluator_topic is
        # presumably a module-level constant -- confirm in full source.
        self.producer = mqueue.MQWriter(kafka_evaluator_topic)

        # get DB connection
        init_db()
        self.conn = DatabaseHandler.get_connection()
        # HTTP session reused for outgoing requests (e.g. VMaaS calls -- TODO confirm).
        self.session = requests.Session()
 def __init__(self):
     """Build the tornado Application with all mocked platform endpoints."""
     # Core upload/download/delete endpoints followed by mocked RBAC,
     # inventory, insights, patch and exploits platform APIs.
     routes = [
         (r"/api/v1/upload/?", UploadHandler),
         (r"/api/v1/download/(.+)", DownloadHandler),
         (r"/api/v1/delete/(.+)", DeleteHandler),
         (r"/api/rbac/v1/access.+", RbacHandler),
         (r"/api/inventory/v1/hosts/(.+)/system_profile", InventoryHandler),
         (r"/api/insights/v1/rule/(.+)", InsightsRulesHandler),
         (r"/api/patch/v1/systems/(.+)/advisories", PatchAdvisoriesHandler),
         (r"/api/patch/v1/advisories/(.+)/systems", PatchSystemsHandler),
         (r"/api/patch/v1/views/systems/advisories", PatchViewsSystemsAdvHandler),
         (r"/v1/exploits", ExploitsHandler),
     ]
     Application.__init__(self, routes)
     # IO loop instance the application will run on.
     self.instance = IOLoop.instance()
     # Single Kafka producer for event messages.
     self.queue = mqueue.MQWriter(mqueue.EVENTS_TOPIC, bootstrap_servers="localhost:9092")
     # In-memory caches; assumed keyed by archive id / inventory id, TODO confirm.
     self.archive_to_profile_cache = {}
     self.inventory_id_to_profile_cache = {}
Example #7
0
                     '# of upload-msgs missing inventory_id')
# Prometheus counters tracking upload-listener failure modes.
ARCHIVE_PARSE_FAILURE = Counter('ve_listener_upl_archive_exceptions',
                                '# of exceptions during archive-processing')
ARCHIVE_RETRIEVE_ATTEMPT = Counter('ve_listener_upl_archive_tgz_attempt',
                                   '# of times retried archive-retrieval')
ARCHIVE_RETRIEVE_FAILURE = Counter('ve_listener_upl_archive_tgz_failure',
                                   '# of times gave up on archive retrieval')
ARCHIVE_RETRIEVE_INVALID_HTTP = Counter(
    've_listener_upl_archive_tgz_invalid_http',
    '# archive downloaded with invalid http code')
ARCHIVE_NO_RPMDB = Counter('ve_listener_upl_no_rpmdb',
                           '# of systems ignored due to missing rpmdb')

# kafka clients
# Consumer of raw upload messages; producer feeding the evaluator service.
UPLOAD_QUEUE = mqueue.MQReader(mqueue.UPLOAD_TOPIC)
EVALUATOR_QUEUE = mqueue.MQWriter(mqueue.EVALUATOR_TOPIC)


async def terminate(_, loop):
    """Stop the Kafka clients, then halt the event loop."""
    LOGGER.info("Signal received, stopping kafka consumers.")
    # Stop every client in the order it was created, then end the loop.
    for mq_client in (UPLOAD_QUEUE, EVALUATOR_QUEUE):
        await mq_client.stop()
    loop.stop()


def on_thread_done(future):
    """Callback to call after ThreadPoolExecutor worker finishes."""
    # future.result() re-raises any exception captured inside the worker,
    # so failures are surfaced here rather than silently discarded.
    # NOTE(review): the except-handler body is missing below -- this
    # fragment appears truncated; confirm against the full source file.
    try:
        future.result()
    except Exception:  # pylint: disable=broad-except
# Prometheus counters for listener database/identity outcomes.
NEW_REPO = Counter('ve_listener_upl_new_repo', '# of new repos inserted')
NEW_RH_ACCOUNT = Counter('ve_listener_upl_new_rh_account',
                         '# of new rh accounts inserted')
NEW_SYSTEM_REPO = Counter('ve_listener_upl_new_system_repo',
                          '# of new system_repo pairs inserted')
DELETED_SYSTEM_REPO = Counter('ve_listener_upl_system_repo_deleted',
                              '# deleted system_repo pairs')
INVALID_IDENTITY = Counter('ve_listener_upl_invalid_identity',
                           '# of skipped uploads because of invalid identity')
MISSING_SMART_MANAGEMENT = Counter(
    've_listener_upl_non_smart_management',
    '# of skipped uploads because of entitlement check')

# kafka clients
# One consumer over both topics; producers for evaluator and payload tracker.
LISTENER_QUEUE = mqueue.MQReader([mqueue.UPLOAD_TOPIC, mqueue.EVENTS_TOPIC])
EVALUATOR_QUEUE = mqueue.MQWriter(mqueue.EVALUATOR_TOPIC)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(mqueue.PAYLOAD_TRACKER_TOPIC)

# caching repo names to id
REPO_ID_CACHE = {}

# Field-presence schemas for validating incoming messages; a key maps to
# the sub-fields required beneath it (empty list = key itself suffices).
REQUIRED_UPLOAD_MESSAGE_FIELDS = {
    "host": ["id", "account"],
    "platform_metadata": ["b64_identity", "url"],
    "timestamp": [],
    "type": []
}
REQUIRED_EVENT_MESSAGE_FIELDS = {"id": [], "type": []}


class ImportStatus(flags.Flags):
Example #9
0
# Prometheus counters for advisor-listener outcomes.
NEW_RH_ACCOUNT = Counter('ve_advisor_listener_upl_new_rh_account',
                         '# of new rh accounts inserted')
INVALID_INSIGHTS_ACC = Counter('ve_advisor_listener_invalid_insights_acc',
                               '# of non-insights messages')
UPLOAD_NO_RESULTS = Counter(
    've_advisor_listener_upl_no_result',
    '# of systems ignored due to missing reports and passes')
UNCHANGED_SYSTEM = Counter(
    've_advisor_listener_upl_unchanged_system',
    '# of system-updates with same advisor results info')
DELETED_SYSTEM_FROM_INVENTORY = Counter(
    've_advisor_listener_deleted_from_inventory',
    '# of systems which are already deleted from inventory')

# Kafka clients: consume advisor results, produce remediation updates
# and payload-tracker status messages.
ADVISOR_QUEUE = mqueue.MQReader([CFG.advisor_results_topic])
REMEDIATIONS_PRODUCER = mqueue.MQWriter(CFG.remediation_updates_topic)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(CFG.payload_tracker_topic)

# Advisor rule ids excluded from processing.
RULE_BLACKLIST = [
    'CVE_2017_5715_cpu_virt|VIRT_CVE_2017_5715_CPU_3_ONLYKERNEL',
    'CVE_2017_5715_cpu_virt'
]

# In-memory caches; contents populated elsewhere -- TODO confirm key types.
RULES_CACHE = {}
CVES_CACHE = {}

# Field-presence schema; (name, False) marks an optional field -- TODO confirm.
REQUIRED_MESSAGE_FIELDS = {"input": [{"host": [("insights_id", False)]}]}

PAYLOAD_TRACKER_SERVICE = "vulnerability-rules"
# asyncpg pool, created later during startup.
DB_POOL: Optional[asyncpg.pool.Pool] = None
# NOTE(review): asyncio.get_event_loop() at import time is deprecated on
# newer Python versions -- confirm the runtime version this targets.
MAIN_LOOP = asyncio.get_event_loop()
# Prometheus counters for listener database/identity outcomes.
NEW_REPO = Counter('ve_listener_upl_new_repo', '# of new repos inserted')
NEW_RH_ACCOUNT = Counter('ve_listener_upl_new_rh_account',
                         '# of new rh accounts inserted')
NEW_SYSTEM_REPO = Counter('ve_listener_upl_new_system_repo',
                          '# of new system_repo pairs inserted')
DELETED_SYSTEM_REPO = Counter('ve_listener_upl_system_repo_deleted',
                              '# deleted system_repo pairs')
INVALID_IDENTITY = Counter('ve_listener_upl_invalid_identity',
                           '# of skipped uploads because of invalid identity')
MISSING_INSIGHTS_ENTITLEMENT = Counter(
    've_listener_upl_non_insights_entitlement',
    '# of skipped uploads because of entitlement check')

# kafka clients
LISTENER_QUEUE = mqueue.MQReader([CFG.events_topic])
EVALUATOR_QUEUE = mqueue.MQWriter(CFG.evaluator_upload_topic)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(CFG.payload_tracker_topic)

# Worker pool size; falls back to 30 when unset/zero in config.
WORKER_THREADS = CFG.worker_threads or 30

# caching repo names to id
REPO_ID_CACHE = {}

# Field-presence schema; (name, False) marks an optional field -- TODO confirm.
REQUIRED_CREATED_UPDATED_MESSAGE_FIELDS = {
    "host": [
        "id", "account", "system_profile", "display_name",
        ("insights_id", False)
    ],
    "timestamp": [],
    "type": []
}
MAX_QUEUE_SIZE = int(getenv('MAX_QUEUE_SIZE', '30'))
SYSTEM_DELETION_THRESHOLD = int(getenv('SYSTEM_DELETION_THRESHOLD', '24'))  # 24 hours

# Prometheus counters for advisor-listener message processing.
PROCESS_MESSAGES = Counter('ve_advisor_listener_messages_processed', '# of messages processed')
NEW_SYSTEM = Counter('ve_advisor_listener_upl_new_system', '# of new systems inserted by advisor')
UPDATE_SYSTEM = Counter('ve_advisor_listener_upl_update_system', '# of systems updated')
MESSAGE_PARSE_ERROR = Counter('ve_advisor_listener_message_parse_error', '# of message parse errors')
INVALID_IDENTITY = Counter('ve_advisor_listener_invalid_identity', '# of skipped uploads because of invalid identity')
MISSING_INSIGHTS_ENTITLEMENT = Counter('ve_advisor_listener_non_insights_entitlement', '# of skipped uploads because of entitlement check')
DATABASE_ERROR = Counter('ve_advisor_listener_database_error', '# of database errors')
DELETED_UPLOADED = Counter('ve_advisor_listener_deleted_uploaded', '# of systems uploaded after being deleted')
NEW_RH_ACCOUNT = Counter('ve_advisor_listener_upl_new_rh_account', '# of new rh accounts inserted')


# Kafka clients: consume advisor results, produce remediation updates.
ADVISOR_QUEUE = mqueue.MQReader([mqueue.ADVISOR_RESULTS_TOPIC])
REMEDIATIONS_PRODUCER = mqueue.MQWriter(mqueue.REMEDIATION_UPDATES_TOPIC)

# Advisor rule ids excluded from processing.
RULE_BLACKLIST = ['CVE_2017_5715_cpu_virt|VIRT_CVE_2017_5715_CPU_3_ONLYKERNEL']

# In-memory caches; contents populated elsewhere -- TODO confirm key types.
RULES_CACHE = {}
CVES_CACHE = {}

async def terminate(_, loop):
    """Stop the Kafka clients, then halt the event loop."""
    LOGGER.info('Signal received, stopping kafka consumers.')
    # Stop every client in the order it was created, then end the loop.
    for mq_client in (ADVISOR_QUEUE, REMEDIATIONS_PRODUCER):
        await mq_client.stop()
    loop.stop()

# Prometheus counters for advisor-listener skip/error conditions.
INVALID_IDENTITY = Counter('ve_advisor_listener_invalid_identity',
                           '# of skipped uploads because of invalid identity')
MISSING_INSIGHTS_ENTITLEMENT = Counter(
    've_advisor_listener_non_insights_entitlement',
    '# of skipped uploads because of entitlement check')
DATABASE_ERROR = Counter('ve_advisor_listener_database_error',
                         '# of database errors')
DELETED_UPLOADED = Counter('ve_advisor_listener_deleted_uploaded',
                           '# of systems uploaded after being deleted')
NEW_RH_ACCOUNT = Counter('ve_advisor_listener_upl_new_rh_account',
                         '# of new rh accounts inserted')
INVALID_INSIGHTS_ACC = Counter('ve_advisor_listener_invalid_insights_acc',
                               '# of non-insights messages')

# Kafka clients: consume advisor results, produce remediation updates
# and payload-tracker status messages.
ADVISOR_QUEUE = mqueue.MQReader([mqueue.ADVISOR_RESULTS_TOPIC])
REMEDIATIONS_PRODUCER = mqueue.MQWriter(mqueue.REMEDIATION_UPDATES_TOPIC)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(mqueue.PAYLOAD_TRACKER_TOPIC)

# Advisor rule ids excluded from processing.
RULE_BLACKLIST = ['CVE_2017_5715_cpu_virt|VIRT_CVE_2017_5715_CPU_3_ONLYKERNEL']

# In-memory caches; contents populated elsewhere -- TODO confirm key types.
RULES_CACHE = {}
CVES_CACHE = {}

# Field-presence schema; (name, False) marks an optional field -- TODO confirm.
REQUIRED_MESSAGE_FIELDS = {"input": [{"host": [("insights_id", False)]}]}

async def terminate(_, loop):
    """Trigger shutdown.

    Stops the Kafka consumer and producer, then stops the event loop so
    the process can exit.
    """
    LOGGER.info('Signal received, stopping kafka consumers.')
    await ADVISOR_QUEUE.stop()
    await REMEDIATIONS_PRODUCER.stop()
    # Fix: the loop was never stopped here, so shutdown never completed;
    # every sibling terminate() handler in this codebase ends with
    # loop.stop() after the clients are closed.
    loop.stop()
# counts
# Prometheus counters for evaluator message handling.
VMAAS_COUNT = Counter('ve_evaluator_vmaas_calls',
                      'Number of VMaaS-evaluations attempted')
INV_ID_NOT_FOUND = Counter(
    've_evaluator_inventory_not_found',
    'Number of times inventory-id not in SystemPlatform')
UNKNOWN_MSG = Counter('ve_evaluator_unknown_msg',
                      'Number of unrecognized messages delivered from queue')
UNKNOWN_TOPIC = Counter(
    've_evaluator_unknown_topic',
    'Number of times message delivered from unsupported topic')
MESSAGE_PARSE_ERROR = Counter('ve_evaluator_message_parse_error',
                              '# of message parse errors')

# Kafka clients; kafka_evaluator_topic is presumably a module-level
# constant defined elsewhere -- confirm in full source.
CONSUMER_QUEUE = mqueue.MQReader(kafka_evaluator_topic)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(mqueue.PAYLOAD_TRACKER_TOPIC)


async def terminate(_, loop):
    """Stop the Kafka clients, then halt the event loop."""
    LOGGER.info("Signal received, stopping kafka consumers.")
    # Stop every client in the order it was created, then end the loop.
    for mq_client in (CONSUMER_QUEUE, PAYLOAD_TRACKER_PRODUCER):
        await mq_client.stop()
    loop.stop()


class QueueEvaluator:
    """ This class contains logic for the processing vulnerabilities using VMaaS.
    """
    def __init__(self):
        LOGGER.info("Using BOOTSTRAP_SERVERS: %s", mqueue.BOOTSTRAP_SERVERS)
Example #14
0
# number of worker threads
WORKER_THREADS = int(os.getenv('WORKER_THREADS', '30'))
MAX_QUEUE_SIZE = int(os.getenv('MAX_QUEUE_SIZE', '30'))

# prometheus probes
# times
VMAAS_EVAL_TIME = Histogram('ve_evaluator_vmaas_evaluation_seconds', 'Time spent checking a system for vmaas hits')
# counts
VMAAS_COUNT = Counter('ve_evaluator_vmaas_calls', 'Number of VMaaS-evaluations attempted')
INV_ID_NOT_FOUND = Counter('ve_evaluator_inventory_not_found', 'Number of times inventory-id not in SystemPlatform')
UNKNOWN_MSG = Counter('ve_evaluator_unknown_msg', 'Number of unrecognized messages delivered from queue')
UNKNOWN_TOPIC = Counter('ve_evaluator_unknown_topic', 'Number of times message delivered from unsupported topic')
MESSAGE_PARSE_ERROR = Counter('ve_evaluator_message_parse_error', '# of message parse errors')

# Kafka clients; kafka_evaluator_topic is presumably a module-level
# constant defined elsewhere -- confirm in full source.
CONSUMER_QUEUE = mqueue.MQReader(kafka_evaluator_topic)
WEBHOOKS_QUEUE = mqueue.MQWriter(mqueue.WEBHOOKS_TOPIC)
PAYLOAD_TRACKER_PRODUCER = mqueue.MQWriter(mqueue.PAYLOAD_TRACKER_TOPIC)


async def terminate(_, loop):
    """Stop the Kafka clients, then halt the event loop."""
    LOGGER.info("Signal received, stopping kafka consumers.")
    # Stop every client in the order it was created, then end the loop.
    for mq_client in (CONSUMER_QUEUE, WEBHOOKS_QUEUE, PAYLOAD_TRACKER_PRODUCER):
        await mq_client.stop()
    loop.stop()


class QueueEvaluator:
    """ This class contains logic for the processing vulnerabilities using VMaaS.
    """
Example #15
0
# Port the prometheus metrics endpoint listens on.
prometheus_port = os.getenv('PROMETHEUS_PORT', '8085')  # pylint: disable=invalid-name
# number of worker threads
WORKER_THREADS = int(os.getenv('WORKER_THREADS', '30'))
MAX_QUEUE_SIZE = int(os.getenv('MAX_QUEUE_SIZE', '30'))

# prometheus probes
# times
VMAAS_EVAL_TIME = Histogram('ve_evaluator_vmaas_evaluation_seconds', 'Time spent checking a system for vmaas hits')
# counts
VMAAS_COUNT = Counter('ve_evaluator_vmaas_calls', 'Number of VMaaS-evaluations attempted')
INV_ID_NOT_FOUND = Counter('ve_evaluator_inventory_not_found', 'Number of times inventory-id not in SystemPlatform')
UNKNOWN_MSG = Counter('ve_evaluator_unknown_msg', 'Number of unrecognized messages delivered from queue')
UNKNOWN_TOPIC = Counter('ve_evaluator_unknown_topic', 'Number of times message delivered from unsupported topic')

# Kafka clients; kafka_evaluator_topic is presumably a module-level
# constant defined elsewhere -- confirm in full source.
CONSUMER_QUEUE = mqueue.MQReader(kafka_evaluator_topic)
WEBHOOKS_QUEUE = mqueue.MQWriter(mqueue.WEBHOOKS_TOPIC)


async def terminate(_, loop):
    """Stop the Kafka clients, then halt the event loop."""
    LOGGER.info("Signal received, stopping kafka consumers.")
    # Stop every client in the order it was created, then end the loop.
    for mq_client in (CONSUMER_QUEUE, WEBHOOKS_QUEUE):
        await mq_client.stop()
    loop.stop()


class QueueEvaluator:
    """ This class contains logic for the processing vulnerabilities using VMaaS.
    """

    def __init__(self):