Example #1
    def __init__(self):
        self.stdin_path = '/dev/null'
        self.stdout_path = '/dev/tty'
        self.stderr_path = '/dev/tty'
        self.pidfile_path = '/tmp/foo.pid'
        self.pidfile_timeout = 5
        self.mq_host = os.getenv("MQ_HOST") or 'localhost'
        self.mq_port = os.getenv("MQ_PORT") or 5672
        # change this connection string to the remote SQL connection once we have containers
        self.handler = DatabaseHandler('sqlite:///house_record.db')
Example #2
    def re_evaluate_systems(self):
        """Schedule re-evaluation for all systems in DB."""
        LOGGER.info("Re-evaluating all systems")
        conn = DatabaseHandler.get_connection()
        with NamedCursor(conn) as cur:
            cur.execute("select inventory_id from system_platform")
            # re-evaluate updates for every system in the DB
            for inventory_id, in cur:
                self.evaluator_queue.send({
                    "type": "re-evaluate_system",
                    "system_id": inventory_id
                })
        conn.commit()
        DatabaseHandler.close_connection()
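Note: NamedCursor is a project helper that is not shown in this snippet. A
minimal sketch of what such a context manager might look like, assuming a
psycopg2 connection (the class body below is an assumption, not the project's
actual code):

import uuid

class NamedCursor:
    """Context manager yielding a psycopg2 server-side (named) cursor."""

    def __init__(self, conn, name=None):
        # passing a name makes psycopg2 create a server-side cursor,
        # so large result sets are streamed instead of fetched at once
        self.cursor = conn.cursor(name=name or "cur_%s" % uuid.uuid4().hex)

    def __enter__(self):
        return self.cursor

    def __exit__(self, exc_type, exc_value, traceback):
        self.cursor.close()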
Example #3
def main():
    """Main kafka listener entrypoint."""
    start_http_server(int(PROMETHEUS_PORT))
    init_logging()
    init_db()
    LOGGER.info("Starting upload listener.")
    # get DB connection
    conn = DatabaseHandler.get_connection()

    loop = asyncio.get_event_loop()
    signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT)
    for sig in signals:
        loop.add_signal_handler(
            sig, lambda sig=sig: loop.create_task(terminate(sig, loop)))
    executor = BoundedExecutor(MAX_QUEUE_SIZE, max_workers=WORKER_THREADS)

    def process_message(msg):
        """Message processing logic"""
        PROCESS_MESSAGES.inc()
        LOGGER.info('Received message from topic %s: %s', msg.topic, msg.value)

        try:
            msg_dict = json.loads(msg.value.decode("utf8"))
        except json.decoder.JSONDecodeError:
            MESSAGE_PARSE_ERROR.inc()
            LOGGER.exception("Unable to parse message: ")
            return

        if msg.topic == mqueue.UPLOAD_TOPIC:
            process_func = process_upload
        elif msg.topic == mqueue.EVENTS_TOPIC:
            if msg_dict['type'] == 'delete':
                process_func = process_delete
            else:
                UNKNOWN_EVENT_TYPE.inc()
                LOGGER.error("Received unknown event type: %s",
                             msg_dict['type'])
                return
        else:
            UNKNOWN_TOPIC.inc()
            LOGGER.error("Received message on unsupported topic: %s",
                         msg.topic)
            return

        if 'id' not in msg_dict or msg_dict["id"] is None:
            MISSING_ID.inc()
            LOGGER.warning(
                "Unable to process message, inventory ID is missing.")
            return

        future = executor.submit(process_func, msg_dict, conn, loop=loop)
        future.add_done_callback(on_thread_done)

    LISTENER_QUEUE.listen(process_message)

    # wait until loop is stopped from terminate callback
    loop.run_forever()

    LOGGER.info("Shutting down.")
    executor.shutdown()
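Note: BoundedExecutor is not defined in this snippet. A common way to build
one (an assumption about this project, but a standard pattern) is to wrap
ThreadPoolExecutor with a semaphore so that submit() blocks once
MAX_QUEUE_SIZE tasks are pending, instead of letting the queue grow without
bound:

from concurrent.futures import ThreadPoolExecutor
from threading import BoundedSemaphore

class BoundedExecutor:
    """ThreadPoolExecutor wrapper with a bounded pending-task queue."""

    def __init__(self, bound, max_workers):
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        self.semaphore = BoundedSemaphore(bound + max_workers)

    def submit(self, fn, *args, **kwargs):
        self.semaphore.acquire()  # blocks while the queue is full
        try:
            future = self.executor.submit(fn, *args, **kwargs)
        except Exception:
            self.semaphore.release()
            raise
        future.add_done_callback(lambda _: self.semaphore.release())
        return future

    def shutdown(self, wait=True):
        self.executor.shutdown(wait=wait)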
Example #4
class App():
    def __init__(self):
        self.stdin_path = '/dev/null'
        self.stdout_path = '/dev/tty'
        self.stderr_path = '/dev/tty'
        self.pidfile_path = '/tmp/foo.pid'
        self.pidfile_timeout = 5
        self.mq_host = os.getenv("MQ_HOST") or 'localhost'
        self.mq_port = os.getenv("MQ_PORT") or 5672
        # change this connection string to the remote SQL connection once we have containers
        self.handler = DatabaseHandler('sqlite:///house_record.db')

    def do_work(self, channel):
        print('Waiting for messages on host ' + self.mq_host +
              ':' + str(self.mq_port) + '. To exit press CTRL+C')

        def callback(ch, method, properties, body):
            data = json.loads(body)
            record_id = data['id']
            print(record_id)
            record = self.handler.get_record_by_id(record_id)
            results = process_record(data["action"], record)
            self.handler.save_result(results[0],
                                     encrypt_fernet(results[1]),
                                     action=data["action"])
            ch.basic_ack(delivery_tag=method.delivery_tag)

        channel.basic_qos(prefetch_count=1)
        channel.basic_consume(callback, queue='task_queue')
        channel.start_consuming()

    def run(self):
        if not self.mq_host or not self.mq_port:
            self.mq_host = 'localhost'
            self.mq_port = 5672
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.mq_host,
                                      port=int(self.mq_port)))
        channel = connection.channel()
        channel.queue_declare(queue='task_queue', durable=True)

        while True:
            self.do_work(channel)
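Note: the attribute set above (stdin_path, stdout_path, stderr_path,
pidfile_path, pidfile_timeout) plus the run() method matches the interface
expected by python-daemon's DaemonRunner. Assuming that package is what the
project uses, the app would typically be launched like this:

from daemon import runner

app = App()
daemon_runner = runner.DaemonRunner(app)  # reads app.pidfile_path etc.
daemon_runner.do_action()                 # handles start/stop/restart from argv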
Example #5
    def __init__(self, kafka_topics_in):
        # connect to the Messaging Service
        self.consumer = mqueue.MQReader(kafka_topics_in) # [kafka_evaluator_topic]

        LOGGER.info("Using BOOTSTRAP_SERVERS: %s", mqueue.BOOTSTRAP_SERVERS)
        LOGGER.info("Using GROUP_ID: %s", mqueue.GROUP_ID)
        LOGGER.info("Using TOPICS: %s", ", ".join(kafka_topics_in))

        self.producer = mqueue.MQWriter(kafka_evaluator_topic)

        # get DB connection
        init_db()
        self.conn = DatabaseHandler.get_connection()
        self.session = requests.Session()
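Note: mqueue.MQReader and mqueue.MQWriter are project helpers not shown here.
A rough sketch of what MQReader might wrap, assuming the kafka-python package
(the class and constants below are assumptions for illustration only):

from kafka import KafkaConsumer

BOOTSTRAP_SERVERS = "localhost:9092"  # assumed; mirrors mqueue.BOOTSTRAP_SERVERS
GROUP_ID = "evaluator"                # assumed; mirrors mqueue.GROUP_ID

class MQReader:
    """Thin wrapper around a Kafka consumer subscribed to the given topics."""

    def __init__(self, topics):
        self.consumer = KafkaConsumer(*topics,
                                      bootstrap_servers=BOOTSTRAP_SERVERS,
                                      group_id=GROUP_ID)

    def __iter__(self):
        return iter(self.consumer)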
Example #6
def main():
    """Main kafka listener entrypoint."""
    start_http_server(int(PROMETHEUS_PORT))
    init_logging()
    init_db()
    LOGGER.info("Starting upload listener.")
    # get DB connection
    conn = DatabaseHandler.get_connection()

    session = requests.Session()
    loop = asyncio.get_event_loop()
    signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT)
    for sig in signals:
        loop.add_signal_handler(
            sig, lambda sig=sig: loop.create_task(terminate(sig, loop)))
    executor = ThreadPoolExecutor(WORKER_THREADS)

    def process_message(msg):
        """Message processing logic"""
        PROCESS_UPLOAD.inc()
        LOGGER.info('Received message from topic %s: %s', msg.topic, msg.value)

        upload_data = json.loads(msg.value.decode("utf8"))

        # Inventory ID is missing
        if 'id' not in upload_data or upload_data["id"] is None:
            MISSING_ID.inc()
            LOGGER.warning("Unable to store system, inventory ID is missing.")
            return

        future = executor.submit(process_upload,
                                 upload_data,
                                 session,
                                 conn,
                                 loop=loop)
        future.add_done_callback(on_thread_done)

    UPLOAD_QUEUE.listen(process_message)

    # wait until loop is stopped from terminate callback
    loop.run_forever()

    LOGGER.info("Shutting down.")
    executor.shutdown()
    session.close()
Example #7
def sync_cve_md(page_size=5000):
    """Sync all CVE metadata from VMaaS"""
    LOGGER.info('Syncing CVE metadata')
    conn = DatabaseHandler.get_connection()
    cur = conn.cursor()
    impact_id_map = {}
    cur.execute("select name, id from cve_impact")
    for impact_name, impact_id in cur.fetchall():
        impact_id_map[impact_name] = impact_id
    cur.execute('select cve from cve_metadata')
    cves_in_db = []
    for cve_tuple in cur.fetchall():
        cves_in_db.append(cve_tuple[0])
    cve_list = [".*"]
    success = True
    page = 1
    session = requests.Session()
    while True:
        cve_request = {
            'cve_list': cve_list,
            'page_size': page_size,
            'page': page,
            'rh_only': True
        }
        LOGGER.info('Downloading CVE metadata (page: %s, page_size: %s)', page,
                    page_size)
        r_json = vmaas_post_request(VMAAS_CVES_ENDPOINT,
                                    cve_request,
                                    session=session)
        if r_json is None:
            success = False
            break
        LOGGER.info(
            'Importing CVE metadata (page: %s, page_size: %s, pages: %s)',
            page, page_size, r_json['pages'])
        cves = r_json['cve_list']
        to_insert = []
        to_update = []
        for cve in cves:
            description = cves[cve]['description']
            impact_id = impact_id_map[cves[cve]['impact']]
            public_date = cves[cve]['public_date'] or None
            modified_date = cves[cve]['modified_date'] or None
            cvss3_score = float(cves[cve]['cvss3_score']) if cves[cve].get(
                'cvss3_score') else None
            cvss3_metrics = cves[cve].get('cvss3_metrics')
            cvss2_score = float(cves[cve]['cvss2_score']) if cves[cve].get(
                'cvss2_score') else None
            cvss2_metrics = cves[cve].get('cvss2_metrics')
            row = (cve, description, impact_id, public_date, modified_date,
                   cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics)
            if cve not in cves_in_db:
                to_insert.append(row)
            else:
                to_update.append(row)
        if to_insert:
            execute_values(cur,
                           """insert into cve_metadata
                           (cve, description, impact_id, public_date, modified_date,
                           cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics)
                           values %s""",
                           to_insert,
                           page_size=len(to_insert))
        if to_update:
            execute_values(
                cur,
                """update cve_metadata set description = data.description,
                           impact_id = data.impact_id,
                           public_date = cast(data.public_date as timestamp with time zone),
                           modified_date = cast(data.modified_date as timestamp with time zone),
                           cvss3_score = cast(data.cvss3_score as numeric),
                           cvss3_metrics = data.cvss3_metrics,
                           cvss2_score = cast(data.cvss2_score as numeric),
                           cvss2_metrics = data.cvss2_metrics
                           from (values %s) as data (cve, description, impact_id, public_date, modified_date,
                           cvss3_score, cvss3_metrics, cvss2_score, cvss2_metrics)
                           where cve_metadata.cve = data.cve""",
                to_update,
                page_size=len(to_update))
        LOGGER.info(
            'Finished importing CVE metadata (page: %s, page_size: %s, pages: %s)',
            page, page_size, r_json['pages'])
        if page >= r_json['pages']:
            break
        page += 1
    cur.close()
    conn.commit()
    session.close()
    LOGGER.info('Finished syncing CVE metadata')
    return success
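Note: the bulk insert/update above uses execute_values, which expands the
single "values %s" placeholder into one multi-row statement; it comes from
psycopg2.extras. A minimal standalone illustration (the rows below are made
up):

from psycopg2.extras import execute_values

rows = [("CVE-2021-0001", "first description"),
        ("CVE-2021-0002", "second description")]
execute_values(cur,
               "insert into cve_metadata (cve, description) values %s",
               rows,
               page_size=len(rows))  # send all rows in a single statement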
Example #8
import datetime
import hashlib
import json
import uuid

import pika
from pika.exceptions import ConnectionClosed, ChannelClosed
from sqlalchemy.exc import IntegrityError

from common.database_handler import DatabaseHandler
from common.models.house_record import HouseRecord

handler = DatabaseHandler('sqlite:///house_record.db')
#handler = DatabaseHandler('sqlite:///:memory:')

#users
users = ['admin', 'user', 'anna', 'jake']
try:
    for u in users:
        # hashlib requires bytes, so encode the string first
        pwd_hash = hashlib.sha256((u + "pass").encode()).hexdigest()
        handler.save_user(u, pwd_hash)
except IntegrityError:
    # users already in the DB, nothing to seed
    pass

#records
my_csv = open('kc_house_data.csv', 'r')
i = 0
records = []
for line in my_csv.readlines():
    values = line.replace('"', "").split(',')[:11]
Example #9
import os
import sys

from flask import Flask, render_template, request

sys.path.append(os.path.dirname(os.path.realpath(__file__)) + '/../')

from common.database_handler import DatabaseHandler
from common.models.house_record import HouseRecord
from common.encrypt_decrypt import *

import pika
from pika.exceptions import ConnectionClosed, ChannelClosed

app = Flask(__name__)
app.debug = True

upload_folder = os.path.dirname(os.path.realpath(__file__))
app.config['upload_folder'] = upload_folder

handler = DatabaseHandler('sqlite:///house_record.db')

#users
users = ['admin', 'user', 'anna', 'jake']


# Routes
@app.route("/")
def index():
    return render_template('base.html')


@app.route('/login', methods=['POST'])
def login():
    username = request.json.get('username', '')
    password = request.json.get('password', '')