Example #1
def request_handler_1_3():
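    """Handle a protocol version 1.3 alert submission.

    Reads the alert JSON from the 'alert' form field, stores any uploaded
    'data' files as F_FILE observables, syncs the alert to the database and
    requests correlation. Returns the alert id with HTTP 200 on success, or
    an empty body with HTTP 500 on failure."""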
    assert saq.CONFIG is not None

    # TODO actually use the library
    # protocol constants copied over from the client library ;)
    KEY_ID = 'id'
    KEY_UUID = 'uuid'
    KEY_TOOL = 'tool'
    KEY_TOOL_INSTANCE = 'tool_instance'
    KEY_TYPE = 'type'
    KEY_DESCRIPTION = 'description'
    KEY_EVENT_TIME = 'event_time'
    KEY_DETAILS = 'details'
    KEY_OBSERVABLES = 'observables'
    KEY_TAGS = 'tags'
    KEY_NAME = 'name'
    KEY_COMPANY_NAME = 'company_name'

    # client passes in the JSON contents of the alert
    contents = json.loads(request.form['alert'])

    alert = Alert()
    alert.uuid = contents[KEY_UUID]
    alert.storage_dir = os.path.join(saq.CONFIG['global']['data_dir'], saq.SAQ_NODE, alert.uuid[0:3], alert.uuid)
    alert.initialize_storage()
    alert.tool = contents[KEY_TOOL]
    alert.tool_instance = contents[KEY_TOOL_INSTANCE]
    alert.alert_type = contents[KEY_TYPE]
    alert.description = contents[KEY_DESCRIPTION]
    alert.event_time = contents[KEY_EVENT_TIME]
    alert.details = contents[KEY_DETAILS]

    if KEY_NAME in contents:
        alert.name = contents[KEY_NAME]

    if KEY_COMPANY_NAME in contents and contents[KEY_COMPANY_NAME]:
        alert.company_name = contents[KEY_COMPANY_NAME]
    else:
        alert.company_name = saq.CONFIG['global']['company_name']

    # add all the specified observables
    # each key in the observable dictionary is the type
    for o_type in contents[KEY_OBSERVABLES].keys():
        # protocol version 1.2 only had two elements (value, time)
        # version 1.3 has four (value, time, is_suspect, directives)
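        # assumed example shape: {"ipv4": [["192.0.2.1", "2018-01-01 00:00:00", false, ["example_directive"]], ...]}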
        for values in contents[KEY_OBSERVABLES][o_type]:
            o_value = values[0]
            o_time = values[1]
            is_suspect = values[2] # DEPRECATED
            directives = values[3]

            o = alert.add_observable(o_type, o_value, o_time)
            if o:
                for directive in directives:
                    o.add_directive(directive)

    # add all the specified tags
    for tag in contents[KEY_TAGS]:
        alert.add_tag(tag)

    # save the files to disk and add them as observables of type file
    for f in request.files.getlist('data'):
        logging.debug("recording file {}".format(f.filename))
        temp_dir = tempfile.mkdtemp(dir=saq.CONFIG.get('server', 'incoming_dir'))
        _path = os.path.join(temp_dir, secure_filename(f.filename))
        try:
            if os.path.exists(_path):
                logging.error("duplicate file name {}".format(_path))
                raise RuntimeError("duplicate file name {}".format(_path))

            logging.debug("saving file to {}".format(_path))
            try:
                f.save(_path)
            except Exception as e:
                logging.error("unable to save file to {}: {}".format(_path, e))
                raise

            full_path = os.path.join(alert.storage_dir, f.filename)

            try:
                dest_dir = os.path.dirname(full_path)
                if not os.path.isdir(dest_dir):
                    try:
                        os.makedirs(dest_dir)
                    except Exception as e:
                        logging.error("unable to create directory {}: {}".format(dest_dir, e))
                        raise

                logging.debug("copying file {} to {}".format(_path, full_path))
                shutil.copy(_path, full_path)

                # add this as an F_FILE type observable
                alert.add_observable(F_FILE, os.path.relpath(full_path, start=alert.storage_dir))

            except Exception as e:
                logging.error("unable to copy file from {} to {} for alert {}: {}".format(
                              _path, full_path, alert, e))
                raise

        except Exception as e:
            logging.error("unable to deal with file {}: {}".format(f, e))
            report_exception()
            return "", 500

        finally:
            try:
                shutil.rmtree(temp_dir)
            except Exception as e:
                logging.error("unable to delete temp dir {}: {}".format(temp_dir, e))

    try:
        if not alert.sync():
            logging.error("unable to sync alert")
            return "", 500

        # send the alert to the automated analysis engine
        alert.request_correlation()

    except Exception as e:
        logging.error("unable to sync to database: {}".format(e))
        report_exception()
        return "", 500

    return str(alert.id), 200
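The handler above reads a multipart/form-data POST: the alert JSON in the 'alert' form field and any number of uploaded files under the 'data' field. A minimal client-side sketch follows; the server address, route, timestamp format, observable type and directive name are placeholders that do not appear in the snippet, while the form field names and JSON keys mirror what request_handler_1_3() reads.

import json
import uuid
import requests

alert_json = {
    'uuid': str(uuid.uuid4()),
    'tool': 'example_tool',
    'tool_instance': 'example_instance',
    'type': 'example_type',
    'description': 'example alert',
    'event_time': '2018-01-01 00:00:00',       # timestamp format is an assumption
    'details': {'anything': 'goes here'},
    'observables': {
        # type -> list of [value, time, is_suspect (deprecated), directives]
        'ipv4': [['192.0.2.1', '2018-01-01 00:00:00', False, ['example_directive']]],
    },
    'tags': ['example_tag'],
    'name': 'example alert name',               # optional
    'company_name': 'example_company',          # optional, falls back to config
}

with open('sample.txt', 'rb') as fp:
    response = requests.post(
        'https://ace.example.com/api/submit_1_3',   # placeholder URL
        data={'alert': json.dumps(alert_json)},
        files=[('data', ('sample.txt', fp))])

print(response.status_code, response.text)      # alert id on 200, empty body on 500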
Example #2
def request_handler_1_2():
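    """Handle a protocol version 1.2 alert submission.

    Same flow as the 1.3 handler above, except observable entries carry only
    (value, time) plus an optional deprecated is_suspect flag and no
    per-observable directives."""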
    assert saq.CONFIG is not None

    # TODO actually use the library
    # protocol constants copied over from the client library ;)
    KEY_ID = 'id'
    KEY_UUID = 'uuid'
    KEY_TOOL = 'tool'
    KEY_TOOL_INSTANCE = 'tool_instance'
    KEY_TYPE = 'type'
    KEY_DESCRIPTION = 'description'
    KEY_EVENT_TIME = 'event_time'
    KEY_DETAILS = 'details'
    KEY_OBSERVABLES = 'observables'
    KEY_TAGS = 'tags'
    KEY_ATTACHMENTS = 'attachments'
    KEY_NAME = 'name'

    # client passes in the JSON contents of the alert
    contents = json.loads(request.form['alert'])

    alert = Alert()

    # set all of the properties individually
    # XXX fix me
    # it looks like the construction logic doesn't quite work here
    # when loading from the arguments to the constructor, the internal
    # variables with leading underscores get set rather than the properties
    # representing the database columns; it was designed that way to allow the
    # JSON stuff to work correctly, so I'll need to revisit that later

    alert.uuid = contents[KEY_UUID]
    alert.storage_dir = os.path.join(saq.CONFIG['global']['data_dir'], saq.SAQ_NODE, alert.uuid[0:3], alert.uuid)
    alert.initialize_storage()
    alert.tool = contents[KEY_TOOL]
    alert.tool_instance = contents[KEY_TOOL_INSTANCE]
    alert.alert_type = contents[KEY_TYPE]
    alert.description = contents[KEY_DESCRIPTION]
    alert.event_time = contents[KEY_EVENT_TIME]
    alert.details = contents[KEY_DETAILS]

    # XXX shame on me for not testing well enough
    if KEY_NAME in contents:
        alert.name = contents[KEY_NAME]

    # add all the specified observables
    # each key in the observable dictionary is the type
    for o_type in contents[KEY_OBSERVABLES].keys():
        # protocol version 1.2 only had two elements (value, time)
        # later versions added is_suspect as an optional third element (deprecated)
        for values in contents[KEY_OBSERVABLES][o_type]:
            o_value = values[0]
            o_time = values[1]
            is_suspect = False # deprecated
            if len(values) > 2:
                is_suspect = values[2]

            alert.add_observable(o_type, o_value, o_time)

    # add all the specified tags
    for tag in contents[KEY_TAGS]:
        alert.add_tag(tag)

    #alert._materialize()

    # save the attachments to disk and add them as observables of type file
    for f in request.files.getlist('data'):
        logging.debug("recording file {0}".format(f.filename))
        # XXX why not just save straight to the destination address?
        temp_dir = tempfile.mkdtemp(dir=saq.CONFIG.get('server', 'incoming_dir'))
        _path = os.path.join(temp_dir, secure_filename(f.filename))
        try:
            if os.path.exists(_path):
                logging.error("duplicate file name {0}".format(_path))
                raise RuntimeError("duplicate file name {0}".format(_path))

            logging.debug("saving file to {0}".format(_path))
            try:
                f.save(_path)
            except Exception as e:
                logging.error("unable to save file to {0}: {1}".format(_path, str(e)))
                raise

            full_path = os.path.join(alert.storage_dir, f.filename)

            try:
                dest_dir = os.path.dirname(full_path)
                if not os.path.isdir(dest_dir):
                    try:
                        os.makedirs(dest_dir)
                    except Exception as e:
                        logging.error("unable to create directory {0}: {1}".format(dest_dir, str(e)))
                        raise

                logging.debug("copying file {0} to {1}".format(_path, full_path))
                shutil.copy(_path, full_path)

                # add this as an F_FILE type observable
                alert.add_observable(F_FILE, os.path.relpath(full_path, start=alert.storage_dir))

            except Exception as e:
                logging.error("unable to copy file from {0} to {1} for alert {2}: {3}".format(
                    _path, full_path, alert, str(e)))
                raise

        except Exception as e:
            logging.error("unable to deal with file {0}: {1}".format(f, str(e)))
            report_exception()
            return "", 500

        finally:
            try:
                shutil.rmtree(temp_dir)
            except Exception as e:
                logging.error("unable to delete temp dir {0}: {1}".format(temp_dir, str(e)))

    try:
        if not alert.sync():
            logging.error("unable to sync alert")
            return "", 500

        # send the alert to the automated analysis engine
        alert.request_correlation()

    except Exception as e:
        logging.error("unable to sync to database: {0}".format(str(e)))
        report_exception()
        return "", 500

    return str(alert.id), 200
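For reference, the main wire-level difference between the two handlers is the shape of each observable entry (the 1.3 handler also accepts an optional company_name field). A small sketch of the two shapes, using placeholder values, types and directive names:

# hypothetical payloads illustrating the observable shapes read by the two handlers
observables_1_2 = {
    'ipv4': [
        ['192.0.2.1', '2018-01-01 00:00:00'],           # [value, time]
        ['192.0.2.2', '2018-01-01 00:00:00', False],    # optional trailing is_suspect (deprecated)
    ],
}

observables_1_3 = {
    'ipv4': [
        # [value, time, is_suspect (deprecated), directives]
        ['192.0.2.1', '2018-01-01 00:00:00', False, ['example_directive']],
    ],
}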