Example #1
def main(rule_name=None):
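    """Run one alert rule if rule_name is given, otherwise every loaded
    alert rule across a worker pool; record the aggregate inserted and
    updated row counts and, if enabled, a CloudWatch run metric.
    """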
    RUN_METADATA = {
        'RUN_ID': RUN_ID,
        'RUN_TYPE': 'ALERT QUERY',
        'START_TIME': datetime.datetime.utcnow(),
    }
    if rule_name:
        metadata = [create_alerts(rule_name)]
    else:
        rules = list(db.load_rules(ALERT_QUERY_POSTFIX))
        metadata = Pool(POOLSIZE).map(create_alerts, rules)

    RUN_METADATA['ROW_COUNT'] = {
        'INSERTED': sum(q['ROW_COUNT']['INSERTED'] for q in metadata),
        'UPDATED': sum(q['ROW_COUNT']['UPDATED'] for q in metadata),
    }
    db.record_metadata(RUN_METADATA, table=RUN_METADATA_TABLE)

    try:
        if CLOUDWATCH_METRICS:
            log.metric(
                'Run',
                'SnowAlert',
                [{
                    'Name': 'Component',
                    'Value': 'Alert Query Runner'
                }],
                1,
            )
    except Exception as e:
        log.error("Cloudwatch metric logging failed: ", e)
Example #2
def run_suppression(squelch_name):
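    """Run one violation suppression query and record how many rows it
    suppressed.
    """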
    metadata = {
        'QUERY_NAME': squelch_name,
        'RUN_ID': RUN_ID,
        'ATTEMPTS': 1,
        'START_TIME': datetime.datetime.utcnow(),
        'ROW_COUNT': {
            'SUPPRESSED': 0
        }
    }
    log.info(f"{squelch_name} processing...")
    try:
        query = VIOLATION_SUPPRESSION_QUERY.format(squelch_name=squelch_name)
        num_violations_suppressed = next(
            db.fetch(query))['number of rows updated']
        log.info(f"{squelch_name} updated {num_violations_suppressed} rows.")
        metadata['ROW_COUNT']['SUPPRESSED'] = num_violations_suppressed
        db.record_metadata(metadata, table=QUERY_METADATA_TABLE)
        RULE_METADATA_RECORDS.append(metadata)

    except Exception as e:
        db.record_metadata(metadata, table=QUERY_METADATA_TABLE, e=e)
        log.error("Suppression query {squelch_name} execution failed.", e)

    print(f"Suppression query {squelch_name} executed")
Example #3
def main():
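    """Run every violation suppression rule, mark the violations that
    remain as not suppressed, and record the run totals.
    """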
    RUN_METADATA = {
        'RUN_TYPE': 'VIOLATION SUPPRESSION',
        'START_TIME': datetime.datetime.utcnow(),
        'RUN_ID': RUN_ID,
    }

    for squelch_name in db.load_rules(VIOLATION_SQUELCH_POSTFIX):
        run_suppression(squelch_name)

    num_violations_passed = next(
        db.fetch(SET_SUPPRESSED_FALSE))['number of rows updated']
    RUN_METADATA['ROW_COUNT'] = {
        'SUPPRESSED': sum(rmr['ROW_COUNT']['SUPPRESSED'] for rmr in RULE_METADATA_RECORDS),
        'PASSED': num_violations_passed,
    }
    db.record_metadata(RUN_METADATA, table=RUN_METADATA_TABLE)

    if CLOUDWATCH_METRICS:
        log.metric('Run', 'SnowAlert', [{
            'Name': 'Component',
            'Value': 'Violation Suppression Runner'
        }], 1)
Example #4
def create_alerts(rule_name: str) -> Dict[str, Any]:
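    """Run one alert rule over the last 90 minutes, merge the results
    into the alerts table, drop the run's results table, and return the
    run metadata.
    """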
    metadata: Dict[str, Any] = {
        'QUERY_NAME': rule_name,
        'RUN_ID': RUN_ID,
        'ATTEMPTS': 1,
        'START_TIME': datetime.datetime.utcnow(),
        'ROW_COUNT': {
            'INSERTED': 0,
            'UPDATED': 0,
        }
    }

    try:
        db.execute(RUN_ALERT_QUERY.format(
            query_name=rule_name,
            from_time_sql="DATEADD(minute, -90, CURRENT_TIMESTAMP())",
            to_time_sql="CURRENT_TIMESTAMP()",
        ), fix_errors=False)
        insert_count, update_count = merge_alerts(rule_name, GROUPING_CUTOFF)
        metadata['ROW_COUNT'] = {
            'INSERTED': insert_count,
            'UPDATED': update_count,
        }
        db.execute(f"DROP TABLE results.RUN_{RUN_ID}_{rule_name}")

    except Exception as e:
        db.record_metadata(metadata, table=QUERY_METADATA_TABLE, e=e)
        return metadata

    db.record_metadata(metadata, table=QUERY_METADATA_TABLE)

    log.info(f"{rule_name} done.")

    return metadata
Example #5
def main(squelch_name=None):
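    """Run every alert suppression rule (or only squelch_name), mark the
    remaining alerts suppressed=FALSE, and record the run totals.
    """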
    RUN_METADATA = {
        'RUN_TYPE': 'ALERT SUPPRESSION',
        'START_TIME': datetime.datetime.utcnow(),
        'RUN_ID': RUN_ID,
    }

    rules = (
        db.load_rules(ALERT_SQUELCH_POSTFIX) if squelch_name is None else [squelch_name]
    )
    for squelch_name in rules:
        run_suppressions(squelch_name)

    num_rows_updated = next(db.fetch(SET_SUPPRESSED_FALSE))['number of rows updated']
    log.info(
        f'All suppressions done, {num_rows_updated} remaining alerts marked suppressed=FALSE.'
    )

    RUN_METADATA['ROW_COUNT'] = {
        'PASSED': num_rows_updated,
        'SUPPRESSED': sum(m['ROW_COUNT']['SUPPRESSED'] for m in METADATA_HISTORY),
    }

    db.record_metadata(RUN_METADATA, table=RUN_METADATA_TABLE)

    try:
        if CLOUDWATCH_METRICS:
            log.metric(
                'Run',
                'SnowAlert',
                [{'Name': 'Component', 'Value': 'Alert Suppression Runner'}],
                1,
            )
    except Exception as e:
        log.error("Cloudwatch metric logging failed: ", e)
Example #6
def create_alerts(rule_name: str) -> Dict[str, Any]:
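    """Run one alert rule through db.insert_alerts_query_run and return
    its metadata with inserted and updated row counts.
    """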
    metadata: Dict[str, Any] = {
        'QUERY_NAME': rule_name,
        'RUN_ID': RUN_ID,
        'ATTEMPTS': 1,
        'START_TIME': datetime.datetime.utcnow(),
        'ROW_COUNT': {
            'INSERTED': 0,
            'UPDATED': 0,
        }
    }

    try:
        insert_count, update_count = db.insert_alerts_query_run(rule_name, GROUPING_CUTOFF)
        metadata['ROW_COUNT'] = {
            'INSERTED': insert_count,
            'UPDATED': update_count,
        }

    except Exception as e:
        log_failure(rule_name, e)
        db.record_metadata(metadata, table=QUERY_METADATA_TABLE, e=e)
        return metadata

    db.record_metadata(metadata, table=QUERY_METADATA_TABLE)

    log.info(f"{rule_name} done.")

    return metadata
Example #7
def connection_run(connection_table):
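    """Run the data connector configured in a connection table's YAML
    comment and record how many rows it ingested.
    """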
    table_name = connection_table['name']
    table_comment = connection_table['comment']

    log.info(f"-- START DC {table_name} --")
    try:
        metadata = {'START_TIME': datetime.utcnow()}
        options = yaml.safe_load(table_comment) or {}

        if 'module' in options:
            module = options['module']

            metadata.update({
                'RUN_ID': RUN_ID,
                'TYPE': module,
                'LANDING_TABLE': table_name,
                'INGEST_COUNT': 0,
            })

            connector = importlib.import_module(f"connectors.{module}")

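            # decrypt secret options and coerce values to their declared types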
            for module_option in connector.CONNECTION_OPTIONS:
                name = module_option['name']
                if module_option.get('secret') and name in options:
                    options[name] = vault.decrypt_if_encrypted(options[name])
                if module_option.get('type') == 'json':
                    options[name] = json.loads(options[name])
                if module_option.get('type') == 'list':
                    if isinstance(options[name], str):
                        options[name] = options[name].split(',')
                if module_option.get('type') == 'int':
                    options[name] = int(options[name])

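            # ingest() may return a total count, a generator of per-batch counts, or another value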
            if callable(getattr(connector, 'ingest', None)):
                ingested = connector.ingest(table_name, options)
                if isinstance(ingested, int):
                    metadata['INGEST_COUNT'] += ingested
                elif isinstance(ingested, GeneratorType):
                    for n in ingested:
                        metadata['INGEST_COUNT'] += n
                else:
                    metadata['INGESTED'] = ingested

            db.record_metadata(metadata, table=DC_METADATA_TABLE)

    except Exception as e:
        log.error(f"Error loading logs into {table_name}: ", e)
        db.record_metadata(metadata, table=DC_METADATA_TABLE, e=e)

    log.info(f"-- END DC --")
Example #8
def main(connection_table="%_CONNECTION"):
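    """Find connection tables matching the given name pattern and run
    each one's configured data connector, decrypting secret options first.
    """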
    for table in db.fetch(f"SHOW TABLES LIKE '{connection_table}' IN data"):
        table_name = table['name']
        table_comment = table['comment']

        log.info(f"-- START DC {table_name} --")
        # initialize metadata before the try so the except clause below can
        # record it even when YAML parsing fails
        metadata = {'START_TIME': datetime.utcnow()}
        try:
            options = yaml.safe_load(table_comment) or {}

            if 'module' in options:
                module = options['module']

                metadata.update({
                    'RUN_ID': RUN_ID,
                    'TYPE': module,
                    'LANDING_TABLE': table_name,
                    'INGEST_COUNT': 0,
                })

                connector = importlib.import_module(f"connectors.{module}")

                for module_option in connector.CONNECTION_OPTIONS:
                    name = module_option['name']
                    if module_option.get('secret') and name in options:
                        options[name] = vault.decrypt_if_encrypted(
                            options[name])

                if callable(getattr(connector, 'ingest', None)):
                    ingested = connector.ingest(table_name, options)
                    if isinstance(ingested, int):
                        metadata['INGEST_COUNT'] += ingested
                    elif isinstance(ingested, GeneratorType):
                        for n in ingested:
                            metadata['INGEST_COUNT'] += n
                    else:
                        metadata['INGESTED'] = ingested

                db.record_metadata(metadata, table=DC_METADATA_TABLE)

        except Exception as e:
            log.error(f"Error loading logs into {table_name}: ", e)
            db.record_metadata(metadata, table=DC_METADATA_TABLE, e=e)

        log.info(f"-- END DC --")
Example #9
def run_suppressions(squelch_name):
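    """Run one alert suppression query, record its row count (zero on
    failure), and append the metadata to METADATA_HISTORY.
    """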
    log.info(f"{squelch_name} processing...")
    metadata = {
        'QUERY_NAME': squelch_name,
        'RUN_ID': RUN_ID,
        'ATTEMPTS': 1,
        'START_TIME': datetime.datetime.utcnow(),
    }

    try:
        suppression_count = run_suppression_query(squelch_name)
        log.info(f"{squelch_name} updated {suppression_count} rows.")
        metadata['ROW_COUNT'] = {'SUPPRESSED': suppression_count}
        db.record_metadata(metadata, table=QUERY_METADATA_TABLE)

    except Exception as e:
        metadata['ROW_COUNT'] = {'SUPPRESSED': 0}
        db.record_metadata(metadata, table=QUERY_METADATA_TABLE, e=e)

    METADATA_HISTORY.append(metadata)
    log.info(f"{squelch_name} done.")
Example #10
def connection_run(connection_table, run_now=False):
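    """Run a connection table's data connector, skipping it unless run_now
    is set or its schedule (if any) is due; declared option types are
    coerced before ingesting.
    """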
    table_name = connection_table['name']
    table_comment = connection_table['comment']

    log.info(f"-- START DC {table_name} --")
    try:
        metadata = {'START_TIME': datetime.utcnow()}
        options = yaml.safe_load(table_comment) or {}

        if 'schedule' in options:
            schedule = options['schedule']
            now = datetime.now()
            if not run_now and not time_to_run(schedule, now):
                log.info(f'not scheduled: {schedule} at {now}')
                log.info(f"-- END DC --")
                return

        if 'module' not in options:
            log.info('no module in options')
            log.info("-- END DC --")
            return

        module = options['module']

        metadata.update({
            'RUN_ID': RUN_ID,
            'TYPE': module,
            'LANDING_TABLE': table_name
        })

        connector = importlib.import_module(f"connectors.{module}")

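        # decrypt secret options and coerce values to their declared types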
        for module_option in connector.CONNECTION_OPTIONS:
            name = module_option['name']
            if module_option.get('secret') and name in options:
                options[name] = vault.decrypt_if_encrypted(options[name])
            if module_option.get('type') == 'json':
                options[name] = json.loads(options[name])
            if module_option.get('type') == 'list':
                if isinstance(options[name], str):
                    options[name] = options[name].split(',')
            if module_option.get('type') == 'int':
                options[name] = int(options[name])

        if callable(getattr(connector, 'ingest', None)):
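            # record the run before ingesting, then again below with the final count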
            db.record_metadata(metadata, table=DC_METADATA_TABLE)
            result = do_ingest(connector, table_name, options)
            if result is not None:
                metadata['INGEST_COUNT'] = result
            else:
                metadata['INGESTED'] = result

        db.record_metadata(metadata, table=DC_METADATA_TABLE)

    except Exception as e:
        log.error(f"Error loading logs into {table_name}: ", e)
        db.record_metadata(metadata, table=DC_METADATA_TABLE, e=e)

    log.info(f"-- END DC --")