Example #1
class LoggerCountShipper(Shipper):
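    """Monitor that logs a count of the anomalies it receives."""
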
    PARSER = parsers.Options({
        'level':
        parsers.String(validators=[is_valid_level]),
    })

    def __init__(self, level: int = logging.INFO, **kwargs):
        super().__init__(**kwargs)
        self.level: int = level

    def configure(self, config: Config) -> None:
        config = super().configure(config)
        if 'level' in config:
            self.level = logging.getLevelName(config['level'].value)

    def ship(self, investigation, anomalies: DataFrame) -> None:
        logger.log(self.level, "Received %d events", len(anomalies))
Example #2
class LoggerShipper(Shipper):
    """Monitor that logs alerts to the standard logger."""

    PARSER = parsers.Options({
        'level':
        parsers.String(validators=[is_valid_level]),
    })

    def __init__(self, level: int = logging.INFO, **kwargs):
        super().__init__(**kwargs)
        self.level: int = level

    def configure(self, config: Config) -> None:
        config = super().configure(config)
        if 'level' in config:
            self.level = logging.getLevelName(config['level'].value)

    def ship(self, investigation, anomalies: DataFrame) -> None:
        for row in anomalies.iterrows():
            logger.log(self.level, "\n%s", row)
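
Both shippers accept a `level` keyword and log whatever anomalies they receive. A minimal usage sketch, assuming the base `Shipper` class requires no constructor arguments of its own and accepting `None` as a stand-in for the `investigation` argument:

import logging

import pandas as pd

logging.basicConfig(level=logging.DEBUG)  # make the log output visible

anomalies = pd.DataFrame({
    'field': ['username', 'hostname'],
    'score': [0.92, 0.87],
})

shipper = LoggerShipper(level=logging.WARNING)
shipper.ship(None, anomalies)  # logs each anomaly row at WARNING level
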
Example #3
class CSVShipper(Shipper):
    """Saves the malicious anomalies to a CSV.

    Appends results instead of writing over the file.

    Parameters
    ----------
    filename: str
        Path to the CSV file.
    """
    PARSER = parsers.Options({
        'filename': parsers.String(),
    })

    def __init__(self, filename: str = 'alerts.csv', **kwargs):
        super().__init__(**kwargs)
        self.filename: str = filename

    def configure(self, config: Config) -> Config:
        config = super().configure(config)
        self.filename = config.get_value('filename', self.filename)
        return config

    def ship(self, investigation, anomalies: pa.DataFrame) -> None:
        logger.info(
            "Writing %d anomalies to '%s'",
            len(anomalies),
            self.filename,
        )
        write_header = True
        try:
            write_header = os.path.getsize(self.filename) == 0
        except FileNotFoundError:
            pass
        anomalies.to_csv(
            self.filename,
            mode='a+',
            header=write_header,
            index=False,
        )
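
A minimal usage sketch for the shipper above, under the same assumption that the base `Shipper` class requires no constructor arguments of its own. Because the file is opened in append mode and the header check looks at the file size, repeated calls keep appending rows and only the first write emits a header:

import pandas as pd

shipper = CSVShipper(filename='alerts.csv')
anomalies = pd.DataFrame({'event_id': [1, 2], 'score': [0.95, 0.81]})

shipper.ship(None, anomalies)  # creates alerts.csv and writes the header row
shipper.ship(None, anomalies)  # appends two more rows without a header
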
Example #4
from logging.config import dictConfig

from scrywarden.config import parsers
from scrywarden.config.base import Config

LOGGING_PARSER = parsers.Options(
    {
        'version':
        parsers.Integer(default=1),
        'formatters':
        parsers.Dict(
            parsers.Options({
                'format': parsers.String(),
                'datefmt': parsers.String(),
            })),
        'handlers':
        parsers.Dict(
            parsers.Options({
                'class': parsers.String(required=True),
                'level': parsers.String(),
                'formatter': parsers.String(),
                'filters': parsers.List(parsers.String()),
            })),
        'loggers':
        parsers.Dict(
            parsers.Options({
                'level': parsers.String(),
                'propagate': parsers.Boolean(),
                'filters': parsers.List(parsers.String()),
                'handlers': parsers.List(parsers.String()),
            })),
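
The options mirror the schema accepted by the standard library's `dictConfig`. For reference, a dictionary of this shape can be passed straight to `logging.config.dictConfig`; the formatter, handler, and logger names below are hypothetical:

from logging.config import dictConfig

dictConfig({
    'version': 1,
    'formatters': {
        'simple': {'format': '%(asctime)s %(levelname)s %(message)s'},
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'INFO',
            'formatter': 'simple',
        },
    },
    'loggers': {
        'scrywarden': {'level': 'DEBUG', 'handlers': ['console']},
    },
})
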
Example #5
        sa.Integer,
        sa.ForeignKey(Investigation.id, ondelete='CASCADE'),
        nullable=False,
    ),
    sa.Column(
        'event_id',
        sa.BigInteger,
        sa.ForeignKey(Event.id, ondelete='CASCADE'),
        nullable=False,
    ),
    sa.UniqueConstraint('investigation_id', 'event_id'),
)
"""Many to many relation for investigations and events."""

PARSER = parsers.Options({
    'host': parsers.String(default='localhost'),
    'port': parsers.Integer(default=5432),
    'name': parsers.String(default='scrywarden'),
    'user': parsers.String(default='scrywarden'),
    'password': parsers.String(default='scrywarden'),
})


def parse_engine(config: Config) -> Engine:
    """Parses an SQLAlchemy engine from a config object.

    Parameters
    ----------
    config: Config
        Configuration object to parse engine from.
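
The defaults above describe a local PostgreSQL database. As a sketch of what `parse_engine` presumably produces from those values, here is how such options are typically combined into an SQLAlchemy engine; the `postgresql` URL scheme is an assumption, not taken from the source:

from sqlalchemy import create_engine

# Assumed mapping of the parsed options onto a PostgreSQL connection URL.
url = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format(
    user='scrywarden',
    password='scrywarden',
    host='localhost',
    port=5432,
    name='scrywarden',
)
engine = create_engine(url)
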
Example #6
class CSVTransport(EphemeralTransport):
    """Transport that reads messages from a CSV file.

    By default it yields each row as a message whose data is a dictionary
    mapping each CSV header to the corresponding string value from the row.

    Override the `transform` method to transform this dictionary value before
    setting it as the message value.

    Setting a `process_check` integer value logs a progress message every
    `process_check` rows to keep the user updated on its progress.

    Parameters
    ----------
    file: str
        Path to the CSV file.
    headers: Iterable[str]
        Header names to use for the CSV if none are given in the file.
    process_check: int
        Log a progress message every time this many rows have been read.
    """

    PARSER = parsers.Options({
        'file': parsers.String(),
        'headers': parsers.List(parsers.String()),
        'process_check': parsers.Integer(),
    })

    def __init__(
            self,
            file: str = '',
            headers: t.Iterable[str] = (),
            process_check: int = 0,
            **kwargs,
    ):
        super().__init__(**kwargs)
        self.file: str = file
        self.headers = headers or None
        self.process_check: int = process_check
        self._create_messages: t.Callable[
            [csv.DictReader],
            t.Iterable[Message],
        ] = self._create_messages_without_log

    def process(self) -> t.Iterable[Message]:
        """Reads all rows as messages.

        Returns
        -------
        Iterable[Message]
            Each CSV row wrapped as a message.
        """
        if self.process_check:
            self._create_messages = self._create_messages_with_log
        with open(self.file) as file:
            reader = csv.DictReader(file, fieldnames=self.headers or None)
            yield from self._create_messages(reader)

    def configure(self, config: Config) -> Config:
        self.file = config.get_value('file', self.file)
        self.headers = config.get_value('headers', self.headers)
        self.process_check = config.get_value(
            'process_check',
            self.process_check,
        )
        return config

    def transform(self, row: t.Dict) -> t.Dict:
        """Overridable method that transforms the row value.

        This modifies the row data before setting it as the message data. This
        is useful for things like parsing numbers from the string values.

        Parameters
        ----------
        row: Dict
            Row represented as a dictionary value.

        Returns
        -------
        Dict
            Modified row dictionary.
        """
        return row

    def _create_messages_without_log(
        self,
        reader: csv.DictReader,
    ) -> t.Iterable[Message]:
        for row in reader:
            yield self._create_message(row)

    def _create_message(self, row: t.Dict) -> Message:
        return Message.create(self.transform(row))

    def _create_messages_with_log(
        self,
        reader: csv.DictReader,
    ) -> t.Iterable[Message]:
        for index, row in enumerate(reader, 1):
            if index % self.process_check == 0:
                logger.info("%d rows read from '%s'", index, self.file)
            yield self._create_message(row)
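
A usage sketch for the transport above, assuming the base `EphemeralTransport` class requires no constructor arguments of its own. The subclass illustrates the kind of `transform` override the docstring describes, using a hypothetical numeric `bytes` column:

class NumericCSVTransport(CSVTransport):
    """Hypothetical subclass that parses one column before the row becomes message data."""

    def transform(self, row):
        row['bytes'] = int(row['bytes'])
        return row


transport = NumericCSVTransport(file='events.csv', process_check=10_000)
for message in transport.process():
    ...  # handle each Message produced from a CSV row
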