# Example 1
def prepareLoggers(logLevel: int, logFilter: str, sensorID: str, outDir: Path):
    """
    Configure the PyRDP, connections and crawler loggers.

    Installs a filtered stream handler and a daily-rotating file handler on
    the root PyRDP logger, a JSON file handler on the connections logger,
    plain-text and JSON file handlers on the crawler logger, and prepares the
    SSL master-secret log.

    :param logLevel: log level for the stream handler.
    :param logFilter: logger name to filter on.
    :param sensorID: ID to differentiate between instances of this program in the JSON log.
    :param outDir: output directory.
    """
    logDir = outDir / "logs"
    # parents=True also creates outDir itself if it does not exist yet;
    # plain mkdir(exist_ok=True) would raise FileNotFoundError in that case.
    logDir.mkdir(parents=True, exist_ok=True)

    formatter = VariableFormatter(
        "[{asctime}] - {levelname} - {sessionID} - {name} - {message}",
        style="{",
        defaultVariables={"sessionID": "GLOBAL"})

    # Console output: honors the requested level and name filter.
    streamHandler = logging.StreamHandler()
    streamHandler.setFormatter(formatter)
    streamHandler.setLevel(logLevel)
    streamHandler.addFilter(LoggerNameFilter(logFilter))

    # Full debug log on disk, rotated daily.
    logFileHandler = logging.handlers.TimedRotatingFileHandler(logDir /
                                                               "mitm.log",
                                                               when="D")
    logFileHandler.setFormatter(formatter)

    # Machine-readable connection log, tagged with this sensor's ID.
    jsonFileHandler = logging.FileHandler(logDir / "mitm.json")
    jsonFileHandler.setFormatter(JSONFormatter({"sensor": sensorID}))
    jsonFileHandler.setLevel(logging.INFO)

    rootLogger = logging.getLogger(LOGGER_NAMES.PYRDP)
    rootLogger.addHandler(streamHandler)
    rootLogger.addHandler(logFileHandler)
    # Root stays at DEBUG so the file handler gets everything; the stream
    # handler's own level does the console filtering.
    rootLogger.setLevel(logging.DEBUG)

    connectionsLogger = logging.getLogger(LOGGER_NAMES.MITM_CONNECTIONS)
    connectionsLogger.addHandler(jsonFileHandler)

    crawlerFormatter = VariableFormatter(
        "[{asctime}] - {sessionID} - {message}",
        style="{",
        defaultVariables={"sessionID": "GLOBAL"})

    crawlerFileHandler = logging.FileHandler(logDir / "crawl.log")
    crawlerFileHandler.setFormatter(crawlerFormatter)

    jsonCrawlerFileHandler = logging.FileHandler(logDir / "crawl.json")
    jsonCrawlerFileHandler.setFormatter(JSONFormatter({"sensor": sensorID}))

    crawlerLogger = logging.getLogger(LOGGER_NAMES.CRAWLER)
    crawlerLogger.addHandler(crawlerFileHandler)
    crawlerLogger.addHandler(jsonCrawlerFileHandler)
    crawlerLogger.setLevel(logging.INFO)

    log.prepareSSLLogger(logDir / "ssl.log")
# Example 2
def prepare_loggers(logLevel, sensorID):
    """
    Sets up the "mitm" and the "mitm.connections" loggers.

    :param logLevel: level applied to both the mitm and mitm.connections loggers.
    :param sensorID: ID used to tag records written to the JSON log.
    """
    log.prepare_pyrdp_logger(logLevel)
    log.prepare_ssl_session_logger()

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs("log", exist_ok=True)

    mitm_logger = getLoggerPassFilters(LOGGER_NAMES.MITM)
    mitm_logger.setLevel(logLevel)

    mitm_connections_logger = getLoggerPassFilters(
        LOGGER_NAMES.MITM_CONNECTIONS)
    mitm_connections_logger.setLevel(logLevel)

    formatter = log.get_formatter()

    # Console plus a daily-rotating file, both with the shared formatter.
    stream_handler = logging.StreamHandler()
    file_handler = logging.handlers.TimedRotatingFileHandler("log/mitm.log",
                                                             when="D")
    stream_handler.setFormatter(formatter)
    file_handler.setFormatter(formatter)
    mitm_logger.addHandler(stream_handler)
    mitm_logger.addHandler(file_handler)

    # Make sure that the library writes to the file as well
    pyrdp_logger = log.get_logger()
    pyrdp_logger.addHandler(file_handler)

    exceptions_logger = getLoggerPassFilters(LOGGER_NAMES.PYRDP_EXCEPTIONS)
    # Don't propagate to ancestors: exceptions go to the file only.
    exceptions_logger.propagate = False
    exceptions_logger.addHandler(file_handler)

    # Machine-readable JSON log, tagged with this sensor's ID via a filter.
    jsonFormatter = JSONFormatter()
    jsonFileHandler = logging.FileHandler("log/mitm.json")
    sensorFilter = SensorFilter(sensorID)

    jsonFileHandler.setFormatter(jsonFormatter)
    jsonFileHandler.setLevel(logging.INFO)
    jsonFileHandler.addFilter(sensorFilter)

    # Reuse the logger already fetched above instead of looking it up again.
    mitm_connections_logger.addHandler(jsonFileHandler)