Example #1
import logging
import os

def init_logger(sd_loglevel=logging.WARN, stream_loglevel=logging.CRITICAL):
    logging.setLogRecordFactory(LogRecordWithHexThreadID)
    logger = logging.getLogger('deepfx')
    logger.setLevel(sd_loglevel)
    formatter = logging.Formatter('[%(hex_threadid)s] %(message)s')

    # A falsy level (0 or None) disables the corresponding handler entirely.
    if sd_loglevel:
        import google.cloud.logging
        from google.cloud.logging.handlers import CloudLoggingHandler
        client = google.cloud.logging.Client.from_service_account_json(
            os.environ.get('GOOGLE_SERVICE_ACCOUNT_JSON_PATH'))
        handler = CloudLoggingHandler(client, name='deepfx')
        handler.setLevel(sd_loglevel)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    if stream_loglevel:
        handler = logging.StreamHandler()
        handler.setLevel(stream_loglevel)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    return logger
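
The snippet depends on a LogRecordWithHexThreadID record factory that is not shown. A minimal sketch of such a factory (the class name comes from the call above; the exact hex formatting is an assumption) could look like this:

import logging
import threading

class LogRecordWithHexThreadID(logging.LogRecord):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Expose the current thread id as a hex string so the
        # '[%(hex_threadid)s]' format string above can resolve it.
        self.hex_threadid = format(threading.get_ident(), 'x')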
Example #2
import logging
import sys

import google.cloud.logging
from google.cloud.logging.handlers import CloudLoggingHandler

def makeLogger(name):
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

    client = google.cloud.logging.Client()
    # APP_NAME is assumed to be a module-level constant naming the Cloud log.
    gcpHandler = CloudLoggingHandler(client, name=APP_NAME)
    gcpHandler.setLevel(logging.INFO)
    logger.addHandler(gcpHandler)

    stdoutHandler = logging.StreamHandler(sys.stdout)
    stdoutHandler.setLevel(logging.DEBUG)
    logger.addHandler(stdoutHandler)

    return logger
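
A hypothetical usage, assuming APP_NAME is defined as noted above:

logger = makeLogger(__name__)
logger.debug("stdout only: gcpHandler drops records below INFO")
logger.info("stdout and Cloud Logging")

Note that calling makeLogger twice with the same name attaches duplicate handlers to the same logger object; guarding the handler setup with if not logger.handlers: is a common fix.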
Example #3
    # Requires module-level imports: json, logging, logging.handlers, os.
    def __init__(self):
        self._refresh_balance()

        # Cloud logging is optional; fall back to local handlers if absent.
        gcloud_logging = True
        try:
            import google.cloud.logging
            from google.cloud.logging.handlers import CloudLoggingHandler
        except ImportError:
            gcloud_logging = False

        handlers = []

        if gcloud_logging:
            client = google.cloud.logging.Client()
            ghandler = CloudLoggingHandler(client)
            ghandler.setLevel(logging.INFO)
            handlers.append(ghandler)

        fmt = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
        shandler = logging.StreamHandler()
        shandler.setFormatter(fmt)
        shandler.setLevel(logging.DEBUG)
        handlers.append(shandler)

        fhandler = logging.handlers.TimedRotatingFileHandler("bot.log", when="d", interval=1, backupCount=14)
        fhandler.setFormatter(fmt)
        fhandler.setLevel(logging.DEBUG)
        handlers.append(fhandler)

        logging.basicConfig(
            level=logging.DEBUG,
            format="%(levelname)s:%(message)s",
            handlers=handlers
        )

        if not os.path.isfile("history.json"):
            self._save_history()

        with open('history.json', 'r') as fp:
            self.buy_history = json.load(fp)

        # Drop history entries for pairs the bot no longer tracks.
        self.buy_history = {key: val for key, val in self.buy_history.items() if key in self.coin_pairs}

        logging.getLogger("requests").setLevel(logging.WARNING)
        logging.getLogger("urllib3").setLevel(logging.WARNING)

        logging.info("Bot initialized")
Example #4
import logging
import logging.config
import os
import sys
import time
from threading import Thread
from typing import Callable, Dict, Optional

# LOG_FORMAT, LogConfig and upload_logs_settings are assumed to be defined
# elsewhere in this module.
def config_logger(
    name: str,
    level: int = logging.INFO,
    write_to_file: bool = True,
    use_stackdriver: bool = False,
    stackdriver_level: int = logging.INFO,
    stackdriver_name: Optional[str] = None,
    tracemalloc: bool = False,
    upload_func: Optional[Callable[[str, str], None]] = None,
    upload_frequency: Optional[float] = None,
    custom_loggers_config: Optional[Dict[str, Dict]] = None,
    format: str = LOG_FORMAT,
    logdir: str = "logs",
) -> None:

    logger = logging.getLogger()

    if name.endswith(".py"):
        name = name.rsplit(".", 1)[0]

    handlers: Dict[str, LogConfig] = {
        "default": {
            "level": logging.getLevelName(level),
            "formatter": "standard",
            "class": "logging.StreamHandler",
        }
    }
    if write_to_file:
        os.makedirs(logdir, exist_ok=True)
        handlers.update({
            "file": {
                "level": "INFO",
                "formatter": "standard",
                "class": "logging.handlers.RotatingFileHandler",
                "filename": f"{logdir}/{name}.log",
                "maxBytes": 1024 * 1024 * 100,
                "backupCount": 3,
                "delay": True,
            },
            "file_debug": {
                "level": "DEBUG",
                "formatter": "standard",
                "class": "logging.handlers.RotatingFileHandler",
                "filename": f"{logdir}/{name}.debug.log",
                "maxBytes": 1024 * 1024 * 100,
                "backupCount": 3,
                "delay": True,
            },
            "web_access": {
                "level": "DEBUG",
                "formatter": "",
                "class": "logging.handlers.RotatingFileHandler",
                "filename": f"{logdir}/access.log",
                "maxBytes": 1024,
                "backupCount": 0,
                "delay": True,
            },
        })
    else:
        handlers.update({
            "file": {
                "class": "logging.NullHandler",
            },
            "file_debug": {
                "class": "logging.NullHandler",
            },
            "web_access": {
                "class": "logging.NullHandler",
            },
        })

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "standard": {
                "format": format
            },
        },
        "handlers": handlers,
        "loggers": {
            "": {
                "handlers": ["default", "file", "file_debug"],
                "level": "DEBUG",
                "propagate": True,
            },
            "cherrypy.access": {
                "handlers": ["web_access"],
                "level": "WARN",
                "propagate": False,
            },
            "sanic.access": {
                "handlers": ["web_access"],
                "level": "WARN",
                "propagate": False,
            },
            "libav.AVBSFContext": {
                "handlers": ["default", "file", "file_debug"],
                "level": "CRITICAL",
                "propagate": False,
            },
            "libav.swscaler": {
                "handlers": ["default", "file", "file_debug"],
                "level": "CRITICAL",
                "propagate": False,
            },
            "datadog.api": {
                "handlers": [],
                "level": "ERROR",
                "propagate": False
            },
            **(custom_loggers_config or {}),
        },
    })

    if use_stackdriver:
        import google.cloud.logging
        from google.cloud.logging.handlers import CloudLoggingHandler
        from google.cloud.logging.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS

        # noinspection PyUnresolvedReferences
        client = google.cloud.logging.Client()
        # client.setup_logging()

        handler = CloudLoggingHandler(client, name=stackdriver_name or name)
        handler.setLevel(stackdriver_level)
        logger.addHandler(handler)
        for logger_name in EXCLUDED_LOGGER_DEFAULTS + (
                "urllib3.connectionpool", ):
            exclude = logging.getLogger(logger_name)
            exclude.propagate = False
            # exclude.addHandler(logging.StreamHandler())

    if tracemalloc:
        import tracemalloc  # rebinds the local name from the bool flag to the module

        tracemalloc.start()

        tracemalloc_logger = logging.getLogger("tracemalloc")

        def tracemalloc_loop():
            while True:
                time.sleep(5 * 60)
                snapshot = tracemalloc.take_snapshot()
                top_stats = snapshot.statistics("lineno")
                tracemalloc_logger.info("tracemalloc:")
                for stat in top_stats[:10]:
                    tracemalloc_logger.info(f"  {stat}")

        Thread(target=tracemalloc_loop, name="tracemalloc",
               daemon=True).start()

    # if use_stackdriver_error:
    #     from google.cloud import error_reporting
    #     client = error_reporting.Client()

    # if use_datadog:
    #     import datadog
    #     from datadog_logger import DatadogLogHandler
    #     datadog.initialize(api_key=os.environ['DATADOG_API_KEY'], app_key=os.environ['DATADOG_APP_KEY'])
    #     datadog_handler = DatadogLogHandler(
    #         tags=[
    #             f'host:{socket.gethostname()}',
    #             f'pid:{os.getpid()}',
    #             f'stack:{name}',
    #             'type:log'],
    #         mentions=[],
    #         level=logging.INFO
    #     )
    #     logger.addHandler(datadog_handler)

    # Emit a few blank lines so separate runs are easy to spot in the files.
    for _ in range(3):
        logger.info("")
    logger.info(
        f'Command: "{" ".join(sys.argv)}", pid={os.getpid()}, name={name}')
    if use_stackdriver:
        logger.info(
            f"Connected to google cloud logging. Using name={name!r}. Logging class: {logging.getLoggerClass()}"
        )

    upload_logs_settings["write_to_file"] = write_to_file
    if write_to_file and upload_func and upload_frequency:
        upload_logs_settings["upload_func"] = upload_func
        file: str = handlers["file"]["filename"]
        file_debug: str = handlers["file_debug"]["filename"]
        # noinspection PyTypeChecker
        upload_logs_settings["args"] = file, file_debug

        def upload_loop() -> None:
            while True:
                assert upload_frequency
                assert upload_func
                time.sleep(upload_frequency)
                upload_func(handlers["file"]["filename"],
                            handlers["file_debug"]["filename"])

        logger.info(f"Uploading log files every {upload_frequency}s")
        Thread(target=upload_loop, daemon=True).start()

    logging.getLogger("tensorflow").setLevel(logging.ERROR)
    os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
Example #5
# Assumes module-level globals (is_init, env, is_google_cloud, BASE_FORMAT)
# and imports of logging, google.cloud.logging, CloudLoggingHandler and
# logging.config.dictConfig; see the sketch after this example.
def logger_config():
    global is_init
    if is_init:
        logging.warning('logger is already initialized')
        return

    root_module = __name__.split('.')[0]
    formatters = {
        'f': {
            'format': BASE_FORMAT
        },
        'json': {
            'format': '%(asctime)s %(name)s %(levelname)s %(message)s',
            "class": f'{root_module}.formatter.CustomJsonFormatter'
        }
    }

    handlers = {
        "console": {
            "class": "logging.StreamHandler",
            "level": "DEBUG",
            "formatter": "json",
            "stream": "ext://sys.stdout"
        }
    }

    loggers = {
        "boto3": {
            "level": "WARNING",
        },
        "botocore": {
            "level": "WARNING",
        },
        "engineio": {
            "level": "WARNING",
        },
        "urllib3": {
            "level": "WARNING",
            "propagate": "no"
        },
        "tweepy": {
            "level": "WARNING",
            "propagate": "no"
        },
        "prawcore": {
            "level": "WARNING",
            "propagate": "no"
        },
        "requests": {
            "level": "WARNING",
            "propagate": "no"
        },
        "socketio": {
            "level": "INFO" if env == 'development' else "WARNING"
        },
        "sqlalchemy": {
            "level": "WARNING",
        }
    }

    root_handlers = ["console"]

    logging_config = dict(version=1,
                          disable_existing_loggers=False,
                          formatters=formatters,
                          handlers=handlers,
                          loggers=loggers,
                          root={
                              "level": "INFO",
                              "handlers": root_handlers
                          })
    is_init = True
    dictConfig(logging_config)

    # Setup google client
    if is_google_cloud:
        try:
            client = google.cloud.logging.Client()
            handler = CloudLoggingHandler(client)
            handler.setLevel(logging.INFO)
            root_logger = logging.getLogger()  # getLogger('root') would return a logger literally named 'root'
            root_logger.addHandler(handler)
            logging.info('Google cloud logger was installed successfully')
        except Exception as e:
            print(f'Failed to add Google Cloud logger: {e}')
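
The function leans on module-level state that is not shown. A hedged sketch of the assumed surrounding definitions (names are inferred from the function body; the format string and detection heuristics are placeholders, and CustomJsonFormatter is assumed to live in the package's formatter module):

import logging
import logging.config
import os

import google.cloud.logging
from google.cloud.logging.handlers import CloudLoggingHandler
from logging.config import dictConfig

BASE_FORMAT = '%(asctime)s %(name)s %(levelname)s %(message)s'  # placeholder layout
env = os.environ.get('ENV', 'production')
is_google_cloud = bool(os.environ.get('GOOGLE_CLOUD_PROJECT'))  # one plausible check
is_init = False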