def provide_logger_UpdateWindData(self) -> Logger:
    """Create the logger for the UpdateWindData component.

    Emits JSON-formatted records at INFO level to STDOUT, tagged with a
    fixed component name and version so records can be traced back here.
    """
    return get_logger(
        logger_name="UpdateWindData",
        logger_version='0.0.1',
        logger_format=LoggingFormat.JSON,
        logger_level=logging.INFO,
        logger_output=LoggingOutput.STDOUT,
    )
from datetime import datetime, timedelta
from django.http import HttpResponse
from core.logging import get_logger
from notify.models import EventSeen
from notify.main import run_all

log = get_logger(__name__)


def run_triggers(request):
    """Run all notification triggers and acknowledge with a plain "ok"."""
    run_all()
    return HttpResponse("ok")


def gc(request):
    """General garbage collection tasks.

    EventSeen rows are time sensitive and old entries are irrelevant, so
    purge anything last seen more than two hours ago. GC is best-effort:
    failures are logged but never propagated to the caller.
    """
    try:
        # BUG FIX: the original filter used last_seen__gt, which selected
        # entries seen *within* the last two hours -- deleting the fresh
        # rows and keeping the stale ones. Old entries are the irrelevant
        # ones, so delete rows whose last_seen is *older* than the cutoff.
        cutoff = datetime.utcnow() - timedelta(hours=2)
        EventSeen.objects.filter(last_seen__lt=cutoff).delete()
    except Exception:
        log.exception(
            "Exception occurred trying to do event garbage collection")
    return HttpResponse("ok")
from types import FunctionType from typing import Coroutine, Dict, Awaitable, List, Union from discord import Message from core import utils, types from core.logging import get_logger from core.abc.server import AbstractServer logger = get_logger() class CommandSystem: __Commands: Dict[str, types.Coroutine] = {} def Command(self, func: Coroutine[Awaitable[int], AbstractServer, Message], cname: str = None): """ The @decorator for commands :param func: coroutine to mark as command :param cname: usually None, used to set the command name """ func: types.Coroutine name = func.__code__.co_name.lower() self.__Commands[name if cname is None else cname] = func return func def getOrDefault(
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """Benchmarking functions.""" import core.logging as logging import datasets.loader as loader import torch from core.config import cfg from core.timer import Timer logger = logging.get_logger(__name__) @torch.no_grad() def compute_time_eval(model): """Computes precise model forward test time using dummy data.""" # Use eval mode model.eval() # Generate a dummy mini-batch and copy data to GPU im_size, batch_size = cfg.TRAIN.IM_SIZE, int(cfg.TEST.BATCH_SIZE / cfg.NUM_GPUS) inputs = torch.zeros(batch_size, 3, im_size, im_size).cuda(non_blocking=False) # Compute precise forward pass time timer = Timer() total_iter = cfg.PREC_TIME.NUM_ITER + cfg.PREC_TIME.WARMUP_ITER for cur_iter in range(total_iter): # Reset the timers after the warmup phase
from types import FunctionType from typing import Dict, Union from core.abc.eventSystem import AbstractEventSystem from core.logging import get_logger from core.types import ListenerList, Event, Coroutine logger = get_logger('EventSystem') class Events: ReactionAdded = 'ReactionAdded'.lower() ReactionRemoved = 'ReactionRemoved'.lower() MessageArrived = 'message' Reload = 'reload' class EventSystem(AbstractEventSystem): INSTANCE: 'EventSystem' _listeners: Dict[ Event, ListenerList ] def __init__(self): EventSystem.INSTANCE = self self._listeners = {} def removeListeners( self, module: str ): # cycle in all event lists for listenerList in self._listeners.values(): toRemove = [] for listener in listenerList:
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Business package: common helper functions
import core.mysql as mysql
import core.logging as log
import core.request as request
import core.config as conf
import constants as cs
import os

logging = log.get_logger()


def prepare_data(host, user, password, db, sql):
    """Prepare test data by executing the given SQL against a MySQL database.

    :param host: server address
    :param user: user name
    :param password: password
    :param db: database name
    :param sql: SQL statement to execute
    :return: number of rows affected by the statement
    """
    mysql.connect(host, user, password, db)
    try:
        res = mysql.execute(sql)
    finally:
        # FIX: always release the connection, even if the statement raises;
        # the original leaked the connection on execute() failure.
        mysql.close()
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logging.info("Run sql: the row number affected is %s", res)
    return res
logger = get_logger( logger_name="UpdateWindData", logger_version='0.0.1', logger_format=LoggingFormat.JSON, logger_level=logging.INFO, logger_output=LoggingOutput.STDOUT, ) return logger if __name__ == "__main__": configer = ConfigService() con = CoreModule() print(con.provide_postgres_client(configer)) log = get_logger(logger_name=__name__, logger_version='0.0.0.1', logger_format=LoggingFormat.JSON, logger_level=logging.INFO, logger_output=LoggingOutput.STDOUT) log.info("logging: info", extra={'test': __name__}) log.debug("logging: debug", extra={'test': __name__}) log.note("logging: note", extra={'test': __name__}) log.warning("logging: warning", extra={'test': __name__}) log.error("logging: error", extra={'test': __name__}) log.critical("logging: critical", extra={'test': __name__}) log.exception("logging: exception", extra={'test': __name__})
import sqlite3 as sql from pathlib import Path from typing import Dict, List, Any from core.abc.database.backend import AbstractBackend from core.logging import get_logger GuildId = str Games = List[Dict] Members = List[Dict] logger = get_logger('database') class SqlBackend(AbstractBackend): dinstance: sql.Connection cursor: sql.Cursor dbpath: Path def __init__(self, path: str = None): super(SqlBackend, self).__init__(path) self.dbpath = Path(path) self.dinstance = sql.connect(path) self.cursor = self.dinstance.cursor() # conditionally creates the tables self.cursor.execute(''' CREATE TABLE IF NOT EXISTS games ( guildID INT NOT NULL, channelID INT NOT NULL, gameID TEXT NOT NULL, gameType TEXT NOT NULL,