def __init__(self, logger=None):
    self.channels = {}
    self.publisher = Publisher()
    self.threads = []
    self._error_thread = None
    self._running = False  # It is considered running only after its setup.
    self.logger = logger or logging.getLogger(__name__)
    logaugment.set(
        self.logger,
        code="",
        site="Aggregator",
        server="",
        event="",
        keywords="null",
    )
    logging_extra = {
        "code": "Initialize",
        "site": "Aggregator.__init__",
        "keywords": ["aggregator", "init", "data structure", "controller"],
    }
    self.logger.info(
        "Aggregator created.",
        extra=dict(logging_extra,
                   keywords=json.dumps(logging_extra["keywords"])),
    )
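# A hedged sketch (not from the original project) of a formatter that would
# render the fields injected by logaugment.set above, along with the per-call
# `extra` overrides such as "code" and "keywords" used throughout these
# snippets:
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
    "%(asctime)s %(levelname)s [%(site)s] code=%(code)s event=%(event)s "
    "server=%(server)s keywords=%(keywords)s :: %(message)s"))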
def new_logger(level=default_level, torrent_name=None, user_id=None):
    logger = logging.Logger('')
    logger.setLevel(level)
    handler = logging.StreamHandler(sys.stdout)
    torrent_name_format = '|[%(torrent)s]' if torrent_name is not None else ""
    user_id_format = '|<%(id)s>' if user_id is not None else ""
    formatter = logging.Formatter(
        "%(levelname)s|(%(asctime)s){}{}: %(message)s".format(
            torrent_name_format, user_id_format))
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logaugment.set(logger,
                   torrent=None if torrent_name is None else torrent_name[:40],
                   id=str(user_id))
    return logger
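# Hypothetical usage of new_logger; the torrent name and user id below are
# made-up values. With both arguments supplied, the output looks roughly like:
#   INFO|(2023-01-01 12:00:00,000)|[ubuntu-22.04.iso]|<42>: piece received
logger = new_logger(level=logging.INFO, torrent_name="ubuntu-22.04.iso", user_id=42)
logger.info("piece received")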
def __init__(self, logger=None): """Constructor of the Publisher class. Parameters ---------- logger : logging.Logger If not supplied, it will instantiate a new logger from __name__. """ self.channels = None self._running = True self.logger = logger or logging.getLogger(__name__) logaugment.set(self.logger, code="", site="Publisher", server="", event="", keywords="null")
def __init__(self, logger=None): """Constructor of the ProbeListener. Parameters ---------- logger : logging.Logger If not supplied, it will instantiate a new logger from __name__. """ self.logger = logger or logging.getLogger(__name__) logaugment.set( self.logger, code="", site="ProbeListener", server="", event="", keywords="null", )
def setup_logger(application):
    """Set up and return a logger for the given application name.

    :return: logging.Logger with a stream handler and, if the RSYSLOG_*
        environment variables are set, a syslog handler as well.
    """
    log_level = logging.getLevelName(LOG_LEVEL)
    logger = logging.getLogger('CUSTOM')
    logger.setLevel(log_level)
    logaugment.set(logger, metadata="")

    host_name = socket.gethostname()
    formatter_syslog = MyFormatter(
        fmt=host_name + " %(asctime)s[%(levelname)s][" + application +
        "]: [MSG]%(message)s %(metadata)s",
        datefmt="%Y-%m-%d %H:%M:%S,%F%z")
    formatter_syslog.converter = time.gmtime
    formatter_stream = MyFormatter(
        fmt="%(asctime)s[%(levelname)s][" + application +
        "]: [MSG]%(message)s %(metadata)s",
        datefmt="%Y-%m-%d %H:%M:%S,%F%z")
    formatter_stream.converter = time.gmtime

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter_stream)
    logger.addHandler(stream_handler)

    RSYSLOG_HOST = os.getenv('RSYSLOG_HOST')
    RSYSLOG_PORT = os.getenv('RSYSLOG_PORT')
    # Both variables are required: enabling syslog with only one of them set
    # would fail below, so treat a single missing variable as disabled too.
    if RSYSLOG_HOST is None or RSYSLOG_PORT is None:
        logger.warning(
            "RSYSLOG is DISABLED. Set RSYSLOG_HOST and RSYSLOG_PORT "
            "environment variables to ENABLE")
    else:
        syslog_handler = logging.handlers.SysLogHandler(
            address=(RSYSLOG_HOST, int(RSYSLOG_PORT)))
        syslog_handler.setFormatter(formatter_syslog)
        logger.addHandler(syslog_handler)
    return logger
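# Sketch of setup_logger in use. LOG_LEVEL, MyFormatter, and the RSYSLOG_*
# environment variables are assumed to be provided elsewhere; "my-service"
# is a made-up application name.
logger = setup_logger("my-service")
logger.info("service started")
# -> <timestamp>[INFO][my-service]: [MSG]service started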
def __init__(self, callback_url, servers=None, get_raw=False, hook_id=None,
             logger=None):
    """Constructor of the WebhookRegister.

    Parameters
    ----------
    callback_url : str
        URL of the callback to be registered.
    servers : list
        List of servers to register.
    get_raw : bool
        True if it requests raw webhooks to be received. False otherwise.
    hook_id : int
        Specify a hook ID.
    logger : logging.Logger
        If not supplied, it will instantiate a new logger from __name__.
    """
    self._callback_url = callback_url
    self._get_raw = get_raw
    self._hook_id = hook_id
    self._success_servers = []  # List of servers registered successfully.
    self._failed_servers = []  # List of servers that failed to register.
    self.logger = logger or logging.getLogger(__name__)
    logaugment.set(
        self.logger,
        code="",
        site="WebhookRegister",
        server="",
        event="",
        keywords="null",
    )
    if servers:
        # Use the servers passed as argument.
        self._servers = servers
    else:
        # Otherwise, fetch the list of servers to use from the database.
        self._fetch_servers_from_database()
def __init__(self, name, maxsize=0, logger=None):
    """Constructor of the Channel class.

    Parameters
    ----------
    name : str
        An identifier of the channel.
    maxsize : int
        The maximum size of the channel. If it is zero or negative, the
        channel accepts any number of elements.
    logger : logging.Logger
        If not supplied, it will instantiate a new logger from __name__.
    """
    self.name = name
    self.queue = queue.Queue(maxsize=maxsize)
    self.logger = logger or logging.getLogger(__name__)
    logaugment.set(self.logger,
                   code="",
                   site="Channel",
                   server="",
                   event="",
                   keywords="null")
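# A minimal usage sketch; only __init__ is shown above, so the direct queue
# access below is an assumption about how the class is consumed.
channel = Channel("meetings", maxsize=5)
channel.queue.put({"event": "meeting-created"})
event = channel.queue.get()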
def __init__(self, publisher, channel, logger=None):
    """Constructor of WebhookEventHandler.

    Parameters
    ----------
    publisher : aggregator.Publisher
    channel : str
        Channel where the event will be published.
    logger : logging.Logger
        If not supplied, it will instantiate a new logger from __name__.
    """
    self.publisher = publisher
    self.channel = channel
    self.logger = logger or logging.getLogger(__name__)
    logaugment.set(
        self.logger,
        code="",
        site="WebhookEventHandler",
        server="",
        event="",
        keywords="null",
    )
def __init__(self, subscriber, errorevent, logger=None, **kwargs):
    """Constructor of the `SubscriberThread`.

    Parameters
    ----------
    subscriber : Subscriber
        A Subscriber with channel and callback objects.
    errorevent : threading.Event
        Event set to signal that this thread has failed.
    logger : logging.Logger
        If not supplied, it will instantiate a new logger from __name__.
    """
    threading.Thread.__init__(self, **kwargs)
    self.subscriber = subscriber
    self._errorevent = errorevent
    self._stopevent = threading.Event()
    self.logger = logger or logging.getLogger(__name__)
    logaugment.set(
        self.logger,
        code="",
        site="SubscriberThread",
        server="",
        event="",
        keywords="null",
    )
def __init__(self, server, secret, logger=None):
    """Constructor of `WebhookServer`.

    Parameters
    ----------
    server : str
        Hostname of the server.
    secret : str
        Shared secret (token) of the server.
    logger : logging.Logger
        If not supplied, it will instantiate a new logger from __name__.
    """
    self._server = server
    self._secret = secret
    self.logger = logger or logging.getLogger(__name__)
    logaugment.set(
        self.logger,
        code="",
        site="WebhookServer",
        server="",
        event="",
        keywords="null",
    )
def __init__(self, flagAction):
    super().__init__()
    if not flagAction:
        return
    try:
        sucessoLogin = False
        self.sessionCookie = None
        self.browserSession = None
        self.dominio = cfgEspecifico.configParams['DOMAIN']
        self.urlDominio = cfgEspecifico.configParams['SITE_DOMAIN']
        self.limitadorTempo = cfgEspecifico.configParams['INTERVALO_TEMPO']
        self.execucaoDiaria = cfgEspecifico.configParams['EXECUCAO_DIARIA']
        adsProv = AdversaryProvider()
        self.listaAdversarys = adsProv.retornaTodos()
        nomeLog = f'{self.dominio}_DIARIO' if self.execucaoDiaria else f'{self.dominio}_PERIODO'
        logging.basicConfig(
            format='%(asctime)s -- %(custom_key)s - %(levelname)s - %(message)s',
            level=logging.INFO,
            handlers=[
                logging.FileHandler(f"log/{nomeLog}.log", mode="w"),
                logging.StreamHandler()
            ])
        logging.getLogger('schedule').propagate = False
        self.logger = logging.getLogger()
        logaugment.set(self.logger, custom_key='N/A')
        # dbConKey = 'PROD' if 'localhost' not in cfgPadrao.database['mysql_conn'] else 'DESE'
        # self.logger.info(f'@@@ DATABASE: {dbConKey}')
        try:
            self.logger.info('')
            self.logger.info('#############################')
            listaPeriodos = self.carregaPeriodos()
            self.logger.info(
                f'Total periods returned: {len(listaPeriodos)}. Starting threads...')
            for item in listaPeriodos:
                self.logger.info(item)
            self.logger.info('#############################')
            self.logger.info('')
            self.startChrome()
            sucessoLogin = self.efetuaLogin()
            if sucessoLogin:
                self.logger.info('Login successful. Starting search...')
                time.sleep(3)
                self.carregaSessao()
                self.logger.info('Session ready.')
                self.fecharModalMensagem()

                def job():
                    self.logger.info('Filtering results...')
                    listaPeriodos = self.carregaPeriodos()
                    if len(listaPeriodos) > 0:
                        pool = ThreadPool(cfgPadrao.threads['MAX_COUNT'])
                        pool.map(self.processaDetalhes, listaPeriodos)
                        pool.close()
                        pool.join()
                        self.logger.info('THREAD FINISHED.')

                # Loop on a timer (TIMER == 0 runs the job only once)
                TIMER = cfgPadrao.configParams['TIMER']
                if TIMER == 0:
                    job()
                else:
                    schedule.every(TIMER).seconds.do(job)
                    self.logger.info('Waiting for the job to start...')
                    while True:
                        schedule.run_pending()
                        time.sleep(1)  # sleep briefly to avoid a busy-wait between schedule checks
            else:
                self.logger.info('Could not log in.')
        except Exception as fe:
            self.logger.info('Error in query: ' + str(fe))
        finally:
            if sucessoLogin:
                time.sleep(3)
                self.logger.info('Starting logout process...')
                saiu = self.efetuaLogout()
                if saiu:
                    self.logger.info('Logout completed.')
                else:
                    self.logger.info('Logout had problems.')
                time.sleep(2)
            self.logger.info('')
            self.logger.info('=================== ')
            self.logger.info('')
    except Exception as ex:
        print('ERROR --> ' + str(ex))
def processaDetalhes(self, tuplaPeriodoData):
    try:
        self.logger.info('')
        self.logger.info('===========================================')
        self.logger.info(f'Processing period: {tuplaPeriodoData}.')
        self.logger.info('===========================================')
        idEsporte = cfgEspecifico.configParams['DATA_SPORT_ID_MYSQL']
        dataInicio = tuplaPeriodoData[0]
        dataFim = tuplaPeriodoData[1]

        # COMPETITIONS
        listaCompeticoes = self.retornaCompeticoes(idEsporte, dataInicio, dataFim)
        limitadorNumComp = cfgEspecifico.configParams['LIMITE_COLETA_COMPETICAO']
        if limitadorNumComp is not None:
            listaCompeticoes = listaCompeticoes[:limitadorNumComp]
        self.logger.info(f'Competitions found: {len(listaCompeticoes)}.')

        for competition in listaCompeticoes:
            idCompetition = competition.idCompetition
            logaugment.set(self.logger, custom_key=idCompetition)
            urlFixture = self.montarUrlFixture(idCompetition, dataInicio, dataFim)

            # FIXTURES
            listaFixtures = self.retornaFixtures(urlFixture)
            limitadorNumFix = cfgEspecifico.configParams['LIMITE_COLETA_FIXTURE']
            if limitadorNumFix is not None:
                listaFixtures = listaFixtures[:limitadorNumFix]
            self.logger.info(f'Fixtures found: {len(listaFixtures)}.')

            fixtureProv = FixtureProvider()
            matchdataProv = MatchDataProvider()
            matchdatarawProv = MatchDataRawProvider()

            # Fetch the fixtures already in the database for this competition/period
            listaMatchFixturesTemp = matchdataProv.retornaListaFixturesPorCompeticaoPeriodo(
                idCompetition, dataInicio, dataFim)
            listaMatchFixturesExistentes = [
                f.idFixture for f in listaMatchFixturesTemp
                if f.idFixture is not None
            ]
            self.logger.info(
                f'Fixture/MatchData rows found in the database: {len(listaMatchFixturesExistentes)}')

            for fixture in listaFixtures:
                if (fixture.time < datetime.now() - dtime.timedelta(
                        minutes=cfgEspecifico.configParams['DELTA_LIMITE_COLETA_DIARIA'])
                        and self.execucaoDiaria):
                    self.logger.info(
                        f'Fixture skipped because it is too old. Time: {fixture.time}')
                    continue
                if fixture.idFixture in listaMatchFixturesExistentes:
                    self.logger.info(
                        f'Fixture/MatchData {fixture.idFixture} already exists in the database.')
                    continue

                idFixture = fixture.idFixture
                idChallenge = fixture.idChallenge
                self.logger.info('-------------------------------------------')
                self.logger.info(
                    f'FIXTURE -->> IdFixture:{idFixture} -- Description:{fixture.description} -- Date: {fixture.dateDescription}')
                fixtureProv.atualizar(fixture)
                self.logger.info(f'Fixture saved. Generated id: {idFixture}')

                urlPartida = self.montarUrlResultadoPartida(
                    idCompetition, dataInicio, dataFim, idFixture, idChallenge)
                matchData, matchrawData, dicNomesTimes = self.retornaDadosPartida(
                    urlPartida, idCompetition, idFixture, idChallenge)
                if matchData is not None and matchrawData is not None:
                    self.logger.info(
                        f'MATCH -->> Winner:{matchData.idWinner} -- Result: {matchData.matchResult} -- Goals:{matchData.sumScore}')
                    matchResult = matchData.matchResult
                    halfTimeResult = matchData.halfTimeResult
                    idAdversary1 = matchData.idAdversary1
                    idAdversary2 = matchData.idAdversary2
                    idAdversaryScoreFirst = matchData.idAdversaryScoreFirst
                    matchMarketsRaw = matchrawData.matchMarkets
                    matchdataProv.atualizar(matchData)
                    matchdatarawProv.atualizar(matchrawData)
                    _thread.start_new_thread(
                        self.calculaMaximas,
                        (matchMarketsRaw, idCompetition, matchResult,
                         halfTimeResult, idAdversary1, idAdversary2,
                         idAdversaryScoreFirst))
                self.logger.info('-------------------------------------------')
                self.sleep()
            self.sleep(cfgEspecifico.configParams['INTERVALO_TEMPO_COMPETICOES'])
        return True
    except Exception as me:
        self.logger.error(f'Failed to process details: {me}.')
def map_webhook_event(event):
    """Map a received webhook event to the corresponding data structure.

    This function calls the corresponding mapper based on the type of event
    received. It can be thought of as an event dispatcher.

    Parameters
    ----------
    event : dict
        Dict with fields and values of the event as received by the webhook.

    Returns
    -------
    mapped_event : event_mapper.WebhookEvent
        It encapsulates both the event type and the event itself.
    """
    logger = logging.getLogger(__name__)
    logaugment.set(logger,
                   code="",
                   site="map_webhook_event",
                   server="",
                   event="",
                   keywords="null")
    logging_extra = {
        "code": "Webhook mapping",
        "keywords": ["webhook", "map", "event", "data structure", "data"],
    }
    try:
        event_type = event["data"]["id"]
        server_url = event["server_url"]
    except (KeyError, TypeError) as err:
        logging_extra["code"] = "Invalid message id"
        if "warning" not in logging_extra["keywords"]:
            logging_extra["keywords"] += ["warning"]
        logger.warning(
            "Webhook message does not contain a valid id: {}".format(err),
            extra=dict(logging_extra,
                       keywords=json.dumps(logging_extra["keywords"])),
        )
        raise InvalidWebhookMessageError(
            "Webhook message does not contain a valid id")
    logging_extra["server"] = server_url
    logging_extra["event"] = event_type
    logger.debug(
        "Mapping event",
        extra=dict(logging_extra,
                   keywords=json.dumps(logging_extra["keywords"])),
    )
    if event_type == "meeting-created":
        mapped_event = _map_create_event(event, event_type, server_url)
    elif event_type == "meeting-ended":
        mapped_event = _map_end_event(event, event_type, server_url)
    elif event_type == "user-joined":
        mapped_event = _map_user_joined_event(event, event_type, server_url)
    elif event_type == "user-left":
        mapped_event = _map_user_left_event(event, event_type, server_url)
    elif event_type == "user-audio-voice-enabled":
        mapped_event = _map_user_voice_enabled_event(event, event_type,
                                                     server_url)
    elif event_type in [
            # "user-audio-voice-enabled" also appears here in the original,
            # but it is unreachable: the more specific branch above wins.
            "user-audio-voice-enabled",
            "user-audio-voice-disabled",
            "user-audio-listen-only-enabled",
            "user-audio-listen-only-disabled",
            "user-cam-broadcast-start",
            "user-cam-broadcast-end",
            "user-presenter-assigned",
            "user-presenter-unassigned",
    ]:
        mapped_event = _map_user_event(event, event_type, server_url)
    elif event_type in [
            "rap-publish-started",
            "rap-post-publish-started",
            "rap-post-publish-ended",
    ]:
        mapped_event = _map_rap_publish_event(event, event_type, server_url)
    elif event_type == "rap-publish-ended":
        mapped_event = _map_rap_publish_ended_event(event, event_type,
                                                    server_url)
    elif event_type in [
            "rap-process-started",
            "rap-process-ended",
            "rap-post-process-started",
            "rap-post-process-ended",
    ]:
        mapped_event = _map_rap_process_event(event, event_type, server_url)
    elif event_type in [
            "rap-sanity-started",
            "rap-sanity-ended",
            "rap-post-archive-started",
            "rap-post-archive-ended",
            "rap-archive-started",
    ]:
        mapped_event = _map_rap_event(event, event_type, server_url)
    elif event_type == "rap-archive-ended":
        mapped_event = _map_rap_archive_event(event, event_type, server_url)
    elif event_type in ["rap-unpublished", "rap-published"]:
        mapped_event = _map_rap_published_unpublished_event(
            event, event_type, server_url)
    elif event_type == "rap-deleted":
        mapped_event = _map_rap_deleted_event(event, event_type, server_url)
    elif event_type in ["meeting-transfer-enabled", "meeting-transfer-disabled"]:
        mapped_event = _map_transfer_event(event, event_type, server_url)
    else:
        logging_extra["code"] = "Invalid webhook event id"
        logging_extra["keywords"] += ["warning"]
        logger.warning(
            "Webhook event id is not valid: '{}'".format(event_type),
            extra=dict(logging_extra,
                       keywords=json.dumps(logging_extra["keywords"])),
        )
        raise InvalidWebhookEventError(
            "Webhook event '{}' is not valid".format(event_type))
    return mapped_event
def test_set_combines_add_and_remove(self):
    logaugment.add(self.logger, custom_key='custom-value-1')
    logaugment.set(self.logger, custom_key='custom-value-2')
    self.logger.info('message')
    self.assertEqual(self.stream.getvalue(),
                     "This is the message: custom-value-2\n")
def test_latest_set_value_takes_priority(self):
    logaugment.set(self.logger, custom_key='custom-value-2')
    logaugment.add(self.logger, custom_key='custom-value-1')
    self.logger.info('message')
    self.assertEqual(self.stream.getvalue(),
                     "This is the message: custom-value-1\n")
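# The tests in this section assume a fixture along these lines (a sketch, not
# the original setUp): a fresh logger writing to an in-memory stream whose
# formatter renders the custom key. Assumes `import io`, `import logging`,
# and `import logaugment` at module level.
def setUp(self):
    self.logger = logging.Logger('test')  # fresh logger, so no filters leak between tests
    self.stream = io.StringIO()
    handler = logging.StreamHandler(self.stream)
    handler.setFormatter(
        logging.Formatter("This is the message: %(custom_key)s"))
    self.logger.addHandler(handler)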
def process_request(self, req, resp):
    """Process the request before routing it.

    If the request is in any way invalid, raise an error. Otherwise, it
    returns normally.

    Parameters
    ----------
    req : falcon.Request
        Request object that will eventually be routed to an on_* responder
        method.
    resp : falcon.Response
        Response object that will be routed to the on_* responder.

    Raises
    ------
    falcon.HTTPUnauthorized
        If request authentication fails.

    References
    ----------
    * https://tools.ietf.org/html/rfc7235
    * http://self-issued.info/docs/draft-ietf-oauth-v2-bearer.html
    * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/WWW-Authenticate
    """
    self.logger = logging.getLogger(__name__)
    logaugment.set(
        self.logger,
        code="",
        site="AuthMiddleware",
        server="",
        event="",
        keywords="null",
    )
    logging_extra = {
        "code": "Processing requests",
        "site": "AuthMiddleware.process_request",
        "keywords": ["https", "falcon", "requests", "domain"],
    }
    self.logger.info(
        f"Received request: '{req.params}'",
        extra=dict(logging_extra,
                   keywords=json.dumps(logging_extra["keywords"])),
    )
    auth_required = cfg.config["MCONF_WEBHOOK_AUTH_REQUIRED"]
    if auth_required:
        server_url = req.get_param("domain")
        if not server_url:
            logging_extra["code"] = "Missing domain"
            logging_extra["keywords"] += ["warning"]
            self.logger.warning(
                "Domain missing from (last hop) '{}'.".format(req.host),
                extra=dict(logging_extra,
                           keywords=json.dumps(logging_extra["keywords"])),
            )
            raise falcon.HTTPUnauthorized(
                title="Domain required for authentication",
                description="Provide a valid domain as part of the request",
            )
        server_url = _normalize_server_url(server_url)
        token = req.get_header("Authorization")
        www_authentication = {"Bearer realm": '"mconf-aggregator"'}
        logging_extra["server"] = server_url
        if token is None:
            logging_extra["code"] = "Missing token"
            if "warning" not in logging_extra["keywords"]:
                logging_extra["keywords"] += ["warning"]
            self.logger.warning(
                "Authentication token missing from '{}'.".format(server_url),
                extra=dict(logging_extra,
                           keywords=json.dumps(logging_extra["keywords"])),
            )
            raise falcon.HTTPUnauthorized(
                title="Authentication required",
                description="Provide an authentication token as part of the "
                            "request",
                headers=www_authentication,
            )
        if not self._token_is_valid(server_url, token):
            requester = req.host
            logging_extra["code"] = "Validate token"
            if "warning" not in logging_extra["keywords"]:
                logging_extra["keywords"] += ["warning"]
            self.logger.warning(
                "Unable to validate token '{}' from '{}' (last hop: '{}')."
                .format(token, server_url, requester),
                extra=dict(logging_extra,
                           keywords=json.dumps(logging_extra["keywords"])),
            )
            raise falcon.HTTPUnauthorized(
                title="Unable to validate authentication token",
                description="The provided authentication token could not be "
                            "validated",
                headers=www_authentication,
            )
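# Hedged sketch: how this middleware might be attached to a Falcon
# application (falcon.App in Falcon 3.x; older releases use falcon.API).
import falcon

app = falcon.App(middleware=[AuthMiddleware()])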
def test_at_least_one_custom_key_needed(self):
    with self.assertRaises(ValueError):
        logaugment.add(self.logger)
    with self.assertRaises(ValueError):
        logaugment.set(self.logger)
def setUp(self): logger = logging.getLogger("test_channel") logaugment.set( logger, code="", site="TestChannel", server="", event="", keywords="null" ) self.channel = Channel("test_channel", maxsize=5, logger=logger)
def set_metadata(logger, metadata_json):
    metadata = "[METADATA]{}".format(metadata_json)
    logaugment.set(logger, metadata=metadata)
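# Example: attaching structured metadata before logging. The "[METADATA]"
# prefix feeds the %(metadata)s field consumed by the formatters built in
# setup_logger above; the payload values are made up.
logger = setup_logger("my-service")
set_metadata(logger, json.dumps({"user": "alice", "action": "login"}))
logger.info("user event")
# -> <timestamp>[INFO][my-service]: [MSG]user event [METADATA]{"user": "alice", "action": "login"}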