def _submit_trace(exc_info):
    kwargs: Dict[str, str] = dict()
    if "+git" not in snapcraft_legacy.__version__:
        kwargs["release"] = snapcraft_legacy.__version__

    client = RavenClient(
        "https://*****:*****@sentry.io/277754",
        transport=RequestsHTTPTransport,
        # Should Raven automatically log frame stacks (including locals)
        # for all calls as it would for exceptions.
        auto_log_stacks=False,
        # Set a name to not send the real hostname.
        name="snapcraft",
        # Removes all stacktrace context variables. This will cripple the
        # functionality of Sentry, as you’ll only get raw tracebacks,
        # but it will ensure no local scoped information is available to the
        # server.
        processors=(
            "raven.processors.RemoveStackLocalsProcessor",
            "raven.processors.SanitizePasswordsProcessor",
        ),
        **kwargs
    )
    client.captureException(exc_info=exc_info)
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    # - namespace='CELERY' means all celery-related configuration keys
    #   should have a `CELERY_` prefix.
    app.config_from_object('django.conf:settings', namespace='CELERY')

    installed_apps = [
        app_config.name for app_config in apps.get_app_configs()
    ]
    app.autodiscover_tasks(lambda: installed_apps, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        # Since raven is required in production only,
        # imports might (most surely will) be wiped out
        # during PyCharm code clean up started
        # in other environments.
        # @formatter:off
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
        # @formatter:on

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object('django.conf:settings')
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)

    if hasattr(settings, 'OPBEAT'):
        from opbeat.contrib.django.models import client as opbeat_client
        from opbeat.contrib.django.models import logger as opbeat_logger
        from opbeat.contrib.django.models import register_handlers as opbeat_register_handlers
        from opbeat.contrib.celery import register_signal as opbeat_register_signal

        try:
            opbeat_register_signal(opbeat_client)
        except Exception as e:
            opbeat_logger.exception('Failed installing celery hook: %s' % e)

        if 'opbeat.contrib.django' in settings.INSTALLED_APPS:
            opbeat_register_handlers()
def on_configure(self):
    client = RavenClient(SETTINGS.get('sentry_dsn'))

    # register a custom filter to filter out duplicate logs
    register_logger_signal(client)

    # hook into the Celery error handler
    register_signal(client)
def ravenClient(dsn=None):
    """
    :return: RavenClient
    """
    if not hasattr(ravenClient, "client_object"):
        if dsn is not None:
            ravenClient.client_object = RavenClient(dsn)
        else:
            return DummyRavenObject()
    return ravenClient.client_object
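# Usage sketch (hypothetical; not part of the original module). The factory
# above caches the client on the function object itself: the first call that
# supplies a DSN creates and stores the client, later calls reuse it, and any
# call made before a DSN has been supplied falls back to DummyRavenObject
# (assumed to be a no-op stand-in defined elsewhere in the project). The DSN
# below is made up.
fallback = ravenClient()                                          # nothing cached yet -> DummyRavenObject()
client = ravenClient("https://key:secret@sentry.example.com/1")   # creates and caches the real client
assert ravenClient() is client                                    # later calls return the cached instance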
def setup(self):
    sentry_config = self.container.config.get("SENTRY", {})
    dsn = sentry_config.get("DSN", None)
    if dsn:
        client = RavenClient(
            dsn,
            environment=os.environ.get("SENTRY_ENVIRONMENT", "local")
        )
        handler = SentryHandler(client)
        handler.setLevel(logging.ERROR)
        setup_logging(handler)
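# Illustration only (not from the original service): once setup() above has
# attached the SentryHandler via setup_logging(), ordinary stdlib logging at
# ERROR level or higher is forwarded to Sentry. The logger name and message
# here are made up.
import logging

logging.getLogger("payments").error("webhook delivery failed", exc_info=True)  # captured by the SentryHandler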
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    # - namespace='CELERY' means all celery-related configuration keys
    #   should have a `CELERY_` prefix.
    app.config_from_object('django.conf:settings', namespace='CELERY')
    app.autodiscover_tasks()

    raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
    raven_register_logger_signal(raven_client)
    raven_register_signal(raven_client)
def ready(self):
    app.config_from_object('django.conf:settings', namespace='CELERY')
    installed_apps = [app_config.name for app_config in apps.get_app_configs()]
    app.autodiscover_tasks(lambda: installed_apps, force=True)
    app.autodiscover_tasks(lambda: installed_apps, related_name='periodic_tasks', force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
def configure_raven(config, transport='sync', _client=None):  # pragma: no cover
    if _client is not None:
        return set_raven_client(_client)
    if config and '+' not in config.split(':')[0]:
        # no explicit transport was specified in the dsn
        config = '%s+%s' % (transport, config)
    client = RavenClient(dsn=config)
    return set_raven_client(client)
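# A minimal sketch (assumed, not taken from the project) of the DSN-prefixing
# rule used above: Raven picks its transport from a scheme prefix on the DSN,
# so a transport name is prepended only when the scheme does not already carry
# one. The DSN values are made up.
def _with_transport(dsn, transport='sync'):
    if dsn and '+' not in dsn.split(':')[0]:
        return '%s+%s' % (transport, dsn)
    return dsn

assert _with_transport('https://key:secret@sentry.example.com/1') == \
    'sync+https://key:secret@sentry.example.com/1'
assert _with_transport('gevent+https://key:secret@sentry.example.com/1') == \
    'gevent+https://key:secret@sentry.example.com/1'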
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object('django.conf:settings', namespace='CELERY')

    installed_apps = [
        app_config.name for app_config in apps.get_app_configs()
    ]
    app.autodiscover_tasks(lambda: installed_apps, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        # Since raven is required in production only,
        # imports might (most surely will) be wiped out
        # during PyCharm code clean up started
        # in other environments.
        # @formatter:off
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
        # @formatter:on

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)

    if hasattr(settings, 'OPBEAT'):
        # Since opbeat is required in production only,
        # imports might (most surely will) be wiped out
        # during PyCharm code clean up started
        # in other environments.
        # @formatter:off
        from opbeat.contrib.django.models import client as opbeat_client
        from opbeat.contrib.django.models import logger as opbeat_logger
        from opbeat.contrib.django.models import register_handlers as opbeat_register_handlers
        from opbeat.contrib.celery import register_signal as opbeat_register_signal
        # @formatter:on

        try:
            opbeat_register_signal(opbeat_client)
        except Exception as e:
            opbeat_logger.exception('Failed installing celery hook: %s' % e)

        if 'opbeat.contrib.django' in settings.INSTALLED_APPS:
            opbeat_register_handlers()

    app.conf.beat_schedule = {
        'check-every-1-hour': {
            'task': 'approve_proposed_talks',
            'schedule': 60 * 60
        },
    }
def ready(self):
    installed_apps = [app_config.name for app_config in apps.get_app_configs()]
    app.autodiscover_tasks(lambda: installed_apps, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object('django.conf:settings')
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
        raven_register_signal(raven_client)
def update_categories_list():
    from models import settings as plugin_settings
    from oauth2client.client import Error as OAuthError

    c = None
    try:
        from raven import Client as RavenClient
        from django.conf import settings
        c = RavenClient(settings.RAVEN_CONFIG['dsn'])
    except KeyError as e:
        logger.error("Raven is not set up properly: {0}".format(str(e)))
    except ImportError as e:
        logger.error("Raven is not installed. Unable to alert Sentry")

    try:
        # if any settings are not set, then this should raise an exception
        # that can be caught by Raven
        logger.info("Attempting to update YouTube categories list")
        clientID = plugin_settings.objects.get(key='clientID').value
        privateKey = plugin_settings.objects.get(key='privateKey').value
        fieldName = plugin_settings.objects.get(key='fieldID').value
        try:
            regionCode = plugin_settings.objects.get(key='regionCode').value
        except plugin_settings.DoesNotExist:
            regionCode = 'gb'

        logger.info("Connecting to YouTube with clientID {0}".format(clientID))
        i = YoutubeInterface()
        i.authorize_pki(clientID, privateKey)

        logger.info(
            "Requesting category list for region {0}".format(regionCode))
        data = i.list_categories(region_code=regionCode)
        update_vidispine_field_values(fieldName, data['items'])
    except OAuthError as e:
        from traceback import format_exc
        if c is not None:
            c.captureException()
        logger.error(str(e))
        logger.error(format_exc())
        raise
    except StandardError as e:
        # ensure that any errors get reported back to Sentry
        from traceback import format_exc
        if c is not None:
            c.captureException()
        logger.error(str(e))
        logger.error(format_exc())
        raise
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object('django.conf:settings')

    installed_apps = [
        app_config.name for app_config in apps.get_app_configs()
    ]
    app.autodiscover_tasks(lambda: installed_apps, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
def _submit_trace(exception):
    client = RavenClient(
        'https://*****:*****@sentry.io/277754',
        transport=RequestsHTTPTransport,
        # Should Raven automatically log frame stacks (including locals)
        # for all calls as it would for exceptions.
        auto_log_stacks=False,
        # Removes all stacktrace context variables. This will cripple the
        # functionality of Sentry, as you’ll only get raw tracebacks,
        # but it will ensure no local scoped information is available to the
        # server.
        processors=('raven.processors.RemoveStackLocalsProcessor', ))

    try:
        raise exception
    except Exception:
        client.captureException()
def configure_raven(config, transport=None, _client=None):  # pragma: no cover
    """
    Configure, globally set and return a :class:`raven.Client` instance.

    :param transport: The transport to use, one of the
        :data:`RAVEN_TRANSPORTS` keys.
    :param _client: Test-only hook to provide a pre-configured client.
    """
    if _client is not None:
        return set_raven_client(_client)

    transport = RAVEN_TRANSPORTS.get(transport)
    if not transport:
        raise ValueError('No valid raven transport was configured.')

    client = RavenClient(dsn=config, transport=transport)
    return set_raven_client(client)
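# RAVEN_TRANSPORTS is referenced but not defined in this snippet. A plausible
# shape, assuming the transport classes shipped with raven-python, is a
# name-to-class mapping like the sketch below (not the project's actual
# definition):
from raven.transport.http import HTTPTransport
from raven.transport.threaded import ThreadedHTTPTransport

RAVEN_TRANSPORTS = {
    'sync': HTTPTransport,              # blocking send, suits short-lived scripts
    'threaded': ThreadedHTTPTransport,  # sends events from a background thread
}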
def get_raven_client(user_context=None, extra_context=None):
    if os.environ.get("ONE_CODEX_NO_TELEMETRY") is None:
        key = base64.b64decode(
            b"NmFlMjMwYWY4NjI5NDg3NmEyYzYwYjZjNDhhZDJiYzI6ZTMyZmYwZTVhNjUwNGQ5NGJhODc0NWZlMmU1ZjNmZjA="
        ).decode("utf-8")

        # Set Client params
        # Capture exceptions on exit if onecodex CLI being invoked
        if os.path.basename(sys.argv[0]) in ["onecodex", "py.test"]:
            install_sys_hook = True
        else:
            install_sys_hook = False

        try:
            from raven import Client as RavenClient

            client = RavenClient(
                dsn=os.environ.get(
                    "ONE_CODEX_SENTRY_DSN",
                    "https://{}@sentry.onecodex.com/9".format(key)),
                install_sys_hook=install_sys_hook,
                raise_send_errors=False,
                ignore_exceptions=[],
                include_paths=[__name__.split(".", 1)[0]],
                release=__version__,
            )

            if extra_context is None:
                extra_context = {}
            if user_context is None:
                user_context = {}

            try:
                _setup_sentry_for_ipython(client)
                extra_context["ipython"] = True
            except Exception:
                pass

            extra_context["platform"] = platform.platform()
            client.user_context(user_context)
            client.extra_context(extra_context)
            return client
        except Exception:
            return
def ready(self):
    installed_apps = [app_config.name for app_config in apps.get_app_configs()]
    app.autodiscover_tasks(lambda: installed_apps, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        # Since raven is required in production only,
        # imports might (most surely will) be wiped out
        # during PyCharm code clean up started
        # in other environments.
        # @formatter:off
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
        # @formatter:on

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    # - namespace='CELERY' means all celery-related configuration keys
    #   should have a `CELERY_` prefix.
    app.config_from_object('django.conf:settings', namespace='CELERY')

    installed_apps = [app_config.name for app_config in apps.get_app_configs()]
    app.autodiscover_tasks(lambda: installed_apps, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
def ready(self):
    # Using a string here means the worker doesn't have to serialize
    # the configuration object to child processes.
    # - namespace='CELERY' means all celery-related configuration keys
    #   should have a `CELERY_` prefix.
    app.config_from_object('django.conf:settings', namespace='CELERY')
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)
    app.conf.update(
        result_backend='rpc://',
        result_expires=3600,
    )

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)

    if hasattr(settings, 'OPBEAT'):
        from opbeat.contrib.django.models import client as opbeat_client
        from opbeat.contrib.django.models import logger as opbeat_logger
        from opbeat.contrib.django.models import register_handlers as opbeat_register_handlers
        from opbeat.contrib.celery import register_signal as opbeat_register_signal

        try:
            opbeat_register_signal(opbeat_client)
        except Exception as e:
            opbeat_logger.exception('Failed installing celery hook: %s' % e)

        if 'opbeat.contrib.django' in settings.INSTALLED_APPS:
            opbeat_register_handlers()
def ready(self):
    # Using a string here means the worker doesn't have to serialize
    # the configuration object to child processes.
    # - namespace='CELERY' means all celery-related configuration keys
    #   should have a `CELERY_` prefix.
    app.config_from_object('django.conf:settings', namespace='CELERY')
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)
    app.conf.update(
        result_backend='rpc://',
        result_expires=3600,
    )

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal
        from raven.contrib.celery import register_logger_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        register_logger_signal(raven_client)
        register_signal(raven_client)
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import logging.config  # needed for logging.config.dictConfig below

from potion_client import Client
from potion_client.auth import HTTPBearerAuth
import requests
import numpy as np
from raven import Client as RavenClient

from .settings import Default

logging.config.dictConfig(Default.LOGGING)

raven_client = RavenClient(Default.SENTRY_DSN)


def iloop_client(api, token):
    requests.packages.urllib3.disable_warnings()
    return Client(
        api,
        auth=HTTPBearerAuth(token),
        verify=False
    )


def _isnan(value):
    if isinstance(value, str):
        return False
    return np.isnan(value)
def ready(self):
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object('django.conf:settings', namespace='CELERY')
    app.conf.timezone = settings.TIME_ZONE
    app.conf.ONCE = {
        'backend': 'celery_once.backends.Redis',
        'settings': {
            'url': settings.REDIS_LOCATION,
            'default_timeout': 60 * 60
        }
    }
    app.conf.beat_schedule = {
        'users_notify_inactivity_session': {
            'task': 'his.users.tasks.notify_inactivity_session',
            'schedule': crontab()  # execute every minute
        },
        'core_create_real_order_and_end_plan_item': {
            'task': 'his.core.tasks.create_real_order_and_end_plan_item',
            'schedule': crontab(minute=0, hour=0)
        }
    }

    if 'ADM' in settings.DJANGO_INCLUDE_APPS:
        app.conf.beat_schedule['ADM_bill_room_items'] = {
            'task': 'his.apps.ADM.tasks.bill_room_items',
            'schedule': crontab(hour=23, minute=55)
        }

    if 'INF' in settings.DJANGO_INCLUDE_APPS:
        app.conf.beat_schedule['inbound_interface_product'] = {
            'task': 'his.apps.INF.tasks.inbound_interface_product',
            'schedule': crontab(minute='*/5')
        }
        app.conf.beat_schedule['inbound_interface_stock'] = {
            'task': 'his.apps.INF.tasks.inbound_interface_stock',
            'schedule': crontab(minute='*/5')
        }
        app.conf.beat_schedule['inbound_interface_movement'] = {
            'task': 'his.apps.INF.tasks.inbound_interface_movement',
            'schedule': crontab(minute='*/5')
        }
        app.conf.beat_schedule['inbound_interface_hr'] = {
            'task': 'his.apps.INF.tasks.inbound_interface_hr',
            'schedule': crontab(minute='*/5')
        }
        app.conf.beat_schedule['outbound_interface_patient_record'] = {
            'task': 'his.apps.INF.tasks.outbound_interface_patient_record',
            'schedule': crontab(hour=1, minute=0)
        }
        app.conf.beat_schedule['outbound_interface_staging'] = {
            'task': 'his.apps.INF.tasks.outbound_interface_staging',
            'schedule': crontab(hour=1, minute=30)
        }
        app.conf.beat_schedule['outbound_interface_drug_dispensing'] = {
            'task': 'his.apps.INF.tasks.outbound_interface_drug_dispensing',
            'schedule': crontab(hour=0, minute=0)
        }
        app.conf.beat_schedule['outbound_interface_drug_transfer'] = {
            'task': 'his.apps.INF.tasks.outbound_interface_drug_transfer',
            'schedule': crontab(hour=23, minute=0)
        }
        app.conf.beat_schedule['outbound_interface_supply_dispensing'] = {
            'task': 'his.apps.INF.tasks.outbound_interface_supply_dispensing',
            'schedule': crontab(hour=0, minute=0)
        }
        app.conf.beat_schedule['generate_send_claim_data'] = {
            'task': 'his.apps.INF.tasks.generate_send_claim_data',
            'schedule': crontab(hour=0, minute=5)
        }

    if 'FLM' in settings.DJANGO_INCLUDE_APPS:
        app.conf.beat_schedule['check_expired_queue_flow_transaction'] = {
            'task': 'his.apps.FLM.tasks.check_expired_queue_flow_transaction',
            'schedule': crontab(hour=0, minute=10)
        }

    if 'HRM' in settings.DJANGO_INCLUDE_APPS:
        app.conf.beat_schedule['HRM_reject_broadcast_and_offer'] = {
            'task': 'his.apps.HRM.tasks.reject_broadcast_and_offer',
            'schedule': crontab(hour=0, minute=0)
        }

    if 'appointment' in settings.DJANGO_INCLUDE_APPS:
        app.conf.beat_schedule['penta_create_provider_available_slot'] = {
            'task': 'his.penta.appointment.tasks.create_provider_available_slot',
            'schedule': crontab(day_of_month=25)
        }

    if 'REG' in settings.DJANGO_INCLUDE_APPS:
        app.conf.beat_schedule['check_certified_death_document'] = {
            'task': 'his.apps.REG.tasks.check_certified_death_document',
            'schedule': crontab(minute='*/30')
        }

    if 'DPO' in settings.DJANGO_INCLUDE_APPS:
        app.conf.beat_schedule['email_notifications'] = {
            'task': 'his.apps.DPO.tasks.email_notifications',
            'schedule': crontab(hour=config.core_EMAIL_NOTIFICATION_TIME, minute=0)
        }

    if 'LAB' in settings.DJANGO_INCLUDE_APPS:
        app.conf.beat_schedule['LAB_send_set_lab_order'] = {
            'task': 'his.apps.LAB.tasks.send_set_lab_order',
            'schedule': crontab()  # execute every minute
        }

    installed_apps = [
        app_config.name for app_config in apps.get_app_configs()
    ]
    app.autodiscover_tasks(lambda: installed_apps, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_signal as raven_register_signal
        from raven.contrib.celery import register_logger_signal as raven_register_logger_signal

        raven_client = RavenClient(
            dsn=settings.RAVEN_CONFIG['DSN'],
            release=settings.RAVEN_CONFIG.get('release'),
            site=settings.RAVEN_CONFIG.get('site'),
            ignore_exceptions=settings.RAVEN_CONFIG.get('ignore_exceptions'))
        raven_register_logger_signal(raven_client)
        raven_register_signal(raven_client)
{% if cookiecutter.use_sentry == 'y' -%}
        if hasattr(settings, 'RAVEN_CONFIG'):
            # Celery signal registration
            {% if cookiecutter.use_pycharm == 'y' -%}
            # Since raven is required in production only,
            # imports might (most surely will) be wiped out
            # during PyCharm code clean up started
            # in other environments.
            # @formatter:off
            {%- endif %}
            from raven import Client as RavenClient
            from raven.contrib.celery import register_signal as raven_register_signal
            from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
            {% if cookiecutter.use_pycharm == 'y' -%}
            # @formatter:on
            {%- endif %}

            raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['DSN'])
            raven_register_logger_signal(raven_client)
            raven_register_signal(raven_client)
{%- endif %}


@app.task(bind=True)
def debug_task(self):
    print(f'Request: {self.request!r}')  # pragma: no cover
{% else %}
# Use this as a starting point for your project with celery.
# If you are not using celery, you can remove this app
{% endif -%}
def configure_raven(config, _client=None):  # pragma: no cover
    if _client is not None:
        return set_raven_client(_client)
    client = RavenClient(dsn=config)
    return set_raven_client(client)
rx_coding = re.compile(r"coding[:=]\s*([-\w.]+)")

# CP error reporting
ENABLE_CP = config.features.cp
CP_NEW = config.path.cp_new
CP_SET_UID = None
SERVICE_NAME = os.path.relpath(sys.argv[0] or sys.executable)

# Sentry error reporting
if config.features.sentry:
    from raven import Client as RavenClient

    raven_client = RavenClient(
        config.sentry.url,
        processors=('raven.processors.SanitizePasswordsProcessor', ),
        release=version.version)


def get_lines_from_file(filename, lineno, context_lines,
                        loader=None, module_name=None):
    """
    Returns context_lines before and after lineno from file.
    Returns (pre_context_lineno, pre_context, context_line, post_context).

    (Borrowed from Django)
    """
    source = None
    if loader is not None and hasattr(loader, "get_source"):
                    backend=redis_url, include=include)

    celery.conf.update(
        CELERY_REDIS_SENTINEL_SENTINELS=redis_cfg.hosts,
        CELERY_REDIS_SENTINEL_SERVICE_NAME=redis_cfg.sentinel,
        CELERY_REDIS_SENTINEL_SOCKET_TIMEOUT=redis_cfg.timeout,
        CELERY_REDIS_SENTINEL_SENTINEL_TIMEOUT=redis_cfg.sentinel_timeout,
        BROKER_TRANSPORT_OPTIONS={
            'service_name': redis_cfg.sentinel,
            'sentinels': redis_cfg.hosts,
            'sentinel_timeout': redis_cfg.sentinel_timeout,
            'socket_timeout': redis_cfg.timeout,
        })

    if conf.sentry.backend:
        client = RavenClient(dsn=conf.sentry.backend)
        register_signal(client)


def schedule_from_conf(conf):
    schedule = {}
    for k, v in conf.items():
        if isinstance(v['schedule'], int):
            v['schedule'] = timedelta(seconds=v['schedule'])
        elif isinstance(v['schedule'], dict):
            v['schedule'] = crontab(**v['schedule'])
        schedule.update({k: v})
    return schedule
import logging
import sys
from functools import lru_cache

from potion_client import Client
from potion_client.auth import HTTPBearerAuth
from raven import Client as RavenClient
from raven.conf import setup_logging
from raven.handlers.logging import SentryHandler

from . import settings

logger = logging.getLogger('iloop-to-model')
# Logspout captures logs from stdout of docker containers
logger.addHandler(logging.StreamHandler(stream=sys.stdout))
logger.setLevel(logging.DEBUG)

# Configure Raven to capture warning logs
raven_client = RavenClient(settings.Default.SENTRY_DSN)
handler = SentryHandler(raven_client)
handler.setLevel(logging.WARNING)
setup_logging(handler)


@lru_cache(128)
def iloop_client(api, token):
    return Client(
        api,
        auth=HTTPBearerAuth(token),
    )
def app(self):
    return Sentry(self.wsgi_app, RavenClient(self.SENTRY_DSN))
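# Note (an assumption, not stated in the snippet): the positional arguments
# match raven.middleware.Sentry(application, client), i.e. a WSGI middleware
# that wraps self.wsgi_app and reports unhandled exceptions to the given
# Raven client.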
from tmhack.poststats import DiscordBotsOrg, BotsDiscordPw
from collections import deque

from raven import Client as RavenClient
import raven

import discord
import aiohttp
import asyncio
import sys
import logging
import json

logging.basicConfig(filename='tmhack.log', level=logging.INFO,
                    format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
handler = logging.FileHandler(config.get("logging-location", "tmhackbot.log"))
logging.getLogger('TmHackBot')
logging.getLogger('sqlalchemy')

try:
    raven_client = RavenClient(config["sentry-dsn"])
except raven.exceptions.InvalidDsn:
    pass


class TmHack(discord.AutoShardedClient):
    def __init__(self):
        super().__init__(max_messages=20000)
        self.aiosession = aiohttp.ClientSession(loop=self.loop)
        self.http.user_agent += ' TmHack-Bot'
        self.redisqueue = RedisQueue(self, config["redis-uri"])
        self.command = Commands(self, config)
        self.socketio = SocketIOInterface(self, config["redis-uri"])
        self.delete_list = deque(maxlen=100)  # List of msg ids to prevent duplicate delete
        self.discordBotsOrg = None