def test_load_config_multiple_times():
    """Loading the very same dict config twice must not raise."""
    cfg = {
        'logger': 'addons-marketplace-dev',
        'sender': {'class': 'metlog.senders.UdpSender',
                   'host': ['logstash1', 'logstash2'],
                   'port': '5566'},
    }
    # Second call exercises the "config already applied" path.
    client_from_dict_config(cfg)
    client_from_dict_config(cfg)
def test_metlog_timing(self):
    """A timing() call routed through the moz_metlog client should emit
    one 'timer' message capturing payload, rate and name as fields."""
    # check_metlog() skips the test when metlog is not installed and
    # returns the client factory; the previous local re-import of
    # client_from_dict_config merely shadowed this value, so it was
    # dropped (matching the sibling tests that use the return directly).
    client_from_dict_config = self.check_metlog()
    METLOG_CONF = {
        'logger': 'django-statsd',
        'sender': {
            'class': 'metlog.senders.DebugCaptureSender',
        },
    }
    metlog = client_from_dict_config(METLOG_CONF)
    with mock.patch.object(settings, 'METLOG', metlog):
        with mock.patch.object(settings, 'STATSD_CLIENT',
                               'django_statsd.clients.moz_metlog'):
            client = get_client()
            eq_(len(client.metlog.sender.msgs), 0)
            client.timing('testing', 512, rate=2)
            eq_(len(client.metlog.sender.msgs), 1)
            msg = json.loads(client.metlog.sender.msgs[0])
            eq_(msg['severity'], 6)
            eq_(msg['payload'], '512')
            eq_(msg['fields']['rate'], 2)
            eq_(msg['fields']['name'], 'testing')
            eq_(msg['type'], 'timer')
def __init__(self, cfg, ssh_keys):
    """Read HDFS-transfer settings from *cfg* and precompute the
    source/destination paths plus the error-message strings.

    :param cfg: ConfigParser-style object with 'metlog_metrics_hdfs'
                and 'metlog' sections.
    :param ssh_keys: path to the SSH keys used to reach the Hadoop host.
    """
    self._cfg = cfg
    self._ssh_keypath = ssh_keys
    # Parenthesized so the line is valid as both the Python 2 print
    # statement (the file's original dialect) and the Python 3 function.
    print("SSH Keys are in : %s" % self._ssh_keypath)
    self.HADOOP_USER = cfg.get('metlog_metrics_hdfs', 'HADOOP_USER')
    self.HADOOP_HOST = cfg.get('metlog_metrics_hdfs', 'HADOOP_HOST')
    self.SRC_LOGFILE = cfg.get('metlog_metrics_hdfs', 'SRC_LOGFILE')
    # SRC_LOGFILE is run through time.strftime, so it may contain date
    # placeholders (e.g. %Y%m%d) expanding to the current date.
    self.DST_FNAME = os.path.join(
        os.path.split(cfg.get('metlog_metrics_hdfs', 'DST_FNAME'))[0],
        os.path.split(time.strftime(self.SRC_LOGFILE))[-1])
    # Make a copy of the log file in case it gets rotated out from
    # under us
    self.TMP_DIR = cfg.get('metlog_metrics_hdfs', 'TMP_DIR')
    self.LOCAL_FNAME = os.path.join(self.TMP_DIR,
                                    os.path.split(self.DST_FNAME)[-1])
    # Pre-rendered error messages for the transfer steps.
    self.ERR_RM_HDFS = ('Failed to remove [%s] from %s'
                        % (self.DST_FNAME, self.HADOOP_HOST))
    self.ERR_XFER_HADOOP = ("Transport of [%s] to HDFS failed"
                            % self.DST_FNAME)
    self.ERR_REMOVE_LOCAL = "Error removing: [%s]" % self.LOCAL_FNAME
    self.ERR_DFS_WRITE = "DFS Write failure for [%s]" % self.DST_FNAME
    self.LOGGER = client_from_dict_config(dict(cfg.items('metlog')))
def test_metlog_timing(self):
    """A timing() call routed through the moz_metlog client should emit
    one 'timer' message capturing payload, rate and name as fields."""
    # check_metlog() skips the test when metlog is not installed and
    # returns the client factory; the previous local re-import of
    # client_from_dict_config merely shadowed this value, so it was
    # dropped (matching the sibling tests that use the return directly).
    client_from_dict_config = self.check_metlog()
    METLOG_CONF = {
        'logger': 'django-statsd',
        'sender': {
            'class': 'metlog.senders.DebugCaptureSender',
        },
    }
    metlog = client_from_dict_config(METLOG_CONF)
    with mock.patch.object(settings, 'METLOG', metlog):
        with mock.patch.object(settings, 'STATSD_CLIENT',
                               'django_statsd.clients.moz_metlog'):
            client = get_client()
            eq_(len(client.metlog.sender.msgs), 0)
            client.timing('testing', 512, rate=2)
            eq_(len(client.metlog.sender.msgs), 1)
            msg = json.loads(client.metlog.sender.msgs[0])
            eq_(msg['severity'], 6)
            eq_(msg['payload'], '512')
            eq_(msg['fields']['rate'], 2)
            eq_(msg['fields']['name'], 'testing')
            eq_(msg['type'], 'timer')
def mb():
    """Flood HOST:PORT with metlog test messages (CLI described by mb_doc).

    With --raw, send a canned JSON message over a bare UDP socket in a
    tight loop; otherwise build a metlog client (from --metlogcfg or a
    default UDP config) and send 'MBTEST' messages forever.
    """
    arguments = docopt(mb_doc)
    host = arguments.get('HOST')
    port = int(arguments.get('PORT'))
    DEFAULT_CONFIG = {
        'logger': 'mb',
        'sender': {
            'class': 'metlog.senders.udp.UdpSender',
            'args': (host, port),
        },
    }
    if arguments.get('--raw'):
        udpsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        utcnow = datetime.utcnow()
        # Force a fractional-seconds component so the timestamp format
        # is stable whether or not microseconds happen to be zero.
        if utcnow.microsecond == 0:
            timestamp = "%s.000000Z" % utcnow.isoformat()
        else:
            timestamp = "%sZ" % utcnow.isoformat()
        msg = {
            "severity": 6,
            "timestamp": timestamp,
            "metlog_hostname": "spire",
            "fields": {"userid": 25, "req_time": 4},
            "metlog_pid": 34328,
            "logger": "syncstorage",
            "type": "services",
            "payload": "foo",
            "env_version": "0.8",
        }
        json_msg = json.dumps(msg)
        while True:
            udpsock.sendto(json_msg, (host, port))
    if arguments.get('--metlogcfg'):
        with open(arguments['--metlogcfg']) as cfgfile:
            client = client_from_stream_config(cfgfile, 'metlog')
    else:
        client = client_from_dict_config(DEFAULT_CONFIG)
    while True:
        client.metlog('MBTEST', payload='MBTEST')
def setUp(self):
    """Swap settings.METLOG's sender for a DebugCaptureSender so sent
    messages can be inspected, and force 'django.request.tastypie' to
    use only the MetlogTastypieHandler.  tearDown is expected to put
    the original handler list back afterwards.
    """
    metlog = settings.METLOG
    METLOG_CONF = {
        'logger': 'zamboni',
        'sender': {'class': 'metlog.senders.DebugCaptureSender'},
    }
    from metlog.config import client_from_dict_config
    self.metlog = client_from_dict_config(METLOG_CONF, metlog)
    self.metlog.sender.msgs.clear()
    self.logger = logging.getLogger('django.request.tastypie')
    # When logging.config.dictConfig is used to configure logging with
    # a 'one-shot' config dictionary, any previously instantiated
    # singleton loggers (ie: all old loggers not in the new config)
    # will be explicitly disabled -- so re-enable this one.
    self.logger.disabled = False
    self._orig_handlers = self.logger.handlers
    self.logger.handlers = [MetlogTastypieHandler(settings.METLOG)]
def test_clients_expose_configuration():
    """A client built from a dict config exposes that config as JSON."""
    cfg = {
        'logger': 'addons-marketplace-dev',
        'sender': {'class': 'metlog.senders.UdpSender',
                   'host': ['logstash1', 'logstash2'],
                   'port': '5566'},
    }
    client = client_from_dict_config(cfg)
    eq_(client._config, json.dumps(cfg))
def setUp(self):
    """Install a DebugCaptureSender-backed client as the holder's
    default, remembering the previous default for tearDown."""
    self.orig_default_client = CLIENT_HOLDER.global_config.get('default')
    client = CLIENT_HOLDER.get_client(self.client_name)
    client_config = {
        'sender_class': 'metlog.senders.DebugCaptureSender',
    }
    self.client = client_from_dict_config(client_config, client)
    CLIENT_HOLDER.set_default_client_name(self.client_name)
def setUp(self):
    """Install a DebugCaptureSender-backed client as the holder's
    default, remembering the previous default for tearDown."""
    self.orig_default_client = CLIENT_HOLDER.global_config.get('default')
    client = CLIENT_HOLDER.get_client(self.client_name)
    client_config = {
        'sender_class': 'metlog.senders.DebugCaptureSender',
    }
    self.client = client_from_dict_config(client_config, client)
    CLIENT_HOLDER.set_default_client_name(self.client_name)
def setUp(self):
    """Route metlog output into a BufferingHandler on 'z.metlog' so the
    tests can read back what was logged."""
    from lib.settings_base import METLOG_CONF
    # workaround metlog config mutating dict config :P
    METLOG_CONF['sender']['class'] = ('metlog.senders.logging'
                                      '.StdLibLoggingSender')
    self.metlog = client_from_dict_config(METLOG_CONF)
    self.logger = logging.getLogger('z.metlog')
    self._orig_handlers = self.logger.handlers
    self.handler = logging.handlers.BufferingHandler(65536)
    self.logger.handlers = [self.handler]
def setUp(self):
    """Log in and replace settings.METLOG with a DebugCaptureSender
    client (CEF plugin enabled with override) so cef() output can be
    inspected by the tests."""
    self.client.login(username='******', password='******')
    metlog = settings.METLOG
    METLOG_CONF = {
        'logger': 'zamboni',
        'plugins': {'cef': ('metlog_cef.cef_plugin:config_plugin',
                            {'override': True})},
        'sender': {'class': 'metlog.senders.DebugCaptureSender'},
    }
    from metlog.config import client_from_dict_config
    self.metlog = client_from_dict_config(METLOG_CONF, metlog)
    self.metlog.sender.msgs.clear()
def setUp(self):
    """Make the holder's default client a DebugCaptureSender client
    with the raven plugin configured; the previous default is kept so
    tearDown can restore it."""
    self.orig_default_client = CLIENT_HOLDER.global_config.get('default')
    client = CLIENT_HOLDER.get_client(self.client_name)
    client_config = {
        'sender_class': 'metlog.senders.DebugCaptureSender',
        'plugins': {
            'plugin_section_name': (
                'metlog_raven.raven_plugin:config_plugin',
                {'dsn': 'udp://*****:*****@somehost.com:5000/2'}),
        },
    }
    self.client = client_from_dict_config(client_config, client)
    CLIENT_HOLDER.set_default_client_name(self.client_name)
def setUp(self):
    """Log in and replace settings.METLOG with a DebugCaptureSender
    client (CEF plugin enabled with override) so cef() output can be
    inspected by the tests."""
    self.client.login(username="******", password="******")
    metlog = settings.METLOG
    METLOG_CONF = {
        "logger": "zamboni",
        "plugins": {"cef": ("metlog_cef.cef_plugin:config_plugin",
                            {"override": True})},
        "sender": {"class": "metlog.senders.DebugCaptureSender"},
    }
    from metlog.config import client_from_dict_config
    self.metlog = client_from_dict_config(METLOG_CONF, metlog)
    self.metlog.sender.msgs.clear()
def setUp(self):
    """Log in and replace settings.METLOG with a DebugCaptureSender
    client (CEF plugin enabled with override) so cef() output can be
    inspected by the tests."""
    self.client.login(username='******', password='******')
    metlog = settings.METLOG
    METLOG_CONF = {
        'logger': 'zamboni',
        'plugins': {'cef': ('metlog_cef.cef_plugin:config_plugin',
                            {'override': True})},
        'sender': {'class': 'metlog.senders.DebugCaptureSender'},
    }
    from metlog.config import client_from_dict_config
    self.metlog = client_from_dict_config(METLOG_CONF, metlog)
    self.metlog.sender.msgs.clear()
def explode(self):
    """Deliberately trigger the error path selected by the form's
    'error' field so the metlog/raven plumbing can be exercised."""
    error = self.cleaned_data.get("error")
    from metlog.config import client_from_dict_config
    new_metlog = client_from_dict_config(settings.METLOG_CONF)
    # The next two variables are captured by the raven client as local
    # variables which are passed into the sentry server.  They are
    # needed to verify that the active metlog configuration is not the
    # actual expected metlog configuration.
    metlog_conf = settings.METLOG_CONF
    active_metlog_conf = settings.METLOG._config
    if error == "zerodivisionerror":
        1 / 0
    elif error == "iorequesterror":
        class IOError(Exception):
            pass
        raise IOError("request data read error")
    elif error == "metlog_cef":
        environ = {
            "REMOTE_ADDR": "127.0.0.1",
            "HTTP_HOST": "127.0.0.1",
            "PATH_INFO": "/",
            "REQUEST_METHOD": "GET",
            "HTTP_USER_AGENT": "MySuperBrowser",
        }
        config = {
            "cef.version": "0",
            "cef.vendor": "mozilla",
            "cef.device_version": "3",
            "cef.product": "zamboni",
            "cef": True,
        }
        new_metlog.cef("xx\nx|xx\rx", 5, environ, config,
                       username="******", ext1="ok=ok", ext2="ok\\ok")
    elif error == "metlog_statsd":
        new_metlog.incr(name=LOGGER_NAME)
    elif error == "metlog_json":
        new_metlog.metlog(type="metlog_json",
                          fields={"foo": "bar", "secret": 42})
    elif error == "metlog_sentry":
        # If this works, we have some kind of import ordering problem.
        try:
            1 / 0
        except:
            new_metlog.raven("metlog_sentry error triggered")
def setUp(self):
    """Point settings.METLOG at a DebugCaptureSender so the messages it
    sends can be inspected by the tests."""
    metlog = settings.METLOG
    METLOG_CONF = {
        'logger': 'zamboni',
        'sender': {'class': 'metlog.senders.DebugCaptureSender'},
    }
    from metlog.config import client_from_dict_config
    self.metlog = client_from_dict_config(METLOG_CONF, metlog)
def setUp(self):
    """Point settings.METLOG at a DebugCaptureSender so the messages it
    sends can be inspected by the tests."""
    metlog = settings.METLOG
    METLOG_CONF = {
        'logger': 'zamboni',
        'sender': {'class': 'metlog.senders.DebugCaptureSender'},
    }
    from metlog.config import client_from_dict_config
    self.metlog = client_from_dict_config(METLOG_CONF, metlog)
def setUp(self):
    """Make the holder's default client a DebugCaptureSender client
    with the raven plugin configured against self.dsn; the previous
    default is kept so tearDown can restore it."""
    self.orig_default_client = CLIENT_HOLDER.global_config.get('default')
    client = CLIENT_HOLDER.get_client(self.client_name)
    self.dsn = "udp://*****:*****@somehost.com:9000/2"
    client_config = {
        'sender_class': 'metlog.senders.DebugCaptureSender',
        'plugins': {
            'plugin_section_name': [
                'metlog_raven.raven_plugin:config_plugin',
                {'dsn': self.dsn}],
        },
    }
    self.client = client_from_dict_config(client_config, client)
    CLIENT_HOLDER.set_default_client_name(self.client_name)
def _create_client(self):
    """Return a fresh metlog client wired to a DebugCaptureSender.

    check_metlog() skips the test when metlog is not installed and
    returns the client factory; the previous local re-import of
    client_from_dict_config merely shadowed that value, so it was
    dropped (matching the sibling tests that use the return directly).
    """
    client_from_dict_config = self.check_metlog()
    METLOG_CONF = {
        'logger': 'django-statsd',
        'sender': {
            'class': 'metlog.senders.DebugCaptureSender',
        },
    }
    return client_from_dict_config(METLOG_CONF)
def main(global_config, **settings):
    """Build and return the queuey WSGI application."""
    config_file = global_config['__file__']
    config_file = os.path.abspath(
        os.path.normpath(os.path.expandvars(os.path.expanduser(config_file))))
    settings['config'] = config = Config(config_file)
    # Put values from the config file into the pyramid settings dict.
    for section in config.sections():
        setting_prefix = section.replace(":", ".")
        for name, value in config.get_map(section).iteritems():
            settings[setting_prefix + "." + name] = value
    config = Configurator(
        root_factory=Root,
        settings=settings,
        authentication_policy=QueueyAuthenticationPolicy(),
        authorization_policy=ACLAuthorizationPolicy())
    config.registry['backend_storage'] = configure_from_settings(
        'storage', settings['config'].get_map('storage'))
    config.registry['backend_metadata'] = configure_from_settings(
        'metadata', settings['config'].get_map('metadata'))
    # Load the Metlog Client instance
    config.registry['metlog_client'] = client_from_dict_config(
        settings['config'].get_map('metlog'))
    # Load the application keys: invert {app_name: [key, ...]} into a
    # flat {key: app_name} lookup table.
    app_vals = settings['config'].get_map('application_keys')
    app_keys = {}
    for app_name, key_list in app_vals.items():
        for app_key in key_list:
            app_keys[app_key] = app_name
    config.registry['app_keys'] = app_keys
    config.registry['app_names'] = app_vals.keys()
    # adds Mozilla default views
    config.include("mozsvc")
    config.scan('queuey.views')
    # Replace default renderer with ujson rendering
    config.add_renderer(None, 'queuey.views.UJSONRendererFactory')
    return config.make_wsgi_app()
def _create_client(self):
    """Return a fresh metlog client wired to a DebugCaptureSender.

    check_metlog() skips the test when metlog is not installed and
    returns the client factory; the previous local re-import of
    client_from_dict_config merely shadowed that value, so it was
    dropped (matching the sibling tests that use the return directly).
    """
    client_from_dict_config = self.check_metlog()
    METLOG_CONF = {
        'logger': 'django-statsd',
        'sender': {
            'class': 'metlog.senders.DebugCaptureSender',
        },
    }
    return client_from_dict_config(METLOG_CONF)
def setup_metlog(config_dict, default=False):
    """Instantiate a Metlog client and add it to the client holder.

    :param config_dict: Dictionary object containing the metlog client
                        configuration.
    :param default: Should this be specified as CLIENT_HOLDER's default
                    client?  Note that the first client added becomes
                    the default automatically, regardless of this flag.
    """
    logger_name = config_dict.get('logger', '')
    holder_client = CLIENT_HOLDER.get_client(logger_name)
    holder_client = client_from_dict_config(config_dict, holder_client)
    if default:
        CLIENT_HOLDER.set_default_client_name(logger_name)
def setUp(self):
    """Make the holder's default client a DebugCaptureSender client
    with the raven plugin configured; the previous default is kept so
    tearDown can restore it."""
    self.orig_default_client = CLIENT_HOLDER.global_config.get('default')
    client = CLIENT_HOLDER.get_client(self.client_name)
    raven_plugin = ('metlog_raven.raven_plugin:config_plugin',
                    {'dsn': 'udp://*****:*****@somehost.com:5000/2'})
    client_config = {
        'sender_class': 'metlog.senders.DebugCaptureSender',
        'plugins': {'plugin_section_name': raven_plugin},
    }
    self.client = client_from_dict_config(client_config, client)
    CLIENT_HOLDER.set_default_client_name(self.client_name)
def setup_metlog(config_dict, default=False):
    """Instantiate a Metlog client and add it to the client holder.

    :param config_dict: Dictionary object containing the metlog client
                        configuration.
    :param default: Should this be specified as CLIENT_HOLDER's default
                    client?  Note that the first client added becomes
                    the default automatically, regardless of this flag.
    """
    logger_name = config_dict.get('logger', '')
    holder_client = CLIENT_HOLDER.get_client(logger_name)
    holder_client = client_from_dict_config(config_dict, holder_client)
    if default:
        CLIENT_HOLDER.set_default_client_name(logger_name)
def test_get_client(self):
    """With STATSD_CLIENT set to moz_metlog, get_client() must return
    the metlog-backed statsd client."""
    client_from_dict_config = self.check_metlog()
    METLOG_CONF = {
        'logger': 'django-statsd',
        'sender': {
            'class': 'metlog.senders.DebugCaptureSender',
        },
    }
    metlog = client_from_dict_config(METLOG_CONF)
    with mock.patch.object(settings, 'METLOG', metlog):
        with mock.patch.object(settings, 'STATSD_CLIENT',
                               'django_statsd.clients.moz_metlog'):
            client = get_client()
            eq_(client.__module__, 'django_statsd.clients.moz_metlog')
def mb():
    """Flood HOST:PORT with metlog test messages (CLI described by mb_doc).

    With --raw, send a canned JSON message over a bare UDP socket in a
    tight loop; otherwise build a metlog client (from --metlogcfg or a
    default UDP config) and send 'MBTEST' messages forever.
    """
    arguments = docopt(mb_doc)
    host = arguments.get('HOST')
    port = int(arguments.get('PORT'))
    DEFAULT_CONFIG = {'logger': 'mb',
                      'sender': {'class': 'metlog.senders.udp.UdpSender',
                                 'args': (host, port)}}
    if arguments.get('--raw'):
        udpsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        utcnow = datetime.utcnow()
        # Guarantee a fractional-seconds component in the timestamp.
        suffix = ".000000Z" if utcnow.microsecond == 0 else "Z"
        timestamp = "%s%s" % (utcnow.isoformat(), suffix)
        msg = {"severity": 6,
               "timestamp": timestamp,
               "metlog_hostname": "spire",
               "fields": {"userid": 25, "req_time": 4},
               "metlog_pid": 34328,
               "logger": "syncstorage",
               "type": "services",
               "payload": "foo",
               "env_version": "0.8"}
        json_msg = json.dumps(msg)
        while True:
            udpsock.sendto(json_msg, (host, port))
    if arguments.get('--metlogcfg'):
        with open(arguments['--metlogcfg']) as cfgfile:
            client = client_from_stream_config(cfgfile, 'metlog')
    else:
        client = client_from_dict_config(DEFAULT_CONFIG)
    while True:
        client.metlog('MBTEST', payload='MBTEST')
def setUp(self):
    """Make the holder's default client a DebugCaptureSender client
    with the raven plugin configured against self.dsn; the previous
    default is kept so tearDown can restore it."""
    self.orig_default_client = CLIENT_HOLDER.global_config.get('default')
    client = CLIENT_HOLDER.get_client(self.client_name)
    self.dsn = "udp://*****:*****@somehost.com:9000/2"
    raven_plugin = ['metlog_raven.raven_plugin:config_plugin',
                    {'dsn': self.dsn}]
    client_config = {
        'sender_class': 'metlog.senders.DebugCaptureSender',
        'plugins': {'plugin_section_name': raven_plugin},
    }
    self.client = client_from_dict_config(client_config, client)
    CLIENT_HOLDER.set_default_client_name(self.client_name)
def test_get_client(self):
    """With STATSD_CLIENT set to moz_metlog, get_client() must return
    the metlog-backed statsd client."""
    client_from_dict_config = self.check_metlog()
    METLOG_CONF = {
        'logger': 'django-statsd',
        'sender': {
            'class': 'metlog.senders.DebugCaptureSender',
        },
    }
    metlog = client_from_dict_config(METLOG_CONF)
    with mock.patch.object(settings, 'METLOG', metlog):
        with mock.patch.object(settings, 'STATSD_CLIENT',
                               'django_statsd.clients.moz_metlog'):
            client = get_client()
            eq_(client.__module__, 'django_statsd.clients.moz_metlog')
def setUp(self):
    """Route metlog output into a BufferingHandler on 'z.metlog' so the
    tests can read back what was logged."""
    from lib.settings_base import METLOG_CONF
    # workaround metlog config mutating dict config :P
    METLOG_CONF['sender']['class'] = ('metlog.senders.logging'
                                      '.StdLibLoggingSender')
    self.metlog = client_from_dict_config(METLOG_CONF)
    self.logger = logging.getLogger('z.metlog')
    # When logging.config.dictConfig is used to configure logging with
    # a 'one-shot' config dictionary, any previously instantiated
    # singleton loggers (ie: all old loggers not in the new config)
    # will be explicitly disabled -- so re-enable this one.
    self.logger.disabled = False
    self._orig_handlers = self.logger.handlers
    self.handler = logging.handlers.BufferingHandler(65536)
    self.logger.handlers = [self.handler]
def setUp(self):
    """Point settings.METLOG at a DebugCaptureSender so the messages it
    sends can be inspected by the tests.  (The note about tastypie
    handlers in the original comment applies to the surrounding test
    class, not to this method's body.)"""
    metlog = settings.METLOG
    METLOG_CONF = {
        'logger': 'zamboni',
        'sender': {'class': 'metlog.senders.DebugCaptureSender'},
    }
    from metlog.config import client_from_dict_config
    self.metlog = client_from_dict_config(METLOG_CONF, metlog)
def setUp(self):
    """Wire up the metlog <-> raven/sentry integration for the tests.

    settings.SENTRY_CLIENT is the classname of the object that
    raven.contrib.django.models.get_client() will return (a subclass of
    raven.base.Client); all messages are routed through it, so for
    metlog integration it *must* be
    'raven_metlog.djangometlog.MetlogDjangoClient'.
    settings.METLOG_CONF holds the metlog client configuration and
    settings.METLOG the actual metlog client instance.
    """
    # Example UDP sender configuration, kept for reference:
    # 'sender': {
    #     'class': 'metlog.senders.UdpSender',
    #     'host': '192.168.20.2',
    #     'port': 5565,
    # },
    self.METLOG_CONF = {
        'sender': {
            'class': 'metlog.senders.DebugCaptureSender',
        },
        'plugins': {
            'raven': ('metlog_raven.raven_plugin:config_plugin',
                      {'dsn': DSN}),
        },
    }
    self.SENTRY_CLIENT = 'djangoraven.metlog.MetlogDjangoClient'
    from metlog.config import client_from_dict_config
    self.METLOG = client_from_dict_config(self.METLOG_CONF)
def setUp(self):
    """Wire up the metlog <-> raven/sentry integration for the tests.

    settings.SENTRY_CLIENT is the classname of the object that
    raven.contrib.django.models.get_client() will return (a subclass of
    raven.base.Client); all messages are routed through it, so for
    metlog integration it *must* be
    'raven_metlog.djangometlog.MetlogDjangoClient'.
    settings.METLOG_CONF holds the metlog client configuration and
    settings.METLOG the actual metlog client instance.
    """
    # Example UDP sender configuration, kept for reference:
    # 'sender': {
    #     'class': 'metlog.senders.UdpSender',
    #     'host': '192.168.20.2',
    #     'port': 5565,
    # },
    self.METLOG_CONF = {
        'sender': {
            'class': 'metlog.senders.DebugCaptureSender',
        },
        'plugins': {
            'raven': ('metlog_raven.raven_plugin:config_plugin',
                      {'dsn': DSN}),
        },
    }
    self.SENTRY_CLIENT = 'djangoraven.metlog.MetlogDjangoClient'
    from metlog.config import client_from_dict_config
    self.METLOG = client_from_dict_config(self.METLOG_CONF)
def main(global_config, **settings):
    """Build and return the queuey WSGI application."""
    raw_path = global_config['__file__']
    config_file = os.path.abspath(
        os.path.normpath(os.path.expandvars(os.path.expanduser(raw_path))))
    settings['config'] = config = Config(config_file)
    # Copy every config-file value into the pyramid settings dict under
    # a "<section>.<name>" key (":" in section names becomes ".").
    for section in config.sections():
        setting_prefix = section.replace(":", ".")
        for name, value in config.get_map(section).iteritems():
            settings[setting_prefix + "." + name] = value
    config = Configurator(
        root_factory=Root,
        settings=settings,
        authentication_policy=QueueyAuthenticationPolicy(),
        authorization_policy=ACLAuthorizationPolicy())
    config.registry['backend_storage'] = configure_from_settings(
        'storage', settings['config'].get_map('storage'))
    config.registry['backend_metadata'] = configure_from_settings(
        'metadata', settings['config'].get_map('metadata'))
    # Load the Metlog Client instance
    config.registry['metlog_client'] = client_from_dict_config(
        settings['config'].get_map('metlog'))
    # Load the application keys: invert {app_name: [key, ...]} into a
    # flat {key: app_name} lookup table.
    app_vals = settings['config'].get_map('application_keys')
    app_keys = {}
    for app_name, key_list in app_vals.items():
        for app_key in key_list:
            app_keys[app_key] = app_name
    config.registry['app_keys'] = app_keys
    config.registry['app_names'] = app_vals.keys()
    # adds Mozilla default views
    config.include("mozsvc")
    config.scan('queuey.views')
    # Replace default renderer with ujson rendering
    config.add_renderer(None, 'queuey.views.UJSONRendererFactory')
    return config.make_wsgi_app()
def setUp(self):
    """Route metlog output into a BufferingHandler on 'z.metlog' so the
    tests can read back what was logged."""
    from lib.settings_base import METLOG_CONF
    # workaround metlog config mutating dict config :P
    METLOG_CONF['sender']['class'] = ('metlog.senders.logging'
                                      '.StdLibLoggingSender')
    self.metlog = client_from_dict_config(METLOG_CONF)
    self.logger = logging.getLogger('z.metlog')
    # When logging.config.dictConfig is used to configure logging with
    # a 'one-shot' config dictionary, any previously instantiated
    # singleton loggers (ie: all old loggers not in the new config)
    # will be explicitly disabled -- so re-enable this one.
    self.logger.disabled = False
    self._orig_handlers = self.logger.handlers
    self.handler = logging.handlers.BufferingHandler(65536)
    self.logger.handlers = [self.handler]
def setUp(self):
    """Send metlog output through the stdlib 'z.metlog' logger and
    buffer it so the tests can read back what was logged."""
    METLOG_CONF = {
        'sender': {
            'class': 'metlog.senders.logging.StdLibLoggingSender',
            'logger_name': 'z.metlog',
        }
    }
    self.metlog = client_from_dict_config(METLOG_CONF)
    self.logger = logging.getLogger('z.metlog')
    # When logging.config.dictConfig is used to configure logging with
    # a 'one-shot' config dictionary, any previously instantiated
    # singleton loggers (ie: all old loggers not in the new config)
    # will be explicitly disabled -- so re-enable this one.
    self.logger.disabled = False
    self._orig_handlers = self.logger.handlers
    self.handler = logging.handlers.BufferingHandler(65536)
    self.logger.handlers = [self.handler]
def get_client(name, config_dict=None):
    """
    Return client of the specified name from the CLIENT_HOLDER.

    :param name: String token to identify the MetlogClient, also used
                 for the default `logger` value of that client.
                 `ValueError` will be raised if a config is provided
                 w/ a different `logger` value.
    :param config_dict: Configuration dictionary to be applied to the
                        fetched client.
    """
    client = CLIENT_HOLDER.get_client(name)
    if config_dict:
        # Work on a shallow copy so the caller's dict is never mutated
        # (previously `logger` was written back into the argument).
        config_dict = dict(config_dict)
        logger = config_dict.get('logger')
        if logger and logger != name:
            raise ValueError('Config `logger` value must either match `name` '
                             'argument or be left blank.')
        if not logger:
            config_dict['logger'] = name
        client = client_from_dict_config(config_dict, client=client)
    return client
def setUp(self): METLOG_CONF = { 'sender': { 'class': 'metlog.senders.logging.StdLibLoggingSender', 'logger_name': 'z.metlog', } } self.metlog = client_from_dict_config(METLOG_CONF) self.logger = logging.getLogger('z.metlog') """ When logging.config.dictConfig is used to configure logging with a 'one-shot' config dictionary, any previously instantiated singleton loggers (ie: all old loggers not in the new config) will be explicitly disabled. """ self.logger.disabled = False self._orig_handlers = self.logger.handlers self.handler = logging.handlers.BufferingHandler(65536) self.logger.handlers = [self.handler]
def explode(self):
    """Deliberately trigger the error path selected by the form's
    'error' field so the metlog plumbing can be exercised."""
    error = self.cleaned_data.get('error')
    from metlog.config import client_from_dict_config
    new_metlog = client_from_dict_config(settings.METLOG_CONF)
    if error == 'zerodivisionerror':
        1 / 0
    elif error == 'iorequesterror':
        class IOError(Exception):
            pass
        raise IOError('request data read error')
    elif error == 'metlog_cef':
        environ = {
            'REMOTE_ADDR': '127.0.0.1',
            'HTTP_HOST': '127.0.0.1',
            'PATH_INFO': '/',
            'REQUEST_METHOD': 'GET',
            'HTTP_USER_AGENT': 'MySuperBrowser',
        }
        config = {
            'cef.version': '0',
            'cef.vendor': 'mozilla',
            'cef.device_version': '3',
            'cef.product': 'zamboni',
            'cef': True,
        }
        new_metlog.cef('xx\nx|xx\rx', 5, environ, config,
                       username='******', ext1='ok=ok', ext2='ok\\ok')
    elif error == 'metlog_statsd':
        new_metlog.incr(name=LOGGER_NAME)
    elif error == 'metlog_json':
        new_metlog.metlog(type="metlog_json",
                          fields={'foo': 'bar', 'secret': 42})
    elif error == 'metlog_sentry':
        # If this works, we have some kind of import ordering problem.
        try:
            1 / 0
        except:
            new_metlog.raven('metlog_sentry error triggered')
'z.es': {'level': logging.INFO}, 'nose': {'level': logging.WARNING}, }, } METLOG_CONF = { 'logger': 'zamboni', 'plugins': {'cef': ('metlog_cef.cef_plugin:config_plugin', {})}, 'sender': { 'class': 'metlog.senders.logging.StdLibLoggingSender', 'logger_name': 'z.metlog', }, } METLOG = client_from_dict_config(METLOG_CONF) # Send Django signals asynchronously on a background thread. ASYNC_SIGNALS = True # Feature flags SEARCH_EXCLUDE_PERSONAS = True ## elasticsearch ES_HOSTS = ['127.0.0.1:9200'] ES_INDEXES = {'default': 'amo', 'update_counts': 'amo_stats', 'download_counts': 'amo_stats', 'stats_contributions': 'amo_stats', 'stats_collections_counts': 'amo_stats',
}, }, } METLOG_CONF = { 'logger': 'zamboni', 'plugins': { 'cef': ('metlog_cef.cef_plugin:config_plugin', {}) }, 'sender': { 'class': 'metlog.senders.logging.StdLibLoggingSender', 'logger_name': 'z.metlog', }, } METLOG = client_from_dict_config(METLOG_CONF) # Send Django signals asynchronously on a background thread. ASYNC_SIGNALS = True # Feature flags SEARCH_EXCLUDE_PERSONAS = True ## elasticsearch ES_HOSTS = ['127.0.0.1:9200'] ES_INDEXES = { 'default': 'amo', 'update_counts': 'amo_stats', 'download_counts': 'amo_stats', 'stats_contributions': 'amo_stats', 'stats_collections_counts': 'amo_stats',
def explode(self):
    """Deliberately trigger the error path selected by the form's
    'error' field, firing through *both* a freshly-built metlog client
    and settings.METLOG so their behavior can be compared."""
    error = self.cleaned_data.get('error')
    from metlog.config import client_from_dict_config
    new_metlog = client_from_dict_config(settings.METLOG_CONF)
    if error == 'zerodivisionerror':
        1 / 0
    elif error == 'iorequesterror':
        class IOError(Exception):
            pass
        raise IOError('request data read error')
    elif error == 'metlog_cef':
        environ = {
            'REMOTE_ADDR': '127.0.0.1',
            'HTTP_HOST': '127.0.0.1',
            'PATH_INFO': '/',
            'REQUEST_METHOD': 'GET',
            'HTTP_USER_AGENT': 'MySuperBrowser',
        }
        config = {
            'cef.version': '0',
            'cef.vendor': 'mozilla',
            'cef.device_version': '3',
            'cef.product': 'zamboni',
            'cef': True,
        }
        settings.METLOG.cef('xx\nx|xx\rx', 5, environ, config,
                            username='******', ext1='ok=ok',
                            ext2='ok\\ok', logger_info='settings.METLOG')
        new_metlog.cef('xx\nx|xx\rx', 5, environ, config,
                       username='******', ext1='ok=ok',
                       ext2='ok\\ok', logger_info='new_metlog')
    elif error == 'metlog_statsd':
        new_metlog.incr(name="new_metlog:" + LOGGER_NAME)
        settings.METLOG.incr(name=LOGGER_NAME)
    elif error == 'metlog_json':
        new_metlog.metlog(type="metlog_json",
                          fields={'foo': 'bar', 'secret': 42,
                                  'logger_type': 'new_metlog'})
        settings.METLOG.metlog(type="metlog_json",
                               fields={'foo': 'bar', 'secret': 42,
                                       'logger_type': 'settings.METLOG'})
    elif error == 'metlog_sentry':
        # These are local variables only used by Sentry's frame hacking
        # magic.  They won't be referenced, which may trigger flake8
        # errors.
        metlog_conf = settings.METLOG_CONF  # NOQA
        active_metlog_conf = settings.METLOG._config  # NOQA
        # Try to fire off two messages to verify that we don't have
        # some kind of transient issue where settings.METLOG doesn't
        # work.
        try:
            2 / 0
        except:
            new_metlog.raven('new_metlog: metlog_sentry error triggered')
        finally:
            try:
                1 / 0
            except:
                settings.METLOG.raven('metlog_sentry error triggered')