def test_runloop_execute(self):
    """ReattachThread.run(): drains one queued item, calls reattach(), then
    logs the queue timeout and its own termination."""
    from asset_folder_importer.fix_unattached_media import reattach_thread
    from Queue import PriorityQueue  # Python 2 stdlib queue
    from raven import Client
    q = PriorityQueue()
    # Raven client with a fake DSN; captureException is mocked so nothing is sent.
    fake_client = Client('https://*****:*****@fake-sentry-server/1')
    fake_client.captureException = mock.MagicMock()
    logger = logging.getLogger("test_runloop")
    logger.debug = mock.MagicMock()
    logger.error = mock.MagicMock()
    logger.info = mock.MagicMock()
    logger.warning = mock.MagicMock()
    # timeout=2 makes run() exit after 2s of empty queue; should_raise=True
    # re-raises unexpected errors instead of swallowing them.
    rat = reattach_thread.ReattachThread(q, options=None, raven_client=fake_client,
                                         timeout=2, logger=logger, should_raise=True)
    q.put((1, {'itemid': 'KP-1234', 'collectionid': 'KP-5678'}))
    rat.reattach = mock.MagicMock()
    rat.run()
    rat.reattach.assert_called_with('KP-1234', 'KP-5678')
    logger.error.assert_called_with("Input queue timed out, exiting.")
    logger.info.assert_called_with("Reattach thread terminating")
def lights433(host, port, adapter, adapter_args, switches, sentry):
    """Start the Lights433 server, optionally wiring up Sentry error reporting.

    ``sentry`` is a path to a file containing the Sentry DSN; when omitted,
    DEFAULT_SENTRY_CONF is used if it exists on disk.
    """
    sentry_client, sentry_url = None, None
    if sentry or os.path.exists(DEFAULT_SENTRY_CONF):
        sentry = sentry if sentry is not None else DEFAULT_SENTRY_CONF
        with open(sentry, 'r') as f:
            sentry_url = f.read().strip()
        if not sentry_url:
            log.error("No sentry URL specified in [%s]" % sentry)
            sys.exit(1)
        else:
            sentry_client = Client(sentry_url)
            log.info("Sentry client configured!")
    try:
        log.info("Loading switch configurations from [%s]" % switches)
        # adapter_args is "key1=val1,key2=val2" -> kwargs for the adapter class.
        adapter_kwargs = dict(
            pair.split('=') for pair in adapter_args.split(',')
        )
        adapter = get_adapter(adapter)(**adapter_kwargs)
        server = Lights433Server(host, port, adapter, switches)
    except:
        # Bare except is intentional: report any startup failure to Sentry,
        # then re-raise so the process still dies.
        if sentry_client:
            sentry_client.captureException()
        raise
    if sentry_client:
        # Attach the Flask Sentry middleware to the app before serving.
        Sentry(dsn=sentry_url).init_app(server.app)
    server.run()
def process(self, record):
    """Forward a matching log record to Sentry, lazily resolving its DSN.

    The DSN for ``record.application`` is looked up (or the project created)
    through the Sentry HTTP API on first sight, then cached in ``self.dsns``.
    """
    if match_record(self.rules_tree, record):
        dsn = self.dsns.get(record.application)
        if dsn is None:
            projects = requests.get("%s/api/0/projects/" % self.url,
                                    auth=self.auth).json()
            for project in projects:
                if project["name"] == record.application:
                    break
            else:
                # No existing project matched: create one under our org/team.
                project = requests.post("%s/api/0/teams/%s/%s/projects/" %
                                        (self.url, self.organization, self.team),
                                        auth=self.auth,
                                        headers={"Content-type": "application/json"},
                                        data=json.dumps({"name": record.application})).json()
            # Take the secret DSN of a client key for this project (the loop
            # leaves dsn set to the last key listed).
            for key in requests.get("%s/api/0/projects/%s/%s/keys/" %
                                    (self.url, self.organization, project["slug"]),
                                    auth=self.auth).json():
                dsn = key["dsn"]["secret"]
            self.dsns[record.application] = dsn
        client = Client(dsn, raise_send_errors=True)
        client.capture("raven.events.Message",
                       message=record.msg,
                       formatted=record.explanation or record.msg,
                       data={"logger": record.logger},
                       date=record.datetime,
                       extra=record._asdict())
class SentryMiddleware(Sentry):
    """
    As raven.middleware.Sentry doesn't really do what we need, we build our
    own. It merely extends Sentry in order to reuse the get_http_context
    method.
    """

    def __init__(self, app, config):
        """Wrap WSGI app *app*; read the DSN from the adhocracy *config*."""
        self.app = app
        dsn = aconfig.get('adhocracy.sentry.dsn', config=config)
        if not dsn:
            raise Exception(
                'Sentry misconfigured. Please add adhocracy.sentry.dsn '
                'to your adhocracy config.')
        self.client = Client(dsn)
        # Also route Python logging records at the configured level to Sentry.
        handler = SentryHandler(
            self.client,
            level=aconfig.get('adhocracy.sentry.loglevel'))
        setup_logging(handler)

    def __call__(self, environ, start_response):
        """WSGI entry point: tag the release and attach per-request HTTP context."""
        self.client.tags_context({'version': version.get_version()})
        self.client.http_context(self.get_http_context(environ))
        return self.app(environ, start_response)
def main():
    """``raven test`` entry point (Python 2): send a test message to the DSN
    given on the command line and print where to view it."""
    # Surface raven's own error logging on stderr while testing.
    root = logging.getLogger('sentry.errors')
    root.setLevel(logging.DEBUG)
    root.addHandler(logging.StreamHandler())
    dsn = ' '.join(sys.argv[2:])
    if not (dsn or os.environ.get('SENTRY_DSN')):
        print "Error: No configuration detected!"
        print "You must either pass a DSN to the command, or set the SENTRY_DSN environment variable."
        sys.exit(1)
    print "Using DSN configuration:"
    print " ", dsn
    print
    client = Client(dsn)
    print "Client configuration:"
    for k in ('servers', 'project', 'public_key', 'secret_key'):
        print ' %-15s: %s' % (k, getattr(client, k))
    print
    if not all([client.servers, client.project, client.public_key, client.secret_key]):
        print "Error: All values must be set!"
        sys.exit(1)
    print 'Sending a test message...',
    ident = client.get_ident(client.captureMessage('This is a test message generated using ``raven test``'))
    print 'success!'
    print
    print 'The test message can be viewed at the following URL:'
    # Derive the web UI base from the first store endpoint.
    url = client.servers[0].split('/api/store/', 1)[0]
    print ' %s/%s/search/?q=%s' % (url, client.project, ident)
def main(self):
    """Read a cron-generated mail from stdin and forward it to Sentry."""
    # consume standard input early
    lines = []
    # Matches cron's generated "Subject: Cron <user@host> cmd" header so the
    # hostname can be stripped.
    p = re.compile("Subject: Cron <[^@]+@[^ ]+> (.*)")
    mail_subject = 'This mail has no subject.'
    for line in sys.stdin:
        line = line.rstrip()
        lines.append(line)
        if line.startswith('Subject:'):
            mail_subject = line
            # Removes hostname from cron subject to aggregate sentry events
            if p.search(line):
                cron_subject = p.search(line).group(1)
                mail_subject = "Subject: Cron {0}".format(cron_subject)
    body = os.linesep.join(lines)
    if not len(body):
        sys.stderr.write("Empty stdin, nothing to report")
        sys.stderr.write(os.linesep)
        sys.exit(1)
    # init raven quickly, so if something is wrong it get logged early
    from raven import Client
    dsn = self.config['sentry_dsn']
    # Force raven's synchronous "requests" transport.
    if not dsn.startswith("requests+"):
        dsn = "requests+" + dsn
    client = Client(dsn=dsn)
    # Explicit subject from config wins over the one parsed from the mail.
    if self.config['subject']:
        subject = self.config['subject']
    else:
        subject = mail_subject
    msg = os.linesep.join((subject, body))
    client.captureMessage(msg, extra=os.environ)
def process_jobs(self, beanstalk):
    """Worker loop (Python 2): reserve beanstalk jobs forever and dispatch
    them to the handler registered for their tube name."""
    while True:
        logger.debug("Beanstalk connection established, waiting for jobs")
        job = beanstalk.reserve()
        job_name = job.stats()["tube"]
        if job_name in self.jobs:
            logger.debug("Calling %s with arg: %s" % (job_name, job.body))
            try:
                # Revive a stale MySQL connection before running the handler.
                connection = db.connections["default"]
                if connection.connection:
                    try:
                        connection.connection.ping()
                    except OperationalError as e:
                        connection.close()
                flush_transaction()
                self.jobs[job_name](job.body)
            except Exception, e:
                tp, value, tb = sys.exc_info()
                logger.error('Error while calling "%s" with arg "%s": ' "%s" % (job_name, job.body, e))
                logger.debug("%s:%s" % (tp.__name__, value))
                logger.debug("\n".join(traceback.format_tb(tb)))
                # Report to Sentry and bury the job so it is not retried automatically.
                client = Client(dsn=settings.RAVEN_CONFIG["dsn"])
                client.captureMessage(str(e), stack=True, level=logging.ERROR)
                job.bury()
            else:
                job.delete()
        else:
            # Unknown tube: release the job for another worker to pick up.
            job.release()
def run_from_argv(self, argv):
    """Dispatch a management subcommand (Python 2), reporting crashes to Sentry.

    argv layout: [prog, command, subcommand, *options].
    """
    if len(argv) <= 2 or argv[2] in ['-h', '--help']:
        print self.usage(argv[1])
        sys.exit(1)
    subcommand_class = self._get_subcommand_class(argv[2])
    parser = self.create_parser(argv[0], argv[2], subcommand_class)
    # argparse-based parsing (newer Django) vs optparse (older).
    if hasattr(self, 'use_argparse') and self.use_argparse:
        subcommand_class.add_arguments(parser)
        options = parser.parse_args(argv[3:])
        cmd_options = vars(options)
        args = cmd_options.pop('args', ())
    else:
        options, args = parser.parse_args(argv[3:])
    handle_default_options(options)
    try:
        subcommand_class.execute(*args, **options.__dict__)
    except Exception as e:
        if not isinstance(e, CommandError):
            # Prefer SENTRY_DSN, fall back to RAVEN_CONFIG['dsn']; with
            # neither configured, just re-raise.
            if hasattr(settings, 'SENTRY_DSN'):
                dsn = settings.SENTRY_DSN
            elif hasattr(settings, 'RAVEN_CONFIG'):
                dsn = settings.RAVEN_CONFIG.get('dsn')
            else:
                raise
            sentry = Client(dsn)
            # Force sync transport to avoid race condition with the process exiting
            for url in sentry.servers:
                parsed = urlparse.urlparse(url)
                transport = sentry._registry.get_transport(parsed)
                transport.async = False
            sentry.get_ident(sentry.captureException())
        self._write_error_in_stderr(e)
class PyLogConf (PyLog):
    """PyLog subclass driven by a pylogwatch config module (Python 2):
    formats watched log lines and ships them to Sentry via Raven."""

    def __init__ (self, conf):
        """ Initialize object based on the provided configuration """
        self.conf = conf
        self.client = Client (conf.RAVEN['dsn'])
        self.formatters = {}
        for k,v in self.conf.FILE_FORMATTERS.iteritems():
            if isinstance(v,str):
                raise ValueError ('Please use a list or a tuple for the file formatters values')
            # Instantiate each formatter class named in the config.
            self.formatters[k] = [item_import(i)() for i in v]
        # State DB lives next to the config module.
        dbname = os.path.join(os.path.dirname(conf.__file__),'pylogwatch.db')
        return super(PyLogConf, self).__init__ (self.conf.FILE_FORMATTERS.keys(), dbname = dbname)

    def process_lines (self, fname, fileobject, lines):
        """Main workhorse. Called with the filename that is being logged and an
        iterable of lines"""
        for line in lines:
            paramdict = {}
            # '%' is escaped so raven's message formatting does not interpret it.
            data = {'event_type':'Message', 'message': line.replace('%','%%'), 'data' :{'logger':fname}}
            for fobj in self.formatters[fname]:
                fobj.format_line(line, data, paramdict)
            if not data.pop('_do_not_send', False): # Skip lines that have the '_do_not_send' key
                if paramdict:
                    # Params are ordered by sorted key name.
                    data['params'] = tuple([paramdict[i] for i in sorted(paramdict.keys())])
                if self.conf.DEBUG:
                    print data
                self.client.capture(**data)
        # Persist how far we have read in this file.
        self.update_bytes(fname, fileobject.tell())
def send_to_sentry(self):
    """ Send this error to sentry where it will be stored and aggregated. """
    print self.stack_trace
    # NOTE(review): log_level is read but never used below — confirm intent.
    log_level = self.log_level
    filename = get_filename(self.js_url)
    # Raven "Message" payload keyed by the offending JS file name.
    data={
        filename: {
            'url': self.js_url,
            'data': {},
            'query_string': '...',
            'method': 'POST',
        },
        'logger': 'front_end',
        'site': 'site.name',
    }
    if self.extra:
        # self.extra is presumably a JSON string — it is decoded here.
        data.update({"extra": json.loads(self.extra)})
    client = Client(settings.SENTRY_DSN)
    client.capture(
        "Message",
        message=self.message,
        data=data,
    )
class PyLogConf (PyLog):
    """Config-driven PyLog (Python 2): formats log lines and ships them to Sentry."""

    def __init__ (self, conf):
        """Build one formatter-instance list per watched file and init PyLog."""
        self.conf = conf
        self.client = Client (conf.RAVEN['dsn'])
        self.formatters = {}
        for k,v in self.conf.FILE_FORMATTERS.iteritems():
            if isinstance(v,str):
                raise ValueError ('Please use a list or a tuple for the file formatters values')
            self.formatters[k] = [item_import(i)() for i in v]
        dbname = os.path.join(os.path.dirname(conf.__file__),'pylogwatch.db')
        return super(PyLogConf, self).__init__ (self.conf.FILE_FORMATTERS.keys(), dbname = dbname)

    def get_file_signature(self, fname):
        """Return the first up-to-10 lines of *fname* (used to identify the file)."""
        maxcount = 10
        count = 0
        result = []
        with open(fname) as f:
            while count < maxcount:
                result.append(f.readline())
                count+=1
        return result

    def process_lines (self, fname, lines):
        """Format each line and send it to Sentry unless flagged not to."""
        for line in lines:
            paramdict = {}
            data = {'event_type':'Message', 'message': line.replace('%','%%'), 'data' :{'logger':fname}}
            for fobj in self.formatters[fname]:
                fobj.format_line(line, data, paramdict)
            # NOTE(review): this checks get('do_not_send') and leaves the key in
            # the payload; a sibling implementation pops '_do_not_send' instead.
            # Confirm which flag the formatters actually set.
            if not data.get('do_not_send', False):
                if paramdict:
                    data['params'] = tuple([paramdict[i] for i in sorted(paramdict.keys())])
                if self.conf.DEBUG:
                    print data
                self.client.capture(**data)
def connect_sentry(result):
    """Salt returner: push the state ``result`` to Sentry.

    Server list and keys are read from pillar['raven'].
    """
    pillar_data = __salt__['pillar.raw']()
    sentry_data = {
        'result': result,
        # NOTE(review): `ret` is not defined in this function — presumably it
        # comes from module/global scope; verify it is in scope at call time.
        'returned': ret,
        'pillar': pillar_data,
        'grains': __salt__['grains.items']()
    }
    servers = []
    try:
        # Raven's legacy multi-server config wants the full store endpoints.
        for server in pillar_data['raven']['servers']:
            servers.append(server + '/api/store/')
        client = Client(
            servers=servers,
            public_key=pillar_data['raven']['public_key'],
            secret_key=pillar_data['raven']['secret_key'],
            project=pillar_data['raven']['project'],
        )
    except KeyError as missing_key:
        logger.error("Sentry returner need config '%s' in pillar", missing_key)
    else:
        try:
            client.captureMessage(ret['comment'], extra=sentry_data)
        except Exception as err:
            logger.error("Can't send message to sentry: %s", err, exc_info=True)
def run_from_argv(self, argv):
    """Dispatch a management subcommand; report non-CommandError crashes to Sentry.

    argv layout: [prog, command, subcommand, *options].
    """
    if len(argv) <= 2 or argv[2] in ['-h', '--help']:
        stdout = OutputWrapper(sys.stdout)
        stdout.write(self.usage(argv[1]))
        sys.exit(1)
    subcommand_class = self._get_subcommand_class(argv[2])
    parser = self.create_parser(argv[0], argv[2], subcommand_class)
    # argparse-based parsing (newer Django) vs optparse (older).
    if hasattr(self, 'use_argparse') and self.use_argparse:
        subcommand_class.add_arguments(parser)
        options = parser.parse_args(argv[3:])
        cmd_options = vars(options)
        args = cmd_options.pop('args', ())
    else:
        options, args = parser.parse_args(argv[3:])
    handle_default_options(options)
    try:
        subcommand_class.execute(*args, **options.__dict__)
    except Exception as e:
        if not isinstance(e, CommandError):
            # Prefer SENTRY_DSN, fall back to RAVEN_CONFIG['dsn']; otherwise re-raise.
            if hasattr(settings, 'SENTRY_DSN'):
                dsn = settings.SENTRY_DSN
            elif hasattr(settings, 'RAVEN_CONFIG'):
                dsn = settings.RAVEN_CONFIG.get('dsn')
            else:
                raise
            sentry = Client(dsn)
            # Skip reporting when the client is disabled (e.g. empty DSN).
            if not sentry.is_enabled():
                raise
            sentry.get_ident(sentry.captureException())
        self._write_error_in_stderr(e)
def save(self, *a, **k):
    """Override of save(): report this error to Sentry instead of persisting
    it to the database. Positional/keyword arguments are accepted but unused."""
    log_level = self.log_level  # attribute read kept from the original
    payload = {
        get_filename(self.js_url): {
            'url': self.js_url,
            'data': {},
            'query_string': '...',
            'method': 'POST',
        },
        'logger': 'front_end',
        'site': 'site.name',
    }
    if self.extra:
        payload["extra"] = json.loads(self.extra)
    Client(settings.SENTRY_DSN).capture(
        "Message",
        message=self.message,
        data=payload,
    )
def handle(self, *args, **kwargs):
    """Management command: queue updates for the least-recently-updated feeds.

    With a pk argument, update that single feed immediately instead.
    """
    if args:
        pk = args[0]
        feed = UniqueFeed.objects.get(pk=pk)
        return update_feed(feed.url, use_etags=False)
    # This command is run every 5 minutes. Don't queue more than
    # 5/45 = a ninth of the total number of feeds.
    limit = UniqueFeed.objects.count() / 9
    uniques = UniqueFeed.objects.filter(
        muted=False,
    ).order_by('last_update')[:limit]
    for unique in uniques:
        try:
            if unique.should_update():
                enqueue(update_feed, unique.url, timeout=20)
        except Exception:
            # We don't know what to expect, and anyway
            # we're reporting the exception
            if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
                raise
            else:
                client = Client(dsn=settings.SENTRY_DSN)
                client.captureException()
    connection.close()
def connect_to_sentry() -> RavenClient:
    """Connect to Sentry and enable sys hook for pyinstaller failures.

    The DSN is read from a local file named ``sentry``.
    """
    with open("sentry") as dsn_file:
        dsn = dsn_file.read().strip()
    client = RavenClient(dsn)
    client.install_sys_hook()
    return client
class SentryPlugin(object):
    """Bottle plugin (Python 2): report unhandled non-HTTPError exceptions to Sentry."""

    name = 'sentry'

    def __init__(self, dsn, **kwargs):
        # The client is created lazily in setup(); extra kwargs go to Client().
        self.client = None
        self.kwargs = kwargs
        self.dsn = dsn

    def setup(self,app):
        """Create the Raven client once, on first installation into the app."""
        for other in app.plugins:
            if not isinstance(other, SentryPlugin):
                continue
            if self.client is None:
                self.client = Client(self.dsn, **self.kwargs)

    def apply(self,callback,context):
        """Wrap a route callback: capture exceptions to Sentry, then re-raise."""
        def wrapper(*args,**kwargs):
            try:
                rv = callback(*args, **kwargs)
            except Exception, e:
                # HTTPError is normal Bottle control flow, not an error to report.
                if not isinstance(e, HTTPError):
                    self.client.captureException()
                raise
            return rv
        return wrapper
def error_trigger(request):
    """Admin view that deliberately raises an error for testing error handling.

    On POST with capture_with_raven checked, the exception is sent to Sentry
    and its id shown via messages; otherwise a NameError is raised so the
    normal error pipeline fires.
    """
    context = {}
    if request.method == 'POST':
        form = forms.TriggerErrorForm(request.POST)
        if form.is_valid():
            if form.cleaned_data['capture_with_raven']:
                try:
                    dsn = settings.RAVEN_CONFIG['dsn']
                except AttributeError:
                    messages.error(
                        request,
                        "No settings.RAVEN_CONFIG['dsn'] set up"
                    )
                    return redirect('manage:error_trigger')
                client = Client(dsn)
                # Raise and immediately catch so captureException has a
                # current exception to report.
                try:
                    raise NameError(form.cleaned_data['message'])
                except NameError:
                    messages.info(
                        request,
                        str(client.captureException())
                    )
                return redirect('manage:error_trigger')
            raise NameError(
                'MANUAL ERROR TRIGGER: %s' % form.cleaned_data['message']
            )
    else:
        form = forms.TriggerErrorForm()
    context['form'] = form
    return render(request, 'manage/error_trigger.html', context)
def main():
    """``raven test`` entry point (Python 2): send a rich test event to the DSN
    from argv or $SENTRY_DSN and print the result."""
    root = logging.getLogger('sentry.errors')
    root.setLevel(logging.DEBUG)
    root.addHandler(logging.StreamHandler())
    parser = OptionParser()
    # --data takes a JSON string, decoded by the store_json callback.
    parser.add_option("--data", action="callback", callback=store_json,
                      type="string", nargs=1, dest="data")
    (opts, args) = parser.parse_args()
    dsn = ' '.join(args[1:]) or os.environ.get('SENTRY_DSN')
    if not dsn:
        print "Error: No configuration detected!"
        print "You must either pass a DSN to the command, or set the SENTRY_DSN environment variable."
        sys.exit(1)
    print "Using DSN configuration:"
    print " ", dsn
    print
    client = Client(dsn, include_paths=['raven'])
    print "Client configuration:"
    for k in ('servers', 'project', 'public_key', 'secret_key'):
        print ' %-15s: %s' % (k, getattr(client, k))
    print
    if not all([client.servers, client.project, client.public_key, client.secret_key]):
        print "Error: All values must be set!"
        sys.exit(1)
    print 'Sending a test message...',
    ident = client.get_ident(client.captureMessage(
        message='This is a test message generated using ``raven test``',
        data=opts.data or {
            'culprit': 'raven.scripts.runner',
            'logger': 'raven.test',
            'sentry.interfaces.Http': {
                'method': 'GET',
                'url': 'http://example.com',
            }
        },
        level=logging.INFO,
        stack=True,
        extra={
            'user': pwd.getpwuid(os.geteuid())[0],
            'loadavg': os.getloadavg(),
        }
    ))
    if client.state.did_fail():
        print 'error!'
        return False
    print 'success!'
    print
    print 'The test message can be viewed at the following URL:'
    # Derive the web UI base from the first store endpoint.
    url = client.servers[0].split('/api/store/', 1)[0]
    print ' %s/%s/search/?q=%s' % (url, client.project, ident)
class SentryPlugin(Plugin):
    """Plugin that reports container errors to Sentry through a Raven client."""

    def __init__(self, container, dsn=None, **kwargs):
        """Create the client and register on_error with the container's error hook."""
        self.container = container
        self.client = Client(dsn)
        container.error_hook.install(self.on_error)

    def on_error(self, exc_info):
        """Forward the sys.exc_info()-style tuple to Sentry."""
        self.client.captureException(exc_info)
def __init__(self, sentry_dsn, descriptor=None, data_limit=100, sentry_client_kwargs=None):
    """Build the Sentry client (passing through any extra client kwargs) and
    delegate the remaining setup to the base class."""
    extra_kwargs = sentry_client_kwargs or {}
    self.sentry_client = SentryClient(dsn=sentry_dsn, **extra_kwargs)
    super(ErrorSentry, self).__init__(descriptor=descriptor, data_limit=data_limit)
def send_exception(exinfo):
    """Report *exinfo* to the project's Sentry instance, when reporting is
    both possible and enabled."""
    if not (can_send() and errorreporting):
        return
    reporter = Client(
        dsn='http://*****:*****@sentry.kartoza.com/17',
        release=roam.__version__,
    )
    roam.utils.info("Sending error report.")
    reporter.captureException(exinfo)
def ravenify(*args, **kwargs):
    """Closure wrapper: run ``function`` and, on any exception, report it to
    Sentry (unless DEBUG) before re-raising.

    NOTE(review): ``function`` and ``settings`` come from the enclosing
    decorator scope, which is not visible here.
    """
    try:
        function(*args, **kwargs)
    except Exception:
        if not settings.DEBUG:
            client = Client(dsn=settings.SENTRY_DSN)
            client.captureException()
        raise
def common_except_info(info, from_function):
    """Send an informational message to Sentry.

    by: Ma Zhi  at: 2015-11-27

    Args:
        info: the message text to report.
        from_function: name of the calling function, included for context.
    """
    client = Client(settings.SENTRY_CLIENT_KEY)
    client.captureMessage("info:%s,from:%s" % (info, from_function))
def log_sentry_error(msg):
    """Report *msg* to Sentry; quietly do nothing when Raven is not configured."""
    try:
        # Either the settings attribute or the 'dsn' key may be absent.
        sentry = Client(settings.RAVEN_CONFIG['dsn'])
    except (AttributeError, KeyError):
        # If RAVEN_CONFIG isn't set, we can't log the error.
        return
    return sentry.captureMessage(msg)
def handle(self, *args, **kwargs):
    """Run handle_sentry(); in production (non-DEBUG, DSN configured) report
    any failure to Sentry instead of letting it propagate."""
    try:
        self.handle_sentry(*args, **kwargs)
    except Exception:
        # In DEBUG, or without a configured DSN, let the exception surface.
        if settings.DEBUG or 'SENTRY_DSN' not in os.environ:
            raise
        Client().captureException()
def post_process(self, group, event, is_new, is_sample, **kwargs):
    """Sentry plugin hook: forward this event to another Sentry instance using
    the DSN configured per-project in the plugin options."""
    sentry_dsn = self.get_option('sentry_dsn', event.project)
    client = Client(dsn=sentry_dsn)
    data = event.as_dict()
    # Strip the source event id so the target Sentry assigns its own.
    del data['id']
    data['message'] = event.message
    data = client.encode(data)
    client.send_encoded(message=data)
def clean_url(self): url = URLObject(self.cleaned_data["url"]) # URLObject doesn't handle ipv6 very well yet. In the meantime, ... if url.netloc.count(":") > 3: raise forms.ValidationError(_("Enter a valid URL.")) URLValidator()(url.without_auth()) if url.scheme not in ["http", "https"]: raise forms.ValidationError( _("Invalid URL scheme: '%s'. Only HTTP and HTTPS are " "supported.") % url.scheme ) if url.netloc.hostname in ["localhost", "127.0.0.1", "::1"]: raise forms.ValidationError(_("Enter a valid URL.")) try: validate_ipv46_address(url.netloc.hostname) except forms.ValidationError: pass else: raise forms.ValidationError(_("Enter a valid URL.")) existing = self.user.feeds.filter(url=url) if self.instance is not None: existing = existing.exclude(pk=self.instance.pk) if existing.exists(): raise forms.ValidationError(_("It seems you're already subscribed to this feed.")) auth = None if url.auth != (None, None): auth = url.auth # Check this is actually a feed with user_lock("feed_check", self.user.pk, timeout=30): headers = {"User-Agent": USER_AGENT % "checking feed", "Accept": feedparser.ACCEPT_HEADER} try: response = requests.get(six.text_type(url.without_auth()), headers=headers, timeout=10, auth=auth) except Exception: if "SENTRY_DSN" in os.environ: client = Client() client.captureException() raise forms.ValidationError(_("Error fetching the feed.")) if response.status_code != 200: raise forms.ValidationError(_("Invalid response code from URL: " "HTTP %s.") % response.status_code) try: parsed = feedparser.parse(response.content) except Exception: raise forms.ValidationError(_("Error parsing the feed.")) if not is_feed(parsed): raise forms.ValidationError(_("This URL doesn't seem to be a valid feed.")) self.cleaned_data["title"] = parsed.feed.title # Cache this in case update_favicon needs it and it's not in the # scheduler data yet. if hasattr(parsed.feed, "link"): cache.set(u"feed_link:{0}".format(url), parsed.feed.link, 600) return url
def connect_sentry(message, result):
    '''
    Connect to the Sentry server and send the state run outcome there.
    Raven config (dsn or legacy server/key settings, tags, flags) is read
    from pillar['raven'].
    '''
    pillar_data = __salt__['pillar.raw']()
    grains = __salt__['grains.items']()
    raven_config = pillar_data['raven']
    hide_pillar = raven_config.get('hide_pillar')
    sentry_data = {
        'result': result,
        'pillar': 'HIDDEN' if hide_pillar else pillar_data,
        'grains': grains
    }
    data = {
        'platform': 'python',
        'culprit': message,
        'level': 'error'
    }
    # Tag the event with the configured grains (e.g. host identity).
    tags = {}
    if 'tags' in raven_config:
        for tag in raven_config['tags']:
            tags[tag] = grains[tag]
    # NOTE(review): `ret` is not defined in this function's signature —
    # presumably a module-level name; confirm it should not be `result` here.
    if ret_is_not_error(ret):
        data['level'] = 'info'
    if raven_config.get('report_errors_only') and data['level'] != 'error':
        return
    if raven_config.get('dsn'):
        client = Client(raven_config.get('dsn'), transport=HTTPTransport)
    else:
        # Fall back to the legacy servers/public_key/secret_key/project config.
        try:
            servers = []
            for server in raven_config['servers']:
                servers.append(server + '/api/store/')
            client = Client(
                servers=servers,
                public_key=raven_config['public_key'],
                secret_key=raven_config['secret_key'],
                project=raven_config['project'],
                transport=HTTPTransport
            )
        except KeyError as missing_key:
            logger.error(
                'Sentry returner needs key \'%s\' in pillar', missing_key
            )
            return
    try:
        msgid = client.capture('raven.events.Message', message=message,
                               data=data, extra=sentry_data, tags=tags)
        logger.info('Message id %s written to sentry', msgid)
    except Exception as exc:
        logger.error(
            'Can\'t send message to sentry: {0}'.format(exc), exc_info=True
        )
def run_job(instance, job_data_instance):
    """Run ``instance.f`` on the stored job data, reporting failures to Sentry.

    NOTE(review): this is a closure — ``self`` (for ``self.cleanup``) and
    ``settings`` come from an enclosing scope not visible here.
    """
    try:
        val = instance.f(job_data_instance.data_dict)
        # Optionally delete the job data once processed successfully.
        if self.cleanup:
            job_data_instance.delete()
        return val
    except:
        # Bare except: any failure (including the cleanup) is captured;
        # nothing is returned in that case.
        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG[u'dsn'])
        raven_client.captureException()
def __init__(self, bot):
    """Remember the bot and build a Raven client when its config has a
    "sentry" section; otherwise leave reporting disabled (raven is None)."""
    self.bot = bot
    self.raven = Client(**bot.config["sentry"]) if "sentry" in bot.config else None
class CrashReporting(object):
    """ Pseudo-Class for remote crash reporting """

    def __init__(self, transport='threaded'):
        # NOTE(review): `transport` is resolved to a class but never passed to
        # Client() below — confirm whether it was meant to be used.
        if transport == 'threaded':
            transport = ThreadedHTTPTransport
        else:
            raise ValueError(f'Unknown transport: {transport}')
        # Sentinel location depends on the release train: /tmp (STABLE) is
        # wiped on reboot, /data (DEVELOPMENT) persists.
        if sw_version_is_stable():
            self.sentinel_file_path = '/tmp/.crashreporting_disabled'
        else:
            self.sentinel_file_path = '/data/.crashreporting_disabled'
        self.logger = logging.getLogger('middlewared.logger.CrashReporting')
        self.client = Client(
            dsn=
            'https://*****:*****@sentry.ixsystems.com/2?timeout=3',
            install_sys_hook=False,
            install_logging_hook=False,
            string_max_length=10240,
            release=sw_version(),
        )

    def is_disabled(self):
        """
        Check the existence of sentinel file and its absolute path against
        STABLE and DEVELOPMENT branches.

        Returns:
            bool: True if crash reporting is disabled, False otherwise.
        """
        # Allow report to be disabled via sentinel file or environment var,
        # if FreeNAS current train is STABLE, the sentinel file path will be /tmp/,
        # otherwise it's path will be /data/ and can be persistent.
        if os.path.exists(self.sentinel_file_path
                          ) or 'CRASHREPORTING_DISABLED' in os.environ:
            return True
        else:
            return False

    def report(self, exc_info, data, t_log_files):
        """
        Send a crash report to Sentry, attaching log-file tails as extra data.

        Args:
            exc_info (tuple): Same as sys.exc_info().
            data: event payload forwarded to captureException.
            t_log_files (tuple): A tuple of (absolute path, name) pairs of log files.
        """
        if self.is_disabled():
            return
        extra_data = {}
        if all(t_log_files):
            payload_size = 0
            for path, name in t_log_files:
                if os.path.exists(path):
                    # Only the last 10 KiB of each log is attached.
                    with open(path, 'r') as absolute_file_path:
                        contents = absolute_file_path.read()[-10240:]
                    # There is a limit for the whole report payload.
                    # Skip the file if it would push us past a reasonable limit.
                    if len(contents) + payload_size > 61440:
                        continue
                    extra_data[name] = contents
                    payload_size += len(contents)
        self.logger.debug('Sending a crash report...')
        try:
            self.client.captureException(exc_info=exc_info, data=data, extra=extra_data)
        except Exception:
            pass  # We don't care about the exception
import logging import urllib2 from datetime import date, datetime, timedelta from operator import itemgetter import libsaas import xmlrpclib from empowering.utils import make_local_timestamp from raven import Client from .amon import AmonConverter from .empowering_tasks import EmpoweringTasks from .utils import (setup_peek, setup_mongodb, setup_empowering_api, Popper, setup_queue) sentry = Client() logger = logging.getLogger('amon') def enqueue_all_amon_measures(tg_enabled=True, bucket=500): if not tg_enabled: return mongo = setup_mongodb() em_tasks = EmpoweringTasks() q = setup_queue(name='measures') serials = open('serials', 'r') for serial in serials: meter_name = serial.replace('\n', '').strip() if not meter_name.startswith('ZIV00'):
def on_configure(self):
    """Celery configuration hook: wire Raven into logging and error handling."""
    raven_client = Client(settings.RAVEN_CONFIG["dsn"])
    # Filter duplicate log records through Sentry's logging integration.
    register_logger_signal(raven_client)
    # Capture task failures via Celery's signal machinery.
    register_signal(raven_client)
from muckrock.foia.models import (
    FOIAFile,
    FOIARequest,
    FOIAMultiRequest,
    FOIACommunication,
)
from muckrock.foia.codes import CODES
from muckrock.task.models import ResponseTask
from muckrock.utils import generate_status_action
from muckrock.vendor import MultipartPostHandler

# URL fragment matching "<jurisdiction>-<jidx>/<slug>-<idx>" in FOIA URLs.
foia_url = r'(?P<jurisdiction>[\w\d_-]+)-(?P<jidx>\d+)/(?P<slug>[\w\d_-]+)-(?P<idx>\d+)'

logger = logging.getLogger(__name__)

# Celery error reporting: hook Raven into logging and Celery's signals.
client = Client(os.environ.get('SENTRY_DSN'))
register_logger_signal(client)
register_signal(client)


class FOIAOptions(dbsettings.Group):
    """DB settings for the FOIA app"""
    # Toggles for the automated follow-up machinery.
    enable_followup = dbsettings.BooleanValue(
        'whether to send automated followups or not')
    enable_weekend_followup = dbsettings.BooleanValue(
        'whether to send automated followups or not on the weekends')

foia_options = FOIAOptions()
# Route tasks across three priority queues; 'default' is used unless a task
# is routed elsewhere.
app.conf.task_queues = (
    Queue('default', routing_key='celery'),
    Queue('high_prio', routing_key='high'),
    Queue('higher_prio', routing_key='higher'),
)
app.conf.task_default_queue = 'default'
app.conf.task_default_exchange = 'celery'
app.conf.task_default_exchange_type = 'direct'

# Optional Sentry wiring, active only when a DSN is configured on the Flask app.
sentry = None
if 'SENTRY_DSN' in flaskapp.config:
    from raven import Client
    from raven.contrib.celery import register_signal, register_logger_signal
    sentry = Client(
        flaskapp.config['SENTRY_DSN'],
        release=version.get_versions()['version'])
    register_logger_signal(sentry)
    register_signal(sentry)


class DBTask(Task):
    """Base task class that always closes the DB session when a task finishes."""

    def __call__(self, *args, **kwargs):
        try:
            super().__call__(*args, **kwargs)
        finally:
            # Release the SQLAlchemy session even when the task raised.
            db.session.close()

app.Task = DBTask
pyximport.install()

import json
import os
import subprocess

from delorean import epoch
from raven import Client

from gryphon.data_service.consts import *
from gryphon.data_service.queue_consumer import QueueConsumer
from gryphon.lib import session
from gryphon.lib.models.emeraldhavoc.exchange_volume import ExchangeVolume
from gryphon.lib.money import Money

# Module-level Sentry client; DSN comes from the environment (may be unset).
s = Client(dsn=os.environ.get('SENTRY_DSN'))


def exchange_volumes_consumer_function(message, db):
    """Queue consumer: build an ExchangeVolume row from one JSON message.

    NOTE(review): as visible here the row ``t`` is constructed but never
    persisted to ``db`` — presumably the original continues; confirm.
    """
    # Heartbeat file for monit liveness checks.
    subprocess.call(['touch', 'monit/heartbeat/exchange_volumes_consumer.txt'])
    exchange_volume_json = json.loads(message)
    timestamp = epoch(exchange_volume_json['timestamp']).datetime
    exchange = exchange_volume_json['exchange_name']
    exch_vol_money = Money(exchange_volume_json['volume'], 'BTC')
    t = ExchangeVolume(
        exchange_volume=exch_vol_money,
        exchange=exchange,
        timestamp=timestamp,
    )
def main():
    """Entry point: parse args, build the fMRIPrep workflow in a subprocess,
    optionally ping Sentry, run the workflow, and generate reports."""
    from nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from .. import __version__
    from ..viz.reports import generate_reports
    from ..utils.bids import write_derivative_description
    set_start_method('forkserver')
    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()

    # FreeSurfer license
    default_license = str(Path(os.getenv('FREESURFER_HOME')) / 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or os.getenv('FS_LICENSE', default_license)
    if not os.path.exists(license_file):
        raise RuntimeError(
            'ERROR: a valid license file is required for FreeSurfer to run. '
            'FMRIPREP looked for an existing license file at several paths, in this '
            'order: 1) command line argument ``--fs-license-file``; 2) ``$FS_LICENSE`` '
            'environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. '
            'Get it (for free) by registering at https://'
            'surfer.nmr.mgh.harvard.edu/registration.html')
    os.environ['FS_LICENSE'] = license_file

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    errno = 0

    # Call build_workflow(opts, retval) in a child process so its heavy
    # imports and side effects don't pollute this one.
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        if p.exitcode != 0:
            sys.exit(p.exitcode)

        fmriprep_wf = retval['workflow']
        plugin_settings = retval['plugin_settings']
        bids_dir = retval['bids_dir']
        output_dir = retval['output_dir']
        work_dir = retval['work_dir']
        subject_list = retval['subject_list']
        run_uuid = retval['run_uuid']
        retcode = retval['return_code']

    if fmriprep_wf is None:
        sys.exit(1)

    if opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format='svg', simple_form=True)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    # Sentry tracking
    if not opts.notrack:
        # Best-effort usage ping; any failure is ignored.
        try:
            from raven import Client
            dev_user = bool(int(os.getenv('FMRIPREP_DEV', 0)))
            msg = 'fMRIPrep running%s' % (int(dev_user) * ' [dev]')
            client = Client(
                'https://*****:*****@sentry.io/1137693',
                release=__version__)
            client.captureMessage(message=msg,
                                  level='debug' if dev_user else 'info',
                                  tags={
                                      'run_id': run_uuid,
                                      'npart': len(subject_list),
                                      'type': 'ping',
                                      'dev': dev_user})
        except Exception:
            pass

    # Check workflow for missing commands
    missing = check_deps(fmriprep_wf)
    if missing:
        print("Cannot run fMRIPrep. Missing dependencies:")
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)

    # Clean up master process before running workflow, which may create forks
    gc.collect()
    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        # A dirty-but-finished run still produces reports; anything else re-raises.
        if "Workflow did not execute cleanly" in str(e):
            errno = 1
        else:
            raise

    # Generate reports phase
    errno += generate_reports(subject_list, output_dir, work_dir, run_uuid)
    write_derivative_description(bids_dir, str(Path(output_dir) / 'fmriprep'))
    sys.exit(int(errno > 0))
from raven import Client

# Google API scopes needed to read the spreadsheet via Drive.
SCOPE = [
    'https://spreadsheets.google.com/feeds',
    'https://www.googleapis.com/auth/drive'
]

# Layout of the warehouse spreadsheet ("СКЛАД" = "WAREHOUSE").
TABLE_NAME = 'СКЛАД'
WORKSHEET_NO = 0
HEAD_ROW = 2
COLUMNS = ['НАИМЕНОВАНИЕ', 'Артикул', 'Олег', 'Женя']
MAIN_COLUMNS = ['НАИМЕНОВАНИЕ', 'Артикул']
DELIMITER = '\t'

# Local settings are expected to provide DSN (and may override the above).
from .local import *

raven_client = Client(DSN)
def run(base_dir, start_gunicorn_app=True):
    """Bootstrap a Zato parallel server from *base_dir* and optionally run it.

    base_dir - the server's base directory (contains ./config/repo)
    start_gunicorn_app - when True (default) this call blocks running the
        gunicorn app; when False the configured WSGI application object is
        returned instead (useful for embedding and tests).
    """
    # Store a pidfile before doing anything else
    store_pidfile(base_dir)

    # For dumping stacktraces
    register_diag_handlers()

    # Capture warnings to log files
    logging.captureWarnings(True)

    # Start initializing the server now
    os.chdir(base_dir)

    # Optional pure-Python MySQL driver masquerading as MySQLdb.
    try:
        import pymysql
        pymysql.install_as_MySQLdb()
    except ImportError:
        pass

    # We're doing it here even if someone doesn't use PostgreSQL at all
    # so we're not surprised when someone suddenly starts using PG.
    # TODO: Make sure it's registered for each of the subprocess
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)

    repo_location = os.path.join(base_dir, 'config', 'repo')

    # Configure the logging first, before configuring the actual server.
    logging.addLevelName('TRACE1', TRACE1)
    with open(os.path.join(repo_location, 'logging.conf')) as f:
        # NOTE(review): yaml.load without an explicit Loader — acceptable for
        # a trusted local config file, but yaml.safe_load would be stricter.
        dictConfig(yaml.load(f))

    logger = logging.getLogger(__name__)
    kvdb_logger = logging.getLogger('zato_kvdb')

    config = get_config(repo_location, 'server.conf')

    # New in 2.0 - Start monitoring as soon as possible
    if config.get('newrelic', {}).get('config'):
        import newrelic.agent
        newrelic.agent.initialize(
            config.newrelic.config, config.newrelic.environment or None,
            config.newrelic.ignore_errors or None,
            config.newrelic.log_file or None, config.newrelic.log_level or None)

    # New in 2.0 - override gunicorn-set Server HTTP header
    gunicorn.SERVER_SOFTWARE = config.misc.get('http_server_header', 'Zato')

    # Store KVDB config in logs, possibly replacing its password if told to
    kvdb_config = get_kvdb_config_for_log(config.kvdb)
    kvdb_logger.info('Master process config `%s`', kvdb_config)

    # New in 2.0 hence optional
    user_locale = config.misc.get('locale', None)
    if user_locale:
        locale.setlocale(locale.LC_ALL, user_locale)
        # Log a sample formatted amount so misconfigured locales surface early.
        value = 12345
        logger.info('Locale is `%s`, amount of %s -> `%s`', user_locale, value,
                    locale.currency(value, grouping=True).decode('utf-8'))

    # Spring Python
    app_context = get_app_context(config)

    # Makes queries against Postgres asynchronous
    if asbool(config.odb.use_async_driver) and config.odb.engine == 'postgresql':
        make_psycopg_green()

    # New in 2.0 - Put HTTP_PROXY in os.environ.
    http_proxy = config.misc.get('http_proxy', False)
    if http_proxy:
        os.environ['http_proxy'] = http_proxy

    crypto_manager = get_crypto_manager(repo_location, app_context, config)
    parallel_server = app_context.get_object('parallel_server')

    zato_gunicorn_app = ZatoGunicornApplication(parallel_server, repo_location, config.main, config.crypto)

    # Hand the server everything it needs before gunicorn forks workers.
    parallel_server.crypto_manager = crypto_manager
    parallel_server.odb_data = config.odb
    parallel_server.host = zato_gunicorn_app.zato_host
    parallel_server.port = zato_gunicorn_app.zato_port
    parallel_server.repo_location = repo_location
    parallel_server.base_dir = base_dir
    parallel_server.tls_dir = os.path.join(parallel_server.base_dir, 'config', 'repo', 'tls')
    parallel_server.fs_server_config = config
    parallel_server.user_config.update(config.user_config_items)
    parallel_server.startup_jobs = app_context.get_object('startup_jobs')
    parallel_server.app_context = app_context

    # Remove all locks possibly left over by previous server instances
    kvdb = app_context.get_object('kvdb')
    kvdb.component = 'master-proc'
    clear_locks(kvdb, config.main.token, config.kvdb, crypto_manager.decrypt)

    # Turn the repo dir into an actual repository and commit any new/modified files
    RepoManager(repo_location).ensure_repo_consistency()

    # New in 2.0 so it's optional.
    profiler_enabled = config.get('profiler', {}).get('enabled', False)

    # New in 2.0 so it's optional.
    sentry_config = config.get('sentry')

    # NOTE(review): if the 'sentry' section is absent, config.get('sentry')
    # presumably still yields a dict-like object — a plain None would make the
    # .pop below raise. Verify against get_config's behavior.
    dsn = sentry_config.pop('dsn', None)
    if dsn:
        from raven import Client
        from raven.handlers.logging import SentryHandler

        handler_level = sentry_config.pop('level')

        # Remaining sentry_config keys are passed straight to raven's Client.
        client = Client(dsn, **sentry_config)
        handler = SentryHandler(client=client)
        handler.setLevel(getattr(logging, handler_level))

        # Attach the Sentry handler to the root logger and to every zato logger.
        logger = logging.getLogger('')
        logger.addHandler(handler)

        for name in logging.Logger.manager.loggerDict:
            if name.startswith('zato'):
                logger = logging.getLogger(name)
                logger.addHandler(handler)

    if asbool(profiler_enabled):
        profiler_dir = os.path.abspath(os.path.join(base_dir, config.profiler.profiler_dir))
        parallel_server.on_wsgi_request = ProfileMiddleware(
            parallel_server.on_wsgi_request,
            log_filename = os.path.join(profiler_dir, config.profiler.log_filename),
            cachegrind_filename = os.path.join(profiler_dir, config.profiler.cachegrind_filename),
            discard_first_request = config.profiler.discard_first_request,
            flush_at_shutdown = config.profiler.flush_at_shutdown,
            path = config.profiler.url_path,
            unwind = config.profiler.unwind)

    # New in 2.0 - set environment variables for servers to inherit
    os_environ = config.get('os_environ', {})
    for key, value in os_environ.items():
        os.environ[key] = value

    # Run the app at last
    if start_gunicorn_app:
        zato_gunicorn_app.run()
    else:
        return zato_gunicorn_app.zato_wsgi_app
consoleHandler.setLevel(consoleLevel) # intialize the user preferences from bauble.prefs import prefs, use_sentry_client_pref prefs.init() try: # no raven.conf.setup_logging: just standard Python logging from raven import Client from raven.handlers.logging import SentryHandler # only register the sentry client if the user agrees on it if prefs[use_sentry_client_pref]: logger.debug('registering sentry client') sentry_client = Client('https://*****:*****@' 'app.getsentry.com/45704') handler = SentryHandler(sentry_client) logging.getLogger().addHandler(handler) handler.setLevel(logging.WARNING) else: logger.debug('not registering sentry client') except Exception, e: logger.warning("can't configure sentry client") logger.debug('%s - %s' % (type(e), e)) import gtk.gdk import pygtk if not main_is_frozen(): pygtk.require("2.0")
with open('config.yml', 'r') as config: try: config = yaml.load(config) except yaml.YAMLError as ex: logging.error(ex) ENV = os.getenv('ENV', 'staging') mysql_host = config[ENV]['database']['host'] mysql_port = config[ENV]['database']['port'] mysql_user = config[ENV]['database']['user'] mysql_passwd = config[ENV]['database']['pass'] mysql_db = config[ENV]['database']['dbname'] dayu_key = config[ENV]['dayu']['key'] dayu_secret = config[ENV]['dayu']['secret'] tpl_id = config[ENV]['dayu']['tpl_id'] sentry = Client(config['common']['raven']['url']) conn = mysql.connect(host=mysql_host, user=mysql_user, passwd=mysql_passwd, db=mysql_db, port=mysql_port) conn.autocommit(1) cur = conn.cursor(mysql.cursors.DictCursor) def send_sms(phone, param): req = top.api.AlibabaAliqinFcSmsNumSendRequest() req.set_app_info(top.appinfo(dayu_key, dayu_secret)) req.extend = "" req.sms_type = "normal"
def __init__(self, *args, **kwargs): from django.conf import settings super().__init__(*args, **kwargs) client = Client(settings.SENTRY_DSN, transport=HTTPTransport) register_sentry(client, self)
import boto from raven import Client from omaha.models import Version as OmahaVersion from omaha.utils import valuedispatch from sparkle.models import SparkleVersion from crash.models import Crash, Symbols from feedback.models import Feedback from dynamic_preferences_registry import global_preferences_manager as gpm dsn = getattr(settings, 'RAVEN_CONFIG', None) if dsn: dsn = dsn['dsn'] raven = Client(dsn, name=getattr(settings, 'HOST_NAME', None), release=getattr(settings, 'APP_VERSION', None)) @valuedispatch def bulk_delete(cls, qs): raise NotImplementedError @bulk_delete.register(Crash) def _(cls, qs): if settings.DEFAULT_FILE_STORAGE == 'omaha_server.s3utils.S3Storage': qs = s3_bulk_delete(qs, file_fields=['archive', 'upload_file_minidump'], s3_fields=['minidump_archive', 'minidump'])
class KernelPanicReceiver(object):
    """Receives kernel panic logs over UDP and forwards them to Sentry.

    Datagrams are accumulated per sender address. Once a sender has been
    quiet for ~2 seconds, the collected logs are parsed into a title, user,
    fingerprint, message, tags and extra fields (all parsers replaceable via
    the register_* methods) and sent to Sentry as a single fatal event.
    """

    @staticmethod
    def default_parser_title__(addr, klogs):
        # Use the first well-known panic marker found in the logs as a title.
        key_words = [
            "BUG",
            "Kernel panic",
            "kernel stack overflow",
            "divide error",
            "general protection fault",
            "SMP"
        ]
        for key in key_words:
            title = find_and_slice(klogs, key)
            if title is not None:
                break
        if title is None:
            title = "Unknown error"
        # Collapse all whitespace runs into single spaces.
        title = " ".join(title.split())
        # NOTE(review): the token searched for ("[kmdolve]") and the prefix
        # added ("[kmodlve] ") are spelled differently — one of the two looks
        # like a typo; confirm against the actual kernel log format.
        idx = klogs.find("[kmdolve]")
        if idx != -1:
            title = "[kmodlve] " + title
        return title

    @staticmethod
    def default_parser_user__(addr, klogs):
        # Default "user" is the sender's IP address.
        return addr[0]

    @staticmethod
    def default_parser_fingerprint__(addr, klogs):
        # By default, group events in Sentry by their title.
        return KernelPanicReceiver.default_parser_title__(addr, klogs)

    @staticmethod
    def default_parser_message__(addr, klogs):
        return "\n\nKERNEL LOGS:\n\n" + klogs

    def __init__(self, listen_host, listen_port, sentry_dsn):
        """
        listen_host, listen_port - ip/port that will be listened to
        sentry_dsn - sentry DSN
        """
        # sentry init
        self._sentry_client = Client(sentry_dsn)
        # server init
        self._host = listen_host
        self._port = listen_port
        self._server_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self._server_socket.bind((self._host, self._port))
        # data structure init
        self.received_all = {}           # addr -> accumulated raw bytes
        self.received_all_mtx = Lock()   # guards received_all
        self.data_available = False      # flag: list_recv has new entries
        self.list_recv = []              # freshly received (data, addr) pairs
        self.server_mutex = Lock()       # guards list_recv / data_available
        self._parser_title = KernelPanicReceiver.default_parser_title__
        self._parser_user = KernelPanicReceiver.default_parser_user__
        self._parser_fingerprint = KernelPanicReceiver.default_parser_fingerprint__
        self._parser_message = KernelPanicReceiver.default_parser_message__
        self._parsers_tags = []
        self._parsers_extra = []

    def send_to_sentry_(self, title, fingerprint, message, user_id, tags, extra):
        # Build and send a raw Sentry event; 'fatal' level, grouped by
        # fingerprint so recurring panics collapse into one issue.
        event = {
            'message': title + message,
            'fingerprint': [fingerprint],
            'level': 'fatal',
            'user': {
                'id': user_id
            },
            'tags': tags,
            'extra': extra,
            'platform': 'python'
        }
        return self._sentry_client.send(**event)

    def _process_panic_msg(self, key):
        """
        Process logs when they are all received
        key - list with ip / port
        """
        # Take this sender's buffer out of the shared dict under the lock.
        with self.received_all_mtx:
            klogs = self.received_all[key].decode("ascii")
            self.received_all.pop(key)
        title = self._parser_title(key, klogs)
        user = self._parser_user(key, klogs)
        fingerprint = self._parser_fingerprint(key, klogs)
        message = self._parser_message(key, klogs)
        # Run the optional user-registered tag hooks; None results are skipped.
        tags = []
        for f_parser in self._parsers_tags:
            ret = f_parser(key, klogs)
            if ret is not None:
                tags.append(ret)
        # Extra hooks return (key, value) pairs.
        extra = {}
        for f_parser in self._parsers_extra:
            ret = f_parser(key, klogs)
            if ret is not None:
                extra[ret[0]] = ret[1]
        self.send_to_sentry_(title, fingerprint, message, user, tags, extra)
        print('[', datetime.now(), '] ', key, ' sending logs to sentry [DONE]', sep='')

    def _wait_for_all_data(self, key):
        """
        Waits for all udp packages from specific host are received
        (if for past 2 seconds there were no new packages we suppose
        that's all data)
        key - list with ip / port
        """
        with self.received_all_mtx:
            cur_len = len(self.received_all.get(key))
        prev_len = 0
        # wait for all logs are received: poll every 2s until the buffer
        # stops growing
        while prev_len != cur_len:
            prev_len = cur_len
            print('[', datetime.now(), '] ', key, " waiting for all data", sep='')
            time.sleep(2)
            self.received_all_mtx.acquire()
            cur_len = len(self.received_all.get(key))
            self.received_all_mtx.release()
        self._process_panic_msg(key)

    def _monitor_data(self):
        """
        Waits for new data in global list_recv list
        which contains received udp packages
        """
        while True:
            # Busy-wait (with a short sleep) until the receive loop flags data.
            while not self.data_available:
                time.sleep(0.01)
            # Swap out the shared receive list under the lock.
            self.server_mutex.acquire()
            local_list_recv = self.list_recv
            self.list_recv = []
            self.data_available = False
            self.server_mutex.release()
            while local_list_recv:
                d = local_list_recv.pop(0)
                received = d[0]
                client_addr = d[1]
                self.received_all_mtx.acquire()
                # first udp package we get from the specific host: start a
                # per-sender watcher thread that waits for the rest
                if self.received_all.get(client_addr) is None:
                    self.received_all[client_addr] = received
                    thread = Thread(target=self._wait_for_all_data,
                                    args=(client_addr, ), daemon=True)
                    thread.start()
                # not first
                else:
                    self.received_all[client_addr] += received
                self.received_all_mtx.release()

    def register_parser_title(self, f_parser):
        # Replace the default title parser.
        self._parser_title = f_parser

    def register_parser_user(self, f_parser):
        # Replace the default user parser.
        self._parser_user = f_parser

    def register_parser_fingerprint(self, f_parser):
        # Replace the default fingerprint parser.
        self._parser_fingerprint = f_parser

    def register_parser_message(self, f_parser):
        # Replace the default message parser.
        self._parser_message = f_parser

    def register_parser_tag(self, f_parser):
        """
        Register a hook that will be called when all logs from a
        specific host are received
        A returned value of the hook will be sent to sentry as tag
        args:
            f_parser - a function (hook) that has to be registered.
                prototype: f_parser(addr, klogs)
                    addr - list with client's ip / port
                    klogs - string with all logs
        Return value: none
        """
        self._parsers_tags.append(f_parser)

    def unregister_parser_tag(self, f_parser):
        """
        Unregisters a function registered with register_parser_tag()
        args:
            f_parser - a function that has to be unregistered
        Return value:
            True - successfully unregistered
            False - error occurred
        """
        # Bare except: f_parser may simply not be in the list (ValueError).
        try:
            self._parsers_tags.remove(f_parser)
        except:
            return False
        return True

    def register_parser_extra(self, f_parser):
        """
        Register a hook that will be called when all logs from a
        specific host are received
        A returned value of the hook will be sent to sentry as extra field
        args:
            f_parser - a function (hook) that has to be registered.
                prototype: f_parser(addr, klogs)
                    addr - list with client's ip / port
                    klogs - string with all logs
        Return value: none
        """
        self._parsers_extra.append(f_parser)

    def unregister_parser_extra(self, f_parser):
        """
        Unregisters a function registered with register_parser_extra()
        args:
            f_parser - a function to unregister
        Return value:
            True - successfully unregistered
            False - error occurred
        """
        try:
            self._parsers_extra.remove(f_parser)
        except:
            return False
        return True

    def start_receiving_logs(self):
        """
        Starts receiving logs
        Blocking method
        """
        # Background consumer thread; this thread then loops on recvfrom.
        thread = Thread(target=self._monitor_data, args=(), daemon=True)
        thread.start()
        print('[', datetime.now(), '] ', 'Start listening...', sep='')
        while True:
            d = self._server_socket.recvfrom(8192)
            self.server_mutex.acquire()
            self.list_recv.append(d)
            self.data_available = True
            self.server_mutex.release()
from newsbot.models import ( Account, AccountStats, Bulletin, ChatUser, ChatUserAnswer, Fragment, PollQuestions, ProfileStory, ProfileStoryFragment, Story ) logger = get_task_logger(__name__) raven_client = Client(settings.SENTRY_DSN) class BotBase(metaclass=ABCMeta): WELCOME_MESSAGES = [ 'Welcome to bulletins.chat!', ('We will show you a set of news, all you need to tell me if you need ' 'to continue with the story I am telling you or go to next story!') ] @abstractmethod def extract_username(self, msg): pass @abstractmethod def send_welcome(self, chat_id):
return make_error(message) else: return format_result({ 'token': result, }) @app.route('/crossdomain.xml') def crossdomain(): response.content_type = 'application/xml' return template('crossdomain') app.install(validate_format) app_v1.install(validate_format) app.mount('/v1', app_v1) SENTRY_DSN = os.getenv('SENTRY_DSN') if SENTRY_DSN: sentry_client = Client(SENTRY_DSN) app = Sentry(app, sentry_client) app_v1 = Sentry(app_v1, sentry_client) def _standalone(port=9876): run(app=app, host='0.0.0.0', port=port) if __name__ == "__main__": _standalone()
"zoneid": vmzoneid, "zonename": vmzonename, "ipaddress": vmipaddress, "macaddress": vmmacaddress, "securitygroupid": vmsecuritygroupid, "securitygroupname": vmsecuritygroupname, "@source_host": sourcehost, "@source_path": scriptpath, "@source": scriptpath, "@type": "ACS-instancereport", "@message": "ACS instance report for vm %s" % vmid, "@timestamp": timestamp, } records.append(doc) # index documents bulk(es, records, index=esindex, doc_type=DOCTYPE) # main if __name__ == "__main__": args = main() try: get_stats(args) except Exception: if args["sentryapikey"] is None: raise else: client = Client(dsn=args["sentryapikey"]) client.captureException()
async def initialize_cog(self): """Saves the original cmd error handler""" if SENTRY_SUPPORT: self.client = Client(self.bot.config.sentry_url, transport=AioHttpTransport)
def make_vidispine_request(agent, method, urlpath, body, headers, content_type='application/xml'): import base64 from pprint import pprint from vsexception import VSException auth = base64.encodestring( '%s:%s' % (settings.VIDISPINE_USERNAME, settings.VIDISPINE_PASSWORD)).replace( '\n', '') headers['Authorization'] = "Basic %s" % auth headers['Content-Type'] = content_type #conn.request(method,url,body,headers) if not re.match(r'^/', urlpath): urlpath = '/' + urlpath url = "{0}:{1}{2}".format(settings.VIDISPINE_URL, settings.VIDISPINE_PORT, urlpath) print("URL is %s" % url) print(body) (headers, content) = agent.request(url, method=method, body=body, headers=headers) print(content) pprint(headers) if int(headers['status']) < 200 or int(headers['status']) > 299: try: from raven import Client as RavenClient if not 'dsn' in settings.RAVEN_CONFIG: logger.error( "RAVEN_CONFIG specified but does not specify DSN. Consult Raven documentation for how to set it up properly" ) return c = RavenClient(settings.RAVEN_CONFIG['dsn']) c.user_context({ 'method': method, 'url': url, 'body': body, 'headers': headers, 'content_type': content_type, 'content': content, }) try: e = VSException() #try: e.fromJSON(content) print(content) #except StandardError: #if we did not get valid XML # raise HttpError("Vidispine error: %s" % headers['status']) except HttpError: c.captureException() c.context.clear() raise except VSException: c.captureException() c.context.clear() raise except ImportError: logger.warning( "No Raven installation detected. Sentry will not be notified.") return return (headers, content)
#!/usr/bin/env python # -*- coding:utf-8 -*- __author__ = 'MFC' __time__ = '2019-01-29 23:01' """ https://docs.sentry.io/server/installation/docker/ https://docs.sentry.io/error-reporting/quickstart/?platform=python#pick-a-client-integration Integrated with Python pip install raven --upgrade Install an SDK pip install --upgrade sentry-sdk """ from raven import Client DSN = "" # find it on your sentry client = Client(DSN) try: 1/0 except ZeroDivisionError: client.captureException()
""" from __future__ import absolute_import import os from celery import Celery from trytond.config import config try: from raven import Client from raven.contrib.celery import register_signal except ImportError: pass else: if os.environ.get('SENTRY_DSN'): register_signal(Client(os.environ.get('SENTRY_DSN'))) config.update_etc() broker_url = config.get('async', 'broker_url') backend_url = config.get('async', 'backend_url') app = Celery('trytond_async', broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'), backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'), include=['trytond_async.tasks']) app.conf.update(CELERY_TASK_RESULT_EXPIRES=3600, CELERY_TASK_SERIALIZER='tryson', CELERY_RESULT_SERIALIZER='tryson',
def send_sentry_exception(): sentry_dsn = settings.RAVEN_CONFIG.get('dsn', '') if sentry_dsn.strip(): client = Client(sentry_dsn) client.captureException()
'''启动命令''' # celery -A handle_data worker -l info -Q to_product -P eventlet 生产数据 # celery -A handle_data worker -l info -Q to_analysis -P eventlet 解析数据 # celery -A handle_data worker -l info -Q to_consume -P eventlet 消费数据 # mitmdump.exe -s start_script.py # celery.exe flower --broker=amqp://guest:guest@localhost:5672/test 开启flower后台监控 # celery.exe flower --broker=amqp://cic_admin:JYcxys@[email protected]:5672/yct from raven import Client cli = Client( 'https://*****:*****@sentry.cicjust.com//5' ) # SURL = "mysql+pymysql://cic_admin:TaBoq,,[email protected]:3306/yct_proxy?charset=utf8&autocommit=true" SURL = "mysql+pymysql://cic_admin:[email protected]:3306/yct_proxy?charset=utf8&autocommit=true" # SURL = "mysql+pymysql://root:GHys1234&,><@[email protected]:3306/yct_proxy?charset=utf8&autocommit=true" REDIS_HOST = 'aliyun_redis' # REDIS_HOST = '127.0.0.1' REDIS_PORT = 6379 # host='192.168.1.170', user='******', password='******', database='yct_proxy',charset='utf8' # MYSQL_HOST = '192.168.1.230' MYSQL_HOST = 'yct_mysql' MYSQL_USER = '******' MYSQL_PASSWORD = '******'
the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os try: from raven import Client client = Client(os.environ.get('RAVEN_DNS')) except ImportError: print("[WARN] raven python module not found") from paver.easy import task, needs from paver.easy import sh @task def test(): sh('./manage.py test --settings=omaha_server.settings_test', cwd='omaha_server') @task def test_tox():
def send_sentry_message(message, **kwargs): sentry_dsn = settings.RAVEN_CONFIG.get('dsn', '') if sentry_dsn.strip(): client = Client(sentry_dsn) client.captureMessage(message, **kwargs)
from celery import Celery, Task from raven import Client from channels.apns import Apns from channels.huawei import HuaWei from channels.xiaomi import XiaoMi from channels.meizu import MeiZu from channels.mqtt import MQTT config = configparser.ConfigParser() config.read(os.path.join(os.path.dirname(__file__), "config.ini")) client = Client( 'https://*****:*****@sentry.io/104967' ) BROKER_URL = 'redis://:{}@{}:{}/0'.format(config['redis']['auth'], config['redis']['host'], config['redis']['port']) app = Celery('tasks', broker=BROKER_URL, backend=BROKER_URL) app.conf.update( CELERY_TASK_SERIALIZER='json', CELERY_ACCEPT_CONTENT=['json'], # Ignore other content CELERY_RESULT_SERIALIZER='json', CELERY_TIMEZONE='Asia/Shanghai', CELERY_ENABLE_UTC=True, )
from tornado.escape import json_decode from tornado.escape import json_encode from tornado.options import define, options import requests from raven import Client from handlers.ConnectingPathHandler import FindEdgeLabel, FindOutputHandler, MetaDataHandler, ConnectingPathHandler, EndpointHandler, ConnectingOutputHandler, ConnectingInputHandler, ApiMapHandler, ApiMapHandlerSankey, Input2EndpointHandler, KnowledgeMap, KnowledgeMapPath, Endpoint2OutputHandler from handlers.entitycrawler import Crawler from handlers.basehandler import BaseHandler from handlers.DirectPathHandler import DirectPathHandler from handlers.DirectInput2OutputHandler import DirectInput2OutputHandler from handlers.FindSynonymHandler import SynonymHandler from handlers.SemanticQueryHandler import QuerySemanticsHandler client = Client( 'https://*****:*****@sentry.io/294205' ) class MainHandler(tornado.web.RequestHandler): @tornado.web.addslash def get(self): self.render("index.html", messages=None) class APIHandler(tornado.web.RequestHandler): @tornado.web.addslash def get(self): self.render("api.html", messages=None)
# encoding: utf-8 __author__ = 'mtianyan' __date__ = '2018/3/15 0015 22:15' from raven import Client client = Client( 'http://*****:*****@115.159.122.64:9000//2' ) try: 1 / 0 except ZeroDivisionError: client.captureException()
from datetime import date from typing import List from celery import group from raven import Client from app.core.celery_app import celery_app from app.core.config import settings from app.DSL import dsl_parser client_sentry = Client(settings.SENTRY_DSN) @celery_app.task(acks_late=True) def test_celery(word: str) -> str: return f"test task return {word}" @celery_app.task() def resolve_datestr(datestr: str, year: int) -> date: return dsl_parser(datestr, year) @celery_app.task() def resolve_datestrs(datestrs: List[str], year: int) -> List[date]: return group((linear_resolve_datestrs.s(i, year) for i in datestrs))() @celery_app.task() def linear_resolve_datestrs(datestrs, year): resolved = [dsl_parser(i, year) for i in datestrs]