def mailgun_send(mailgun_data, files_dict=None):
    """POST a message payload (plus optional attachments) to the Mailgun API.

    Returns an HttpResponse carrying the Mailgun HTTP status, or a 500
    response when the API key is missing or the connection fails.
    """
    logger.debug("Mailgun send: %s" % mailgun_data)
    logger.debug("Mailgun files: %s" % files_dict)

    # Without an API key there is nothing we can do.
    if not settings.MAILGUN_API_KEY:
        capture_message("Mailgun API key is not defined.")
        return HttpResponse(status=500)

    if not settings.MAILGUN_CAUTION_SEND_REAL_MAIL:
        # We will see this message in the mailgun logs but nothing will
        # actually be delivered. This gets added at the end so it can't be
        # overwritten by other functions.
        mailgun_data["o:testmode"] = "yes"
        logger.debug("mailgun_send: o:testmode=%s" % mailgun_data["o:testmode"])

    try:
        api_response = requests.post(
            "https://api.mailgun.net/v2/%s/messages" % settings.LIST_DOMAIN,
            auth=("api", settings.MAILGUN_API_KEY),
            data=mailgun_data,
            files=files_dict,
        )
        if api_response.status_code != 200:
            capture_message('Mailgun POST returned %d' % api_response.status_code)
        return HttpResponse(status=api_response.status_code)
    except requests.ConnectionError as e:
        logger.error('Connection error. Email "%s" aborted.' % mailgun_data['subject'])
        capture_exception(e)
        return HttpResponse(status=500)
def log(self, out, err, returncode):
    """Report a failed command run to Sentry; a zero exit code is a no-op.

    Builds tags/extra from self.opts plus the command metadata, chunks
    stdout/stderr into extras, and either pretty-prints the payload
    (debug mode) or sends it via sentry_sdk at ERROR level.
    """
    if returncode == 0:
        return

    # Note: deliberately updates self.opts.tags / self.opts.extra in place
    # when they are provided, matching the original semantics.
    tags = self.opts.tags or {}
    tags.update({'returncode': returncode, 'callable': self.args[0]})

    extra = self.opts.extra or {}
    extra.update({'returncode': returncode, 'command': self.get_command()})
    extra.update(string_to_chunks('stdout', out.rstrip()))
    extra.update(string_to_chunks('stderr', err.rstrip()))

    report = {
        'message': self.get_raven_message(returncode),
        'level': logging.ERROR,
        'tags': tags,
        'extra': extra,
    }

    if self.opts.debug:
        pprint(report)
        return

    with sentry_sdk.push_scope() as scope:
        for key, value in iteritems(extra):
            scope.set_extra(key, value)
        for key, value in iteritems(tags):
            scope.set_tag(key, value)
        sentry_sdk.capture_message(report["message"], level=report["level"])
def update_names():
    """Update cached names on Encounters and Loggers and reconstructs Surveys."""
    start_msg = "[wastd.observations.tasks.update_names] Start updating names..."
    logger.info(start_msg)
    capture_message(start_msg, level="info")

    surveys, names, loggers = allocate_animal_names()

    done_msg = (
        "[wastd.observations.tasks.update_names] {0} surveys reconstructed, "
        "{1} animal names reconstructed, {2} logger names set. "
        "Task successfully finished.".format(len(surveys), len(names), len(loggers))
    )
    logger.info(done_msg)
    capture_message(done_msg, level="warning")
def dispatch(self, email, attrs):
    """Trigger an ExactTarget (Salesforce Marketing Cloud) send to *email*.

    Sends *attrs* as subscriber attributes, records the full API reply in
    the Sentry scope, and raises TriggeredSendException on any error in
    the reply. Error codes listed in settings.ET_IGNORED_ERROR_CODES are
    only logged locally instead of being reported to Sentry.
    """
    url = '%s/key:%s/send' % (self.url_base, self.external_key)
    headers = self.token.headers()
    req_data = {
        "To": {
            "Address": email,
            "SubscriberKey": email,
            "ContactAttributes": {"SubscriberAttributes": attrs},
        },
        "OPTIONS": {"RequestType": "SYNC"},
    }
    resp = requests.post(url, headers=headers, json=req_data)
    resp.raise_for_status()
    reply = resp.json()
    with sentry_sdk.configure_scope() as scope:
        scope.set_extra("reply_data", reply)
    for response in reply['responses']:
        if response['hasErrors']:
            error_messages = response.get('messageErrors', [])
            # set of error codes in messages
            error_codes = {err.get('messageErrorCode') for err in error_messages}
            # coerce ET_IGNORED_ERROR_CODES into set
            from . import settings
            codes_to_ignore = set(settings.ET_IGNORED_ERROR_CODES)
            # if response has any ignorable codes (set intersection)...
            if codes_to_ignore & error_codes:
                # log error, but don't send ignored error codes to Sentry
                # (logger.warn is a deprecated alias of logger.warning)
                logger.warning("Suppressed exception for ET API exception. Error response: {}".format(reply))
            else:
                sentry_sdk.capture_message('Error occurred while submitting subscriber to exact target')
            reg_messages = response.get('messages')
            # sometimes there are structured errors
            if error_messages:
                raise exceptions.TriggeredSendException(error_messages[0]['messageErrorStatus'])
            # sometimes there are plain text errors; truthiness also guards
            # against reg_messages being None (len(None) would raise).
            elif reg_messages:
                raise exceptions.TriggeredSendException(reg_messages[0])
            else:
                raise exceptions.TriggeredSendException('Unknown TriggeredSend Error Occurred')
    return reply
def import_odka():
    """Download and import new ODKA submissions."""
    capture_message(
        "[wastd.observations.tasks.import_odka] Starting ODKA import.",
        level="warning")

    # Stage downloads under MEDIA_ROOT/odka, creating the folder if needed.
    staging_dir = os.path.join(settings.MEDIA_ROOT, "odka")
    os.makedirs(staging_dir, exist_ok=True)

    save_all_odka(path=staging_dir)
    capture_message(
        "[wastd.observations.tasks.import_odka] ODKA submissions downloaded.",
        level="info")

    import_all_odka(path=staging_dir)
    capture_message(
        "[wastd.observations.tasks.import_odka] ODKA submissions imported.",
        level="info")

    reconstruct_missing_surveys()
    capture_message(
        "[wastd.observations.tasks.import_odka] "
        "ODKA surveys reconstructed, task successfully finished.",
        level="warning")
def index(request):
    """Test view: verify the parsed request matches the fixture, then report."""
    assert request.json == data
    assert request.text == json.dumps(data)
    assert not request.POST
    capture_message("hi")
    return Response("ok")
def info(self, text, capture_sentry=False):
    """Log *text* at INFO level, optionally mirroring it to Sentry."""
    message = '[IP {}] {}'.format(self.request.remote_addr, text)
    self.log.info(message)
    if not capture_sentry:
        return
    capture_message('[INFO][{}]'.format(self.name) + message)
def test_weird_chars(sentry_init, capture_events):
    """Non-UTF8 bytes given to capture_message must still serialize to JSON."""
    sentry_init()
    captured = capture_events()

    capture_message(u"föö".encode("latin1"))

    (event,) = captured
    assert json.loads(json.dumps(event)) == event
def hi(request):
    """Capture a test message and answer with a plain 'hi'."""
    capture_message("hi")
    return Response("hi")
def post_echo(request):
    """Capture a test message, then echo the raw request body back."""
    sentry_sdk.capture_message("hi")
    return HttpResponse(request.body)
def consumir_novedades_carga(cant_por_iteracion=None):
    """Consume pending ``Carga`` rows and consolidate the affected mesa-categorías.

    Claims up to ``cant_por_iteracion`` unprocessed cargas (all of them when
    None), consolidates every distinct MesaCategoria they touch, and marks
    the successfully handled cargas as processed. Returns the number of
    cargas marked as processed; cargas whose consolidation failed are
    released (lock cleared) but left unprocessed for a future attempt.
    """
    ahora = timezone.now()
    # Cargas claimed longer than TIMEOUT_CONSOLIDACION minutes ago are
    # considered abandoned and may be re-claimed.
    desde = ahora - timedelta(minutes=settings.TIMEOUT_CONSOLIDACION)
    with transaction.atomic():
        # Done inside a transaction so we don't compete with other consolidators.
        a_procesar = Carga.objects.select_for_update(skip_locked=True).filter(
            Q(tomada_por_consolidador__isnull=True) | Q(tomada_por_consolidador__lt=desde),
            procesada=False,
        )
        if cant_por_iteracion:
            a_procesar = a_procesar[0:cant_por_iteracion]
        ids_a_procesar = list(a_procesar.values_list('id', flat=True).all())
        Carga.objects.filter(id__in=ids_a_procesar).update(
            tomada_por_consolidador=ahora)

    # NOTE - precomputing ids_a_procesar here is important. See (*) at the end
    # of this doc for details.
    mesa_categorias_con_novedades = MesaCategoria.objects.filter(
        cargas__in=ids_a_procesar).distinct()

    con_error = []
    for mesa_categoria_con_novedades in mesa_categorias_con_novedades:
        try:
            consolidar_cargas(mesa_categoria_con_novedades)
        except Exception as e:
            # Log the exception and continue with the next mesa-categoría.
            capture_message(f"""
Excepción {e} al procesar la mesa-categoría
{mesa_categoria_con_novedades.id if mesa_categoria_con_novedades else None}.
""")
            logger.error('Carga',
                         mesa_categoria=mesa_categoria_con_novedades.id if mesa_categoria_con_novedades else None,
                         error=str(e))
            try:
                # Remove the ids of the cargas that were not processed so they
                # are not flagged procesada=True below.
                for carga in mesa_categoria_con_novedades.cargas.all():
                    if carga.id in ids_a_procesar:
                        # The novelty could have been generated by a different
                        # carga of the mesa-categoría.
                        ids_a_procesar.remove(carga.id)
                        con_error.append(carga.id)
            except Exception as e:
                capture_message(f"""
Excepción {e} al manejar la excepción de la mesa-categoría
{mesa_categoria_con_novedades.id if mesa_categoria_con_novedades else None}.
""")
                logger.error('Carga (excepción)',
                             mesa_categoria=mesa_categoria_con_novedades.id if mesa_categoria_con_novedades else None,
                             error=str(e))

    # All processed (must select from Carga because 'a_procesar' was already sliced).
    procesadas = Carga.objects.filter(id__in=ids_a_procesar).update(
        procesada=True, tomada_por_consolidador=None)
    # The cargas that errored are not marked processed, but are released.
    if con_error:
        Carga.objects.filter(id__in=con_error).update(
            tomada_por_consolidador=None)
    return procesadas
def hi(request):
    """Capture a test message and reply with the plain text 'ok'."""
    capture_message("hi")
    return response.text("ok")
def main(*, area_file: str, archive_file: Optional[str] = None, student_file: str, run_id: Optional[int] = None) -> None:
    """Run the degree-audit pipeline for one student/area pair.

    Streams messages from ``dp['run']``, tagging the Sentry scope with the
    current student/area, recording progress and results in Postgres, and
    reporting exceptions and missing area files. The DB connection is
    always closed on exit.
    """
    conn = psycopg2.connect(
        host=os.environ.get("PG_HOST"),
        database=os.environ.get("PG_DATABASE"),
        user=os.environ.get("PG_USER"),
        password=os.environ.get("PG_PASSWORD"),
    )

    try:
        result_id = None
        args = Arguments(area_files=[area_file], student_files=[student_file], archive_file=archive_file)

        # Dispatch on the message types emitted by the audit runner.
        for msg in dp['run'](args):
            if isinstance(msg, NoStudentsMsg):
                logger.critical('no student files provided')
            elif isinstance(msg, NoAuditsCompletedMsg):
                logger.critical('no audits completed')
            elif isinstance(msg, AuditStartMsg):
                logger.info("auditing #%s against %s %s", msg.stnum, msg.area_catalog, msg.area_code)
                with sentry_sdk.configure_scope() as scope:
                    scope.user = {"id": msg.stnum}
                # Allocate the DB row up-front so later messages can refer to it.
                result_id = make_result_id(
                    conn=conn,
                    stnum=msg.stnum,
                    area_code=msg.area_code,
                    catalog=msg.area_catalog,
                    run=run_id,
                    student=msg.student,
                )
                logger.info("result id = %s", result_id)
                with sentry_sdk.configure_scope() as scope:
                    scope.user = {"id": msg.stnum}
                    scope.set_tag('area_code', msg.area_code)
                    scope.set_tag('catalog', msg.area_catalog)
                    scope.set_extra('result_id', result_id)
            elif isinstance(msg, ExceptionMsg):
                sentry_sdk.capture_exception(msg.ex)
                if result_id:
                    record_error(result_id=result_id, conn=conn, error={"error": str(msg.ex)})
            elif isinstance(msg, AreaFileNotFoundMsg):
                message = "Could not load area file"
                with sentry_sdk.configure_scope() as scope:
                    scope.user = {"id": msg.stnum}
                    scope.set_tag('area_file', msg.area_file)
                sentry_sdk.capture_message(message)
                if result_id:
                    record_error(result_id=result_id, conn=conn, error={
                        "error": message,
                        "stnum": msg.stnum,
                        "area_file": msg.area_file
                    })
            elif isinstance(msg, ProgressMsg):
                # max(..., 1) guards against division by zero on an empty window.
                avg_iter_s = sum(msg.recent_iters) / max(len(msg.recent_iters), 1)
                avg_iter_time = pretty_ms(avg_iter_s * 1_000, format_sub_ms=True, unit_count=1)
                update_progress(conn=conn, start_time=msg.start_time, count=msg.count, result_id=result_id)
                logger.info(f"{msg.count:,} at {avg_iter_time} per audit")
            elif isinstance(msg, ResultMsg):
                record(conn=conn, result_id=result_id, message=msg)
            elif isinstance(msg, EstimateMsg):
                # Estimates are intentionally ignored here.
                pass
            else:
                logger.critical('unknown message %s', msg)
    finally:
        conn.close()
def iap_renew(request):
    """Renew a membership from an iOS in-app purchase receipt.

    Expects a POST body with 'token' (existing auth-token key) and
    'receipt' (base64 Apple receipt). Verifies the receipt with Apple's
    production endpoint, falling back to sandbox on status 21007, then
    stores the latest subscription product/expiry on the token's user.
    """
    if request.method == 'POST':
        data = JSONParser().parse(request)
        token_str = data['token']
        if not Token.objects.filter(key=token_str).exists():
            return JsonResponse({'error': 'Unable to connect this to a user. Please contact [email protected]', 'kick_out': True}, status=400)
        receipt = data['receipt']
        # Reject receipts already tied to another account (receipt reuse).
        if ZappyUser.objects.filter(apple_receipt=receipt).exists():
            existing_user = ZappyUser.objects.filter(apple_receipt=receipt).first()
            send_mail(
                'This Receipt has already been used',
                'Existing user email that used this receipt: ' + existing_user.email + '\nReceipt: ' + receipt,
                '*****@*****.**',
                ['*****@*****.**'],
                fail_silently=False,
            )
            return JsonResponse({'error': 'This In App Purchase has already been used. Please contact [email protected]', 'kick_out': True}, status=400)
        verify_url = 'https://buy.itunes.apple.com/verifyReceipt'
        receipt_json = json.dumps(
            {"receipt-data": receipt, 'password': env.str('APP_SHARED_SECRET', default='')})
        response = requests.request(
            method='POST',
            url=verify_url,
            headers={'Content-Type': 'application/x-www-form-urlencoded'},
            data=receipt_json
        )
        res_json = response.json()
        if res_json['status'] == 21007:
            # Apple docs say try prod and if no dice, then do sandbox https://developer.apple.com/library/archive/technotes/tn2413/_index.html#//apple_ref/doc/uid/DTS40016228-CH1-RECEIPTURL
            response = requests.request(
                method='POST',
                url='https://sandbox.itunes.apple.com/verifyReceipt',
                headers={'Content-Type': 'application/x-www-form-urlencoded'},
                data=receipt_json
            )
            res_json = response.json()
        try:
            token = Token.objects.get(key=token_str)
            user = token.user
            send_mail(
                'A member renewed in the iOS app!',
                str(user.email) + ' ' + str(res_json),
                '*****@*****.**',
                ['*****@*****.**'],
                fail_silently=False,
            )
            # The last latest_receipt_info entry carries the current
            # subscription product and expiry timestamp (milliseconds).
            user.apple_product_id = res_json['latest_receipt_info'][-1]['product_id']
            user.apple_expires_date = datetime.datetime.fromtimestamp(int(res_json['latest_receipt_info'][-1]['expires_date_ms']) / 1000)
            user.active_membership = True
            user.apple_receipt = receipt
            user.save()
            return JsonResponse({'token': str(token)}, status=201)
        except IntegrityError:
            sentry_sdk.capture_message("PSomething is wrong fam1", level="error")
            return JsonResponse({'error': 'That email has already been taken'}, status=400)
        except KeyError:
            # Missing keys in Apple's reply (e.g. unverifiable receipt).
            sentry_sdk.capture_message("PSomething is wrong fam2", level="error")
            return JsonResponse({'error': 'We had problems verifying the receipt. Please contact [email protected]', 'kick_out': True}, status=400)
        except Exception as e:
            print(e)
            sentry_sdk.capture_message("PSomething is wrong fam3", level="error")
            return JsonResponse({'error': 'Something went wrong. Please contact [email protected]', 'kick_out': True}, status=400)
def iap_signup(request):
    """Create an account from an iOS in-app purchase.

    Expects POST data with 'email', 'password', a base64 Apple 'receipt'
    and optionally 'dream'/'skill'. Verifies the receipt with Apple
    (production first, sandbox fallback on status 21007 per Apple TN2413),
    creates the user via the allauth SignupForm, stores the subscription
    details, and returns a DRF auth token.
    """
    if request.method == 'POST':
        data = JSONParser().parse(request)
        receipt = data['receipt']
        # Reject receipts already tied to another account (receipt reuse).
        if ZappyUser.objects.filter(apple_receipt=receipt).exists():
            existing_user = ZappyUser.objects.filter(apple_receipt=receipt).first()
            send_mail(
                'This Receipt has already been used',
                'Existing user email that used this receipt: ' + existing_user.email + '\nThe email that tried to reuse this receipt: ' + data['email'] + '\nReceipt: ' + receipt,
                '*****@*****.**',
                ['*****@*****.**'],
                fail_silently=False,
            )
            return JsonResponse({'error': 'This In App Purchase has already been used. Please contact [email protected]', 'kick_out': True}, status=400)
        verify_url = 'https://buy.itunes.apple.com/verifyReceipt'
        receipt_json = json.dumps(
            {"receipt-data": receipt, 'password': env.str('APP_SHARED_SECRET', default='')})
        response = requests.request(
            method='POST',
            url=verify_url,
            headers={'Content-Type': 'application/x-www-form-urlencoded'},
            data=receipt_json
        )
        res_json = response.json()
        if res_json['status'] == 21007:
            # Apple docs say try prod and if no dice, then do sandbox https://developer.apple.com/library/archive/technotes/tn2413/_index.html#//apple_ref/doc/uid/DTS40016228-CH1-RECEIPTURL
            response = requests.request(
                method='POST',
                url='https://sandbox.itunes.apple.com/verifyReceipt',
                headers={'Content-Type': 'application/x-www-form-urlencoded'},
                data=receipt_json
            )
            res_json = response.json()
        try:
            # SignupForm reads from request.POST, so copy the JSON fields in.
            request.POST._mutable = True
            request.POST['email'] = data['email']
            request.POST['password1'] = data['password']
            form = SignupForm(request.POST)
            # NOTE(review): is_valid() result is ignored; form.save may run
            # even when validation fails — confirm this is intentional.
            form.is_valid()
            user = form.save(request)
            dream_and_skill = '[No Dream or Skill]'
            if 'dream' in data:
                if 'skill' in data:
                    dream_and_skill = data['dream'] + ' ' + data['skill']
            send_mail(
                'New Member from the App!',
                str(data['email']) + ' ' + dream_and_skill + ' ' + str(res_json),
                '*****@*****.**',
                ['*****@*****.**'],
                fail_silently=False,
            )
            # The last latest_receipt_info entry carries the current
            # subscription product and expiry timestamp (milliseconds).
            user.apple_product_id = res_json['latest_receipt_info'][-1]['product_id']
            user.apple_expires_date = datetime.datetime.fromtimestamp(int(res_json['latest_receipt_info'][-1]['expires_date_ms']) / 1000)
            user.active_membership = True
            user.apple_receipt = receipt
            user.save()
            token = Token.objects.create(user=user)
            allauth.account.utils.send_email_confirmation(request, user, signup=True)
            return JsonResponse({'token': str(token)}, status=201)
        except IntegrityError:
            sentry_sdk.capture_message("PSomething is wrong fam1", level="error")
            return JsonResponse({'error': 'That email has already been taken'}, status=400)
        except KeyError:
            # Missing keys in Apple's reply (e.g. unverifiable receipt).
            sentry_sdk.capture_message("PSomething is wrong fam2", level="error")
            return JsonResponse({'error': 'We had problems verifying the receipt. Please contact [email protected]', 'kick_out': True}, status=400)
        except Exception as e:
            print(e)
            sentry_sdk.capture_message("PSomething is wrong fam3", level="error")
            return JsonResponse({'error': 'Something went wrong. Please contact [email protected]', 'kick_out': True}, status=400)
def stripe_webhook(request: HttpRequest) -> JsonResponse:
    """Handle Stripe webhook events for organization billing.

    Verifies the payload signature, then processes:
    - ``invoice.payment_succeeded``: records the subscription item id and
      the end of the current billing period on OrganizationBilling.
    - ``payment_intent.amount_capturable_updated``: card-validation-only
      plans; runs post-validation handling and cancels the hold.
    Malformed payloads yield a 400 response.
    """
    response: JsonResponse = JsonResponse({"success": True}, status=status.HTTP_200_OK)
    error_response: JsonResponse = JsonResponse(
        {"success": False},
        status=status.HTTP_400_BAD_REQUEST,
    )
    signature: str = request.META.get("HTTP_STRIPE_SIGNATURE", "")
    try:
        event: Dict = parse_webhook(request.read(), signature)
    except Exception as e:
        # Signature/parse failure: report and reject.
        capture_exception(e)
        return error_response
    try:
        customer_id = event["data"]["object"]["customer"]
        try:
            instance = OrganizationBilling.objects.get(
                stripe_customer_id=customer_id)
        except OrganizationBilling.DoesNotExist:
            # Unknown customer: acknowledge (200) so Stripe stops retrying.
            capture_message(
                f"Received invoice.payment_succeeded for {customer_id} but customer is not in the database.",
            )
            return response
        if event["type"] == "invoice.payment_succeeded":
            # We have to use the period from the invoice line items because on the first month
            # Stripe sets period_end = period_start because they manage these attributes on an accrual-basis
            line_items = event["data"]["object"]["lines"]["data"]
            line_item = None
            if instance.stripe_subscription_item_id:
                # Find the line item for the subscription we already know about.
                for _item in line_items:
                    if (instance.stripe_subscription_item_id == _item["subscription_item"]):
                        line_item = _item
                if line_item is None:
                    capture_message(
                        "Stripe webhook does not match subscription on file "
                        f"({instance.stripe_subscription_item_id}): {json.dumps(event)}",
                        "error",
                    )
                    return error_response
            else:
                if len(line_items) > 1:
                    # This is unexpected behavior, while the code will continue by using only the first item,
                    # this is logged to be properly addressed.
                    capture_message(
                        f"Stripe invoice.payment_succeeded webhook contained more than 1 item: {json.dumps(event)}",
                        "warning",
                    )
                # First time receiving the subscription_item ID, record it
                line_item = line_items[0]
                instance.stripe_subscription_item_id = line_item[
                    "subscription_item"]
            instance.billing_period_ends = datetime.datetime.utcfromtimestamp(
                line_item["period"]["end"],
            ).replace(tzinfo=pytz.utc)
            instance.should_setup_billing = False
            instance.save()
        # Special handling for plans that only do card validation (e.g. startup or metered-billing plans)
        elif event["type"] == "payment_intent.amount_capturable_updated":
            instance.handle_post_card_validation()
            # Attempt to cancel the validation charge
            try:
                cancel_payment_intent(event["data"]["object"]["id"])
            except stripe.error.StripeError as e:
                capture_exception(e)
    except KeyError:
        # Malformed request
        return error_response
    return response
async def process_filing(filing_msg: Dict, flask_app: Flask):  # pylint: disable=too-many-branches
    """Render the filings contained in the submission."""
    if not flask_app:
        raise QueueException('Flask App not available.')

    with flask_app.app_context():
        filing_submission = Filing.find_by_id(filing_msg['filing']['id'])
        if not filing_submission:
            raise QueueException

        # Guard against reprocessing an already-completed filing.
        if filing_submission.status == Filing.Status.COMPLETED.value:
            logger.warning(
                'QueueFiler: Attempting to reprocess business.id=%s, filing.id=%s filing=%s',
                filing_submission.business_id, filing_submission.id, filing_msg)
            return None, None

        legal_filings = filing_submission.legal_filings()
        if legal_filings:
            # All filing changes are versioned under a single transaction.
            uow = versioning_manager.unit_of_work(db.session)
            transaction = uow.create_transaction(db.session)
            business = Business.find_by_internal_id(
                filing_submission.business_id)
            for filing in legal_filings:
                # Dispatch each legal filing to its type-specific processor.
                if filing.get('annualReport'):
                    annual_report.process(business, filing)
                elif filing.get('changeOfAddress'):
                    change_of_address.process(business, filing)
                elif filing.get('changeOfDirectors'):
                    filing['colinIds'] = filing_submission.colin_event_ids
                    change_of_directors.process(business, filing)
                elif filing.get('changeOfName'):
                    change_of_name.process(business, filing)
                elif filing.get('voluntaryDissolution'):
                    voluntary_dissolution.process(business, filing)
                elif filing.get('incorporationApplication'):
                    business, filing_submission = incorporation_filing.process(
                        business, filing, filing_submission)
                elif filing.get('correction'):
                    correction.process(filing_submission, filing)
            filing_submission.transaction_id = transaction.id
            filing_submission.set_processed()
            db.session.add(business)
            db.session.add(filing_submission)
            db.session.commit()
            # post filing changes to other services
            if any('incorporationApplication' in x for x in legal_filings):
                filing_submission.business_id = business.id
                db.session.add(filing_submission)
                db.session.commit()
                incorporation_filing.update_affiliation(
                    business, filing_submission)
                incorporation_filing.consume_nr(business, filing_submission)
                try:
                    await publish_email_message(
                        qsm, APP_CONFIG.EMAIL_PUBLISH_OPTIONS['subject'], filing_submission, 'registered')
                    await publish_email_message(
                        qsm, APP_CONFIG.EMAIL_PUBLISH_OPTIONS['subject'], filing_submission, 'mras')
                except Exception as err:  # pylint: disable=broad-except, unused-variable # noqa F841;
                    # mark any failure for human review
                    capture_message(
                        f'Queue Error: Failed to place email for filing:{filing_submission.id}'
                        f'on Queue with error:{err}',
                        level='error')
            try:
                await publish_event(business, filing_submission)
            except Exception as err:  # pylint: disable=broad-except, unused-variable # noqa F841;
                # mark any failure for human review
                capture_message(
                    f'Queue Error: Failed to publish event for filing:{filing_submission.id}'
                    f'on Queue with error:{err}',
                    level='error')
scope.set_tag('CRAWL_DIRECTORY', CRAWL_DIRECTORY) scope.set_tag('S3_BUCKET', S3_BUCKET) scope.set_tag('HTTP_INSTRUMENT', HTTP_INSTRUMENT) scope.set_tag('COOKIE_INSTRUMENT', COOKIE_INSTRUMENT) scope.set_tag('NAVIGATION_INSTRUMENT', NAVIGATION_INSTRUMENT) scope.set_tag('JS_INSTRUMENT', JS_INSTRUMENT) scope.set_tag('SAVE_JAVASCRIPT', SAVE_JAVASCRIPT) scope.set_tag('DWELL_TIME', DWELL_TIME) scope.set_tag('TIMEOUT', TIMEOUT) scope.set_tag('CRAWL_REFERENCE', '%s/%s' % (S3_BUCKET, CRAWL_DIRECTORY)) # context adds addition information that may be of interest scope.set_context("crawl_config", { 'REDIS_QUEUE_NAME': REDIS_QUEUE_NAME, }) sentry_sdk.capture_message("Crawl worker started") # Loads the default manager params # and NUM_BROWSERS copies of the default browser params manager_params, browser_params = TaskManager.load_default_params(NUM_BROWSERS) # Browser configuration for i in range(NUM_BROWSERS): browser_params[i]['http_instrument'] = HTTP_INSTRUMENT browser_params[i]['cookie_instrument'] = COOKIE_INSTRUMENT browser_params[i]['navigation_instrument'] = NAVIGATION_INSTRUMENT browser_params[i]['js_instrument'] = JS_INSTRUMENT browser_params[i]['save_javascript'] = SAVE_JAVASCRIPT browser_params[i]['headless'] = True # Manager configuration
def critical(self, text, capture_sentry=False):
    """Log *text* at CRITICAL level, optionally mirroring it to Sentry."""
    message = '[IP {}] {}'.format(self.request.remote_addr, text)
    self.log.critical(message)
    if not capture_sentry:
        return
    capture_message('[CRITICAL][{}]'.format(self.name) + message)
def index():
    """Bottle test view: check the parsed form field, then report."""
    import bottle
    assert bottle.request.forms["foo"] == data["foo"]
    capture_message("hi")
    return "ok"
def head(self, *args, **kwargs):
    """Handle HEAD: capture a test message and return an empty body."""
    sentry_sdk.capture_message("hi")
    return HttpResponse("")
def hi():
    """Capture a test message and reply 'ok'."""
    capture_message("hi")
    return "ok"
def handler404(*args, **kwargs):
    """Custom 404 handler that reports the miss to Sentry at error level."""
    sentry_sdk.capture_message("not found", level="error")
    return HttpResponseNotFound("404")
def named_hi():
    """Named-route variant: capture a test message and reply 'ok'."""
    capture_message("hi")
    return "ok"
async def hi(request):
    """Async view: capture an error-level test message, reply 'ok'."""
    capture_message("hi", level="error")
    return PlainTextResponse("ok")
def error_handler(err):
    """Report a marker message for the failed request, then return the body."""
    capture_message("error_msg")
    return "My error"
def test_simple_transport():
    """A callable transport should receive the captured event directly."""
    collected = []
    with Hub(Client(transport=collected.append)):
        capture_message("Hello World!")
    assert collected[0]["message"] == "Hello World!"
def main():
    """Entry point: parse CLI options, build and run the dMRIPrep workflow."""
    from nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from ..utils.bids import write_derivative_description, validate_input_dir
    set_start_method('forkserver')
    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()

    exec_env = os.name
    # special variable set in the container
    if os.getenv('IS_DOCKER_8395080871'):
        exec_env = 'singularity'
        cgroup = Path('/proc/1/cgroup')
        if cgroup.exists() and 'docker' in cgroup.read_text():
            exec_env = 'docker'
            if os.getenv('DOCKER_VERSION_8395080871'):
                exec_env = 'dmriprep-docker'

    sentry_sdk = None
    if not opts.notrack:
        import sentry_sdk
        from ..utils.sentry import sentry_setup
        sentry_setup(opts, exec_env)

    # Validate inputs
    if not opts.skip_bids_validation:
        print("Making sure the input data is BIDS compliant (warnings can be ignored in most "
              "cases).")
        validate_input_dir(exec_env, opts.bids_dir, opts.participant_label)

    # FreeSurfer license
    default_license = str(Path(os.getenv('FREESURFER_HOME')) / 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or Path(os.getenv('FS_LICENSE', default_license))
    if not license_file.exists():
        raise RuntimeError("""\
ERROR: a valid license file is required for FreeSurfer to run. dMRIPrep looked for an existing \
license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \
2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \
(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html""")
    os.environ['FS_LICENSE'] = str(license_file.resolve())

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    # Call build_workflow(opts, retval) in a subprocess so nipype's imports
    # and any crashes stay isolated from this master process.
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        retcode = p.exitcode or retval.get('return_code', 0)

        bids_dir = Path(retval.get('bids_dir'))
        output_dir = Path(retval.get('output_dir'))
        work_dir = Path(retval.get('work_dir'))
        plugin_settings = retval.get('plugin_settings', None)
        subject_list = retval.get('subject_list', None)
        dmriprep_wf = retval.get('workflow', None)
        run_uuid = retval.get('run_uuid', None)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    if dmriprep_wf and opts.write_graph:
        dmriprep_wf.write_graph(graph2use="colored", format='svg', simple_form=True)

    retcode = retcode or int(dmriprep_wf is None)
    if retcode != 0:
        sys.exit(retcode)

    # Check workflow for missing commands
    missing = check_deps(dmriprep_wf)
    if missing:
        print("Cannot run dMRIPrep. Missing dependencies:", file=sys.stderr)
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)
    # Clean up master process before running workflow, which may create forks
    gc.collect()

    # Sentry tracking
    if not opts.notrack:
        from ..utils.sentry import start_ping
        start_ping(run_uuid, len(subject_list))

    errno = 1  # Default is error exit unless otherwise set
    try:
        dmriprep_wf.run(**plugin_settings)
    except Exception as e:
        if not opts.notrack:
            from ..utils.sentry import process_crashfile
            crashfolders = [output_dir / 'dmriprep' / 'sub-{}'.format(s) / 'log' / run_uuid
                            for s in subject_list]
            for crashfolder in crashfolders:
                for crashfile in crashfolder.glob('crash*.*'):
                    process_crashfile(crashfile)
            # Nipype wraps node errors in this generic message; the real
            # errors were already reported via the crashfiles above.
            if "Workflow did not execute cleanly" not in str(e):
                sentry_sdk.capture_exception(e)
        logger.critical('dMRIPrep failed: %s', e)
        raise
    else:
        if opts.run_reconall:
            from templateflow import api
            from niworkflows.utils.misc import _copy_any
            dseg_tsv = str(api.get('fsaverage', suffix='dseg', extension=['.tsv']))
            _copy_any(dseg_tsv, str(output_dir / 'dmriprep' / 'desc-aseg_dseg.tsv'))
            _copy_any(dseg_tsv, str(output_dir / 'dmriprep' / 'desc-aparcaseg_dseg.tsv'))
        errno = 0
        logger.log(25, 'dMRIPrep finished without errors')
        if not opts.notrack:
            sentry_sdk.capture_message('dMRIPrep finished without errors', level='info')
    finally:
        # Always attempt to render the citation boilerplate and reports,
        # whether or not the workflow succeeded.
        from niworkflows.reports import generate_reports
        from subprocess import check_call, CalledProcessError, TimeoutExpired
        from pkg_resources import resource_filename as pkgrf
        from shutil import copyfile

        citation_files = {
            ext: output_dir / 'dmriprep' / 'logs' / ('CITATION.%s' % ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }

        if citation_files['md'].exists():
            # Generate HTML file resolving citations
            cmd = ['pandoc', '-s', '--bibliography',
                   pkgrf('dmriprep', 'data/boilerplate.bib'),
                   '--filter', 'pandoc-citeproc',
                   '--metadata', 'pagetitle="dMRIPrep citation boilerplate"',
                   str(citation_files['md']),
                   '-o', str(citation_files['html'])]
            logger.info('Generating an HTML version of the citation boilerplate...')
            try:
                check_call(cmd, timeout=10)
            except (FileNotFoundError, CalledProcessError, TimeoutExpired):
                logger.warning('Could not generate CITATION.html file:\n%s', ' '.join(cmd))

            # Generate LaTex file resolving citations
            cmd = ['pandoc', '-s', '--bibliography',
                   pkgrf('dmriprep', 'data/boilerplate.bib'),
                   '--natbib', str(citation_files['md']),
                   '-o', str(citation_files['tex'])]
            logger.info('Generating a LaTeX version of the citation boilerplate...')
            try:
                check_call(cmd, timeout=10)
            except (FileNotFoundError, CalledProcessError, TimeoutExpired):
                logger.warning('Could not generate CITATION.tex file:\n%s', ' '.join(cmd))
            else:
                copyfile(pkgrf('dmriprep', 'data/boilerplate.bib'), citation_files['bib'])
        else:
            logger.warning('dMRIPrep could not find the markdown version of '
                           'the citation boilerplate (%s). HTML and LaTeX versions'
                           ' of it will not be available', citation_files['md'])

        # Generate reports phase
        failed_reports = generate_reports(
            subject_list, output_dir, work_dir, run_uuid, packagename='dmriprep')
        write_derivative_description(bids_dir, output_dir / 'dmriprep')

        if failed_reports and not opts.notrack:
            sentry_sdk.capture_message(
                'Report generation failed for %d subjects' % failed_reports,
                level='error')
        sys.exit(int((errno + failed_reports) > 0))
def hi2(request):
    """Echo the 'msg' path segment into Sentry, then answer 'hi'."""
    capture_message(request.matchdict["msg"])
    return Response("hi")
def captureMessage(self, msg, **kwargs):
    """Raven-compat shim: push a scope, apply kwargs, and capture *msg*."""
    with sentry_sdk.push_scope() as scope:
        self._kwargs_into_scope(scope, **kwargs)
        return capture_message(msg)
def index(request):
    """Capture a test message and reply with plain 'ok'."""
    capture_message("hi")
    return Response("ok")
def main():
    """Entry point.

    Parses the command line, optionally configures Sentry telemetry,
    validates inputs and the FreeSurfer license, builds the fMRIPrep
    workflow in a subprocess, executes it, and always attempts to
    generate reports before exiting.  Exits non-zero on any failure.
    """
    from nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from ..viz.reports import generate_reports
    from ..utils.bids import write_derivative_description

    # 'forkserver' keeps workflow forks from inheriting heavyweight state.
    set_start_method('forkserver')
    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()

    exec_env = os.name

    # special variable set in the container
    if os.getenv('IS_DOCKER_8395080871'):
        exec_env = 'singularity'
        cgroup = Path('/proc/1/cgroup')
        if cgroup.exists() and 'docker' in cgroup.read_text():
            exec_env = 'docker'
            if os.getenv('DOCKER_VERSION_8395080871'):
                exec_env = 'fmriprep-docker'

    sentry_sdk = None
    if not opts.notrack:
        import sentry_sdk
        from ..__about__ import __version__
        environment = "prod"
        release = __version__
        if not __version__:
            environment = "dev"
            release = "dev"
        elif bool(int(os.getenv('FMRIPREP_DEV', 0))) or ('+' in __version__):
            environment = "dev"

        def before_send(event, hints):
            """Drop noisy crashed-node log events; propagate known fingerprints."""
            # Filtering log messages about crashed nodes
            if 'logentry' in event and 'message' in event['logentry']:
                msg = event['logentry']['message']
                if msg.startswith("could not run node:"):
                    return None
                elif msg.startswith("Saving crash info to "):
                    return None
                elif re.match("Node .+ failed to run on host .+", msg):
                    return None
            if 'breadcrumbs' in event and isinstance(event['breadcrumbs'], list):
                fingerprints_to_propagate = ['no-disk-space', 'memory-error',
                                             'permission-denied',
                                             'keyboard-interrupt']
                for bc in event['breadcrumbs']:
                    msg = bc.get('message', 'empty-msg')
                    if msg in fingerprints_to_propagate:
                        event['fingerprint'] = [msg]
                        break
            return event

        sentry_sdk.init("https://[email protected]/1137693",
                        release=release,
                        environment=environment,
                        before_send=before_send)
        with sentry_sdk.configure_scope() as scope:
            scope.set_tag('exec_env', exec_env)
            if exec_env == 'fmriprep-docker':
                scope.set_tag('docker_version',
                              os.getenv('DOCKER_VERSION_8395080871'))
            free_mem_at_start = round(psutil.virtual_memory().free / 1024**3, 1)
            scope.set_tag('free_mem_at_start', free_mem_at_start)
            scope.set_tag('cpu_count', cpu_count())

            # Memory policy may have a large effect on types of errors experienced
            overcommit_memory = Path('/proc/sys/vm/overcommit_memory')
            if overcommit_memory.exists():
                policy = {'0': 'heuristic',
                          '1': 'always',
                          '2': 'never'}.get(
                              overcommit_memory.read_text().strip(), 'unknown')
                scope.set_tag('overcommit_memory', policy)
                if policy == 'never':
                    # BUGFIX: read the limit from overcommit_kbytes; the
                    # previous code re-read overcommit_memory, so the policy
                    # digit was reported as a kB limit (e.g. "2kB").
                    overcommit_kbytes = Path('/proc/sys/vm/overcommit_kbytes')
                    kb = overcommit_kbytes.read_text().strip()
                    if kb != '0':
                        limit = '{}kB'.format(kb)
                    else:
                        # A zero kbytes limit means the ratio is in effect.
                        overcommit_ratio = Path('/proc/sys/vm/overcommit_ratio')
                        limit = '{}%'.format(overcommit_ratio.read_text().strip())
                    scope.set_tag('overcommit_limit', limit)
                else:
                    scope.set_tag('overcommit_limit', 'n/a')
            else:
                scope.set_tag('overcommit_memory', 'n/a')
                scope.set_tag('overcommit_limit', 'n/a')

            for k, v in vars(opts).items():
                scope.set_tag(k, v)

    # Validate inputs
    if not opts.skip_bids_validation:
        print("Making sure the input data is BIDS compliant (warnings can be "
              "ignored in most cases).")
        validate_input_dir(exec_env, opts.bids_dir, opts.participant_label)

    # FreeSurfer license
    default_license = str(Path(os.getenv('FREESURFER_HOME')) / 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or os.getenv('FS_LICENSE',
                                                     default_license)
    if not os.path.exists(license_file):
        raise RuntimeError(
            'ERROR: a valid license file is required for FreeSurfer to run. '
            'FMRIPREP looked for an existing license file at several paths, in this '
            'order: 1) command line argument ``--fs-license-file``; 2) ``$FS_LICENSE`` '
            'environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. '
            'Get it (for free) by registering at https://'
            'surfer.nmr.mgh.harvard.edu/registration.html')
    os.environ['FS_LICENSE'] = license_file

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('nipype.workflow').setLevel(log_level)
    nlogging.getLogger('nipype.interface').setLevel(log_level)
    nlogging.getLogger('nipype.utils').setLevel(log_level)

    errno = 0

    # Call build_workflow(opts, retval) in a subprocess so nipype's global
    # configuration does not pollute the master process.
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        if p.exitcode != 0:
            sys.exit(p.exitcode)

        fmriprep_wf = retval['workflow']
        plugin_settings = retval['plugin_settings']
        bids_dir = retval['bids_dir']
        output_dir = retval['output_dir']
        work_dir = retval['work_dir']
        subject_list = retval['subject_list']
        run_uuid = retval['run_uuid']

        if not opts.notrack:
            with sentry_sdk.configure_scope() as scope:
                scope.set_tag('run_uuid', run_uuid)
                scope.set_tag('npart', len(subject_list))

        retcode = retval['return_code']

    if fmriprep_wf is None:
        sys.exit(1)

    if opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format='svg',
                                simple_form=True)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    if opts.boilerplate:
        sys.exit(int(retcode > 0))

    # Sentry tracking
    if not opts.notrack:
        sentry_sdk.add_breadcrumb(message='fMRIPrep started', level='info')
        sentry_sdk.capture_message('fMRIPrep started', level='info')

    # Check workflow for missing commands
    missing = check_deps(fmriprep_wf)
    if missing:
        print("Cannot run fMRIPrep. Missing dependencies:")
        for iface, cmd in missing:
            print("\t{} (Interface: {})".format(cmd, iface))
        sys.exit(2)

    # Clean up master process before running workflow, which may create forks
    gc.collect()
    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        errno = 1
        # Crashes with this message are already reported per-node; avoid
        # double-counting them in Sentry.
        if "Workflow did not execute cleanly" not in str(e):
            sentry_sdk.capture_exception(e)
        raise
    finally:
        # Generate reports phase — always attempted, even after a crash.
        errno += generate_reports(subject_list, output_dir, work_dir,
                                  run_uuid, sentry_sdk=sentry_sdk)
        write_derivative_description(bids_dir,
                                     str(Path(output_dir) / 'fmriprep'))
    if not opts.notrack and errno == 0:
        sentry_sdk.capture_message('fMRIPrep finished without errors',
                                   level='info')
    sys.exit(int(errno > 0))
def warn(self, text, capture_sentry=False):
    """Log *text* as a warning tagged with the client IP.

    When ``capture_sentry`` is true, the message is also forwarded to
    Sentry, prefixed with this handler's name.
    """
    message = '[IP {}] {}'.format(self.request.remote_addr, text)
    self.log.warn(message)
    if not capture_sentry:
        return
    capture_message('[WARNING][{}]'.format(self.name) + message)
def message(request):
    """Capture a fixed test message via sentry_sdk and return "ok"."""
    sentry_sdk.capture_message("hi")
    response = HttpResponse("ok")
    return response
def captureMessage(self, msg, **kwargs):
    """Compatibility shim: push a scope, apply kwargs, then capture *msg*."""
    with sentry_sdk.push_scope() as scope:
        self._kwargs_into_scope(scope, **kwargs)
        result = capture_message(msg)
        return result
def error(self, text, capture_sentry=False):
    """Log *text* at error level tagged with the client IP.

    When ``capture_sentry`` is true, the message is also forwarded to
    Sentry, prefixed with this handler's name.
    """
    message = '[IP {}] {}'.format(self.request.remote_addr, text)
    self.log.error(message)
    if not capture_sentry:
        return
    capture_message('[ERROR][{}]'.format(self.name) + message)
import os
import sys
import time

import boto3
from discord.ext import commands
from sentry_sdk import init, capture_exception, capture_message

# BUGFIX: `os` is used throughout (os.environ) but was never imported in
# this file's import block, which would raise NameError at startup.

# Sentry tracking is opt-in: enabled only when SENTRY_DSN is set.
ENABLE_SENTRY = os.environ.get('SENTRY_DSN') is not None
if ENABLE_SENTRY:
    init(os.environ['SENTRY_DSN'])

# The EC2 instance this bot controls must be provided via the environment.
try:
    instance_id = os.environ["INSTANCE_ID"]
except KeyError:
    capture_message('No INSTANCE_ID provided')
    print("No INSTANCE_ID provided as environment variable")
    sys.exit(1)

region = os.environ.get('AWS_REGION', 'us-east-1')
command_prefix = os.environ.get('COMMAND_PREFIX', '$')

bot = commands.Bot(command_prefix=command_prefix)
ec2 = boto3.client('ec2', region_name=region)


@bot.event
async def on_ready():
    # Startup banner once the Discord connection is established.
    print('Logged in as')
    print(bot.user.name)
    print(bot.user.id)
    print('------')
def capture_message(*args, **kwargs):
    """Proxy to sentry_sdk.capture_message; a no-op when Sentry is disabled."""
    if not is_sentry_enabled():
        return
    sentry_sdk.capture_message(*args, **kwargs)