'AUDIENCE': 'https://etools.unicef.org/',
})

AUTHENTICATION_BACKENDS = (
    'etools_prp.apps.core.mixins.CustomAzureADBBCOAuth2',
    'django.contrib.auth.backends.ModelBackend',
)

# apm related - it's enough to set those as env variables, here just for documentation
# by default logging and apm is off, so below envs needs to be set per environment
# ELASTIC_APM_SERVICE_NAME=<app-name>  # set app name visible on dashboard
# ELASTIC_APM_SECRET_TOKEN=<app-token>  # secret token - needs to be exact same as on apm-server
# ELASTIC_APM_SERVER_URL=http://elastic.tivixlabs.com:8200  # apm-server url

# Sentry error reporting is active only when a DSN is supplied via env.
SENTRY_DSN = env('SENTRY_DSN', default=None)
if SENTRY_DSN:
    sentry_sdk.init(dsn=SENTRY_DSN, integrations=[DjangoIntegration(), CeleryIntegration()],)

# Development-only additions: CORS origins for the local frontends, local
# fixtures directory, and the Django Debug Toolbar.
if DEBUG:
    CORS_ORIGIN_WHITELIST += ('http://localhost:8082', 'http://localhost:8081')
    FIXTURE_DIRS += ["fixtures"]
    INSTALLED_APPS += [
        'debug_toolbar',
    ]
    MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware', ]

DOCS_URL = 'api/docs/'

UNICEF_LOCATIONS_MODEL = 'core.Location'
# Static assets: collected into ~/.cache/promgen unless STATIC_ROOT overrides it.
STATIC_URL = "/static/"
STATIC_ROOT_DEFAULT = pathlib.Path.home() / ".cache" / "promgen"
STATIC_ROOT = env.str("STATIC_ROOT", default=str(STATIC_ROOT_DEFAULT))

SITE_ID = 1
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'

# Sentry is enabled only when SENTRY_DSN is present; the SDK reads the DSN
# itself from the environment, so init() is called without an explicit dsn.
if "SENTRY_DSN" in os.environ:
    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration
    from sentry_sdk.integrations.celery import CeleryIntegration

    # Tag events with the application version unless a release is already set.
    os.environ.setdefault("SENTRY_RELEASE", __version__)
    sentry_sdk.init(integrations=[DjangoIntegration(), CeleryIntegration()])

REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.TokenAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ),
    'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly', ),
    'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend', )
}

# If CELERY_BROKER_URL is set in our environment, then we configure celery as
# expected. If it is not configured, then we set CELERY_TASK_ALWAYS_EAGER to
# force celery to run all tasks in the same process (effectively runs each task
import logging

import sentry_sdk
from envparse import env
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.falcon import FalconIntegration
from sentry_sdk.integrations.redis import RedisIntegration

# Load configuration from the local .env file
env.read_envfile()

# Enable logging
logging.basicConfig(format='[%(asctime)s][%(levelname)s] %(message)s', level=logging.INFO)

# Disabled JSON file logging, kept for reference:
# formatter = json_log_formatter.JSONFormatter()
# json_handler = logging.FileHandler(filename='/var/log/wildsearch_app_log.json')
# json_handler.setFormatter(formatter)

logger = logging.getLogger(__name__)
# logger.addHandler(json_handler)

# Enable Sentry error reporting only when a DSN is configured
if env('SENTRY_DSN', default=None) is not None:
    sentry_sdk.init(env('SENTRY_DSN'), integrations=[FalconIntegration(), CeleryIntegration(), RedisIntegration()])
"level": "ERROR",
            "handlers": ["console"],
            "propagate": False,
        },
        # Errors logged by the SDK itself
        "sentry_sdk": {"level": "ERROR", "handlers": ["console"], "propagate": False},
        "django.security.DisallowedHost": {
            "level": "ERROR",
            "handlers": ["console"],
            "propagate": False,
        },
    },
}

# Sentry
# ------------------------------------------------------------------------------
SENTRY_DSN = env("SENTRY_DSN")
SENTRY_LOG_LEVEL = env.int("DJANGO_SENTRY_LOG_LEVEL", logging.INFO)

sentry_logging = LoggingIntegration(
    level=SENTRY_LOG_LEVEL,  # Capture info and above as breadcrumbs
    event_level=logging.ERROR,  # Send errors as events
)
sentry_sdk.init(
    dsn=SENTRY_DSN,
    integrations=[sentry_logging, DjangoIntegration(), CeleryIntegration()],
)

# Your stuff...
# ------------------------------------------------------------------------------
# Change this value if your application is running behind a proxy, # e.g. HTTP_CF_Connecting_IP for Cloudflare or X_FORWARDED_FOR REAL_IP_ENVIRON = os.environ.get("REAL_IP_ENVIRON", "REMOTE_ADDR") # The maximum length of a graphql query to log in tracings OPENTRACING_MAX_QUERY_LENGTH_LOG = 2000 # Slugs for menus precreated in Django migrations DEFAULT_MENUS = {"top_menu_name": "navbar", "bottom_menu_name": "footer"} # Sentry SENTRY_DSN = os.environ.get("SENTRY_DSN") if SENTRY_DSN: sentry_sdk.init(dsn=SENTRY_DSN, integrations=[CeleryIntegration(), DjangoIntegration()]) GRAPHENE = { "RELAY_CONNECTION_ENFORCE_FIRST_OR_LAST": True, "RELAY_CONNECTION_MAX_LIMIT": 100, "MIDDLEWARE": [ "saleor.graphql.middleware.OpentracingGrapheneMiddleware", "saleor.graphql.middleware.JWTMiddleware", "saleor.graphql.middleware.app_middleware", ], } PLUGINS_MANAGER = "saleor.plugins.manager.PluginsManager"
from urllib.parse import urlparse

# Rate limiting (Requests Respectful) shares the app's Redis instance;
# presumably REDIS_URL is always set in this environment — TODO confirm.
url_object = urlparse(os.getenv("REDIS_URL"))
RespectfulRequester.configure(
    redis={
        "host": url_object.hostname,
        "port": url_object.port,
        "password": url_object.password,
        "database": 0,
    },
    safety_threshold=5,
)

# Production hardening: force HTTPS and mark cookies secure.
SECURE_SSL_REDIRECT = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True

import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.redis import RedisIntegration

# NOTE(review): init is unconditional here; with SENTRY_DSN unset the SDK
# receives dsn=None and reporting is effectively disabled.
sentry_sdk.init(
    dsn=os.getenv("SENTRY_DSN"),
    integrations=[DjangoIntegration(), CeleryIntegration(), RedisIntegration()],
)

# Requests Respectful (rate limiting, waiting)
rr = RespectfulRequester()
rr.register_realm("Fitbit", max_requests=3600, timespan=3600)
def configure_sdk():
    """Initialise the Sentry SDK with a transport that multiplexes each event
    to the upstream DSN and, when configured, to a relay / internal project.
    """
    from sentry_sdk.integrations.celery import CeleryIntegration
    from sentry_sdk.integrations.django import DjangoIntegration
    from sentry_sdk.integrations.logging import LoggingIntegration
    from sentry_sdk.integrations.redis import RedisIntegration

    # Guard against double initialisation of the SDK.
    assert sentry_sdk.Hub.main.client is None

    sdk_options = dict(settings.SENTRY_SDK_CONFIG)
    relay_dsn = sdk_options.pop("relay_dsn", None)
    internal_project_key = get_project_key()
    upstream_dsn = sdk_options.pop("dsn", None)
    sdk_options["traces_sampler"] = traces_sampler

    if upstream_dsn:
        upstream_transport = make_transport(
            get_options(dsn=upstream_dsn, **sdk_options))
    else:
        upstream_transport = None

    # Prefer an explicit relay DSN; otherwise fall back to the internal
    # project's private DSN when one exists.
    if relay_dsn:
        relay_transport = make_transport(
            get_options(dsn=relay_dsn, **sdk_options))
    elif internal_project_key and internal_project_key.dsn_private:
        relay_transport = make_transport(
            get_options(dsn=internal_project_key.dsn_private, **sdk_options))
    else:
        relay_transport = None

    _override_on_full_queue(relay_transport, "internal.uncaptured.events.relay")
    _override_on_full_queue(upstream_transport, "internal.uncaptured.events.upstream")

    class MultiplexingTransport(sentry_sdk.transport.Transport):
        def capture_envelope(self, envelope):
            # Assume only transactions get sent via envelopes
            if options.get(
                    "transaction-events.force-disable-internal-project"):
                return
            self._capture_anything("capture_envelope", envelope)

        def capture_event(self, event):
            if event.get("type") == "transaction" and options.get(
                    "transaction-events.force-disable-internal-project"):
                return
            self._capture_anything("capture_event", event)

        def _capture_anything(self, method_name, *args, **kwargs):
            # Upstream should get the event first because it is most isolated from
            # the this sentry installation.
            if upstream_transport:
                metrics.incr("internal.captured.events.upstream")
                # TODO(mattrobenolt): Bring this back safely.
                # from sentry import options
                # install_id = options.get('sentry:install-id')
                # if install_id:
                #     event.setdefault('tags', {})['install-id'] = install_id
                getattr(upstream_transport, method_name)(*args, **kwargs)
            if relay_transport and options.get(
                    "store.use-relay-dsn-sample-rate") == 1:
                # If this is a envelope ensure envelope and it's items are distinct references
                if method_name == "capture_envelope":
                    args_list = list(args)
                    envelope = args_list[0]
                    relay_envelope = copy.copy(envelope)
                    relay_envelope.items = envelope.items.copy()
                    args = [relay_envelope, *args_list[1:]]
                if is_current_event_safe():
                    metrics.incr("internal.captured.events.relay")
                    getattr(relay_transport, method_name)(*args, **kwargs)
                else:
                    metrics.incr(
                        "internal.uncaptured.events.relay",
                        skip_internal=False,
                        tags={"reason": "unsafe"},
                    )

    sentry_sdk.init(
        transport=MultiplexingTransport(),
        integrations=[
            DjangoIntegration(),
            CeleryIntegration(),
            LoggingIntegration(event_level=None),
            RustInfoIntegration(),
            RedisIntegration(),
        ],
        **sdk_options,
    )
import os

import sentry_sdk
from celery import Celery
from django.conf import settings
from sentry_sdk.integrations.celery import CeleryIntegration

# Point Django at the project settings before Celery reads django.conf.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "nurseconnect_registration.settings")

app = Celery("nurseconnect_registration")
# All Celery settings live in Django settings under the CELERY_ prefix.
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()

sentry_sdk.init(dsn=settings.SENTRY_DSN, integrations=[CeleryIntegration()])


@app.task(bind=True)
def debug_task(self):
    # Smoke-test task: prints its own request context.
    print("Request: {0!r}".format(self.request))
request: dict = event["request"] request_headers: dict = request["headers"] origin_url: str = request_headers.get("Origin", "") referer_url: str = request_headers.get("Referer", "") event["tags"] = {"project": _get_project_name_from_url(referer_url)} ev_logger_name: str = event.get("logger", "") if ev_logger_name != "saleor.graphql.errors.handled": return event # RFC6454, origin is the triple: uri-scheme, uri-host[, uri-port] if any(origin_url.endswith(pwa_origin) for pwa_origin in PWA_ORIGINS): return event logger.info(f"Skipped error from ignored origin: {origin_url!r}") return None DEMO_SENTRY_DSN = os.environ.get("DEMO_SENTRY_DSN") if DEMO_SENTRY_DSN: sentry_sdk.init( DEMO_SENTRY_DSN, integrations=[CeleryIntegration(), DjangoIntegration()], before_send=before_send, ) ignore_logger("graphql.execution.utils") ignore_logger("graphql.execution.executor") ignore_logger("django.security.DisallowedHost")
# See https://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { "version": 1, "disable_existing_loggers": True, "formatters": {"verbose": {"format": "%(levelname)s %(asctime)s %(module)s " "%(process)d %(thread)d %(message)s"}}, "handlers": {"console": {"level": "DEBUG", "class": "logging.StreamHandler", "formatter": "verbose",}}, "root": {"level": "INFO", "handlers": ["console"]}, "loggers": { "django.db.backends": {"level": "ERROR", "handlers": ["console"], "propagate": False,}, # Errors logged by the SDK itself "sentry_sdk": {"level": "ERROR", "handlers": ["console"], "propagate": False}, "django.security.DisallowedHost": {"level": "ERROR", "handlers": ["console"], "propagate": False,}, }, } # Sentry # ------------------------------------------------------------------------------ SENTRY_DSN = env("SENTRY_DSN") SENTRY_LOG_LEVEL = env.int("DJANGO_SENTRY_LOG_LEVEL", logging.INFO) sentry_logging = LoggingIntegration( level=SENTRY_LOG_LEVEL, event_level=logging.ERROR, # Capture info and above as breadcrumbs # Send errors as events ) sentry_sdk.init( dsn=SENTRY_DSN, integrations=[sentry_logging, DjangoIntegration(), CeleryIntegration()], ) # Your stuff... # ------------------------------------------------------------------------------
# Fail fast at import time if the required Celery connection settings are missing.
if not os.environ.get('AMQP'):
    raise EnvironmentError("Celery AMQP connection is not setup.")
if not os.environ.get('CELERY_BACKEND'):
    raise EnvironmentError("Celery CELERY_BACKEND connection is not setup.")

app = Celery('dnstats', broker=os.environ.get('AMQP'), backend=os.environ.get('CELERY_BACKEND'), broker_pool_limit=50)
logger = get_task_logger('dnstats.scans')

# Sentry reporting is enabled everywhere except local development.
if os.environ.get('DNSTATS_ENV') != 'Development':
    import sentry_sdk
    from sentry_sdk.integrations.celery import CeleryIntegration
    sentry_sdk.init(os.environ.get("SENTRY"), integrations=[CeleryIntegration()])


@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # Daily schedule: nightly list import, morning scan run, evening charts.
    sender.add_periodic_task(crontab(hour=0, minute=58), import_list.s())
    sender.add_periodic_task(crontab(hour=8, minute=0), do_run.s())
    sender.add_periodic_task(crontab(hour=18, minute=0), do_charts_latest.s())


class SqlAlchemyTask(Task):
    """An abstract Celery Task that ensures that the connection to the
    database is closed on task completion.

    From: http://www.prschmid.com/2013/04/using-sqlalchemy-with-celery-tasks.html
    """
import config
from sql_persistence import session
from sql_persistence.interface import SQLPersistenceInterface

from eth_manager.contract_registry.ABIs import (erc20_abi,
                                                bancor_converter_abi,
                                                bancor_network_abi)

from eth_manager.eth_transaction_processor import EthTransactionProcessor
from eth_manager.transaction_supervisor import TransactionSupervisor
from eth_manager.task_manager import TaskManager
from eth_manager.blockchain_sync.blockchain_sync import BlockchainSyncer
import celery_utils

# Report worker errors to Sentry, tagged with the host so that multiple
# deployments can be told apart.
sentry_sdk.init(config.SENTRY_SERVER_DSN, integrations=[CeleryIntegration()])
with configure_scope() as scope:
    scope.set_tag("domain", config.APP_HOST)

# Chain selection is driven by celery_utils.chain (e.g. per-worker queue).
chain_config = config.CHAINS[celery_utils.chain]

from config import logg

logg.info(f'Using chain {celery_utils.chain}')

# Redis serves as both the broker and the result backend.
app = Celery('tasks',
             broker=config.REDIS_URL,
             backend=config.REDIS_URL,
             task_serializer='json')

app.conf.redbeat_lock_key = f'redbeat:lock:{config.REDBEAT_LOCK_ID}'
def create_app():
    """Application factory: configure the Flask app, database, cache, JWT,
    Celery settings, CORS, blueprints and Sentry, then return the app.

    One-time work (blueprint registration, Sentry init) is guarded by the
    module-level ``app_created`` flag so repeated calls are safe.
    """
    global app_created
    if not app_created:
        BlueprintsManager.register(app)
    Migrate(app, db)

    app.config.from_object(env('APP_CONFIG', default='config.ProductionConfig'))

    # A missing SECRET_KEY is fatal in production; in development we fall
    # back to a random per-process secret (sessions won't survive restarts).
    if not app.config['SECRET_KEY']:
        if app.config['PRODUCTION']:
            app.logger.error(
                'SECRET_KEY must be set in .env or environment variables in production'
            )
            exit(1)
        else:
            random_secret = secrets.token_hex()
            app.logger.warning(
                f'Using random secret "{ random_secret }" for development server. '
                'This is NOT recommended. Set proper SECRET_KEY in .env or environment variables'
            )
            app.config['SECRET_KEY'] = random_secret

    db.init_app(app)

    # Caching backend: simple in-process cache, or a no-op cache when disabled.
    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    # Placeholder key; the real key is presumably configured elsewhere — TODO confirm.
    stripe.api_key = 'SomeStripeKey'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)

    # set up jwt
    app.config['JWT_HEADER_TYPE'] = 'JWT'
    app.config['JWT_ACCESS_TOKEN_EXPIRES'] = timedelta(days=1)
    app.config['JWT_REFRESH_TOKEN_EXPIRES'] = timedelta(days=365)
    app.config['JWT_ERROR_MESSAGE_KEY'] = 'error'
    app.config['JWT_TOKEN_LOCATION'] = ['cookies', 'headers']
    app.config['JWT_REFRESH_COOKIE_PATH'] = '/v1/auth/token/refresh'
    app.config['JWT_SESSION_COOKIE'] = False
    app.config['JWT_BLACKLIST_ENABLED'] = True
    app.config['JWT_BLACKLIST_TOKEN_CHECKS'] = ['refresh']
    _jwt = JWTManager(app)
    _jwt.user_loader_callback_loader(jwt_user_loader)
    _jwt.token_in_blacklist_loader(is_token_blacklisted)

    # setup celery — Redis doubles as broker and result backend
    app.config['CELERY_BROKER_URL'] = app.config['REDIS_URL']
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']
    app.config['CELERY_ACCEPT_CONTENT'] = ['json', 'application/text']

    CORS(app, resources={r"/*": {"origins": "*"}})
    AuthManager.init_login(app)

    if app.config['TESTING'] and app.config['PROFILE']:
        # Profiling: report the 30 slowest calls per request
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # development api
    # Blueprint imports happen inside the app context to avoid circular imports.
    with app.app_context():
        from app.api.admin_statistics_api.events import event_statistics
        from app.api.auth import auth_routes
        from app.api.custom.attendees import attendee_blueprint
        from app.api.bootstrap import api_v1
        from app.api.celery_tasks import celery_routes
        from app.api.event_copy import event_copy
        from app.api.exports import export_routes
        from app.api.imports import import_routes
        from app.api.uploads import upload_routes
        from app.api.users import user_misc_routes
        from app.api.orders import order_misc_routes
        from app.api.role_invites import role_invites_misc_routes
        from app.api.auth import authorised_blueprint
        from app.api.admin_translations import admin_blueprint
        from app.api.orders import alipay_blueprint
        from app.api.settings import admin_misc_routes
        from app.api.server_version import info_route
        from app.api.custom.orders import ticket_blueprint
        from app.api.custom.orders import order_blueprint
        from app.api.custom.invoices import event_blueprint

        app.register_blueprint(api_v1)
        app.register_blueprint(event_copy)
        app.register_blueprint(upload_routes)
        app.register_blueprint(export_routes)
        app.register_blueprint(import_routes)
        app.register_blueprint(celery_routes)
        app.register_blueprint(auth_routes)
        app.register_blueprint(event_statistics)
        app.register_blueprint(user_misc_routes)
        app.register_blueprint(attendee_blueprint)
        app.register_blueprint(order_misc_routes)
        app.register_blueprint(role_invites_misc_routes)
        app.register_blueprint(authorised_blueprint)
        app.register_blueprint(admin_blueprint)
        app.register_blueprint(alipay_blueprint)
        app.register_blueprint(admin_misc_routes)
        app.register_blueprint(info_route)
        app.register_blueprint(ticket_blueprint)
        app.register_blueprint(order_blueprint)
        app.register_blueprint(event_blueprint)

    add_engine_pidguard(db.engine)

    if app.config['SQLALCHEMY_DATABASE_URI'].startswith("sqlite://"):
        sqlite_datetime_fix()

    sa.orm.configure_mappers()

    if app.config['SERVE_STATIC']:
        app.add_url_rule('/static/<path:filename>', endpoint='static', view_func=app.send_static_file)

    # sentry — initialised once, only when a DSN is configured
    if not app_created and 'SENTRY_DSN' in app.config:
        sentry_sdk.init(app.config['SENTRY_DSN'], integrations=[
            FlaskIntegration(),
            RedisIntegration(),
            CeleryIntegration(),
            SqlalchemyIntegration()
        ])

    # redis
    redis_store.init_app(app)

    # Initialize Extensions
    shell.init_app(app)
    limiter.init_app(app)

    app_created = True
    return app
def init_logger(self):
    """
    Initialize the logger. Call exactly once.

    Installs, in order: a rotating file handler, either a GUI dialog-box
    handler or a console handler, an in-memory string-list handler, optional
    Sentry error reporting, and finally an error-callback handler.
    """
    assert (self.name is not None)
    assert (self.author is not None)
    self.handlers = {}
    # Root logger vs. a named logger, depending on configuration.
    if self.is_root:
        self.log = logging.getLogger()
    else:
        self.log = logging.getLogger(self.name)
    if not self.propagate:
        self.log.propagate = False

    # set the root log level
    if self.verbose:
        self.log.setLevel(logging.DEBUG)
    else:
        self.log.setLevel(logging.INFO)

    if self.log.hasHandlers():
        self.log.info("Logger already initialized.")

    # create file handler
    if self.log_directory is None:
        self.log_directory = appdirs.user_log_dir(self.name, self.author)
    if self.log_directory is not None:
        if self.delete_existing_log_files:
            # Best-effort cleanup of old log files; files we cannot remove are skipped.
            for file_path in glob(
                    os.path.join(self.log_directory, "*%s" % self.log_extension)):
                try:
                    os.remove(file_path)
                except OSError:
                    pass
        os.makedirs(self.log_directory, exist_ok=True)
        self.log_path = os.path.join(
            self.log_directory, "%s%s" % (self.name, self.log_extension))
        file_handler = logging.handlers.RotatingFileHandler(
            self.log_path, maxBytes=self.max_bytes, backupCount=self.backup_count)
        file_handler.setFormatter(self.log_formatter)
        if self.verbose:
            file_handler.setLevel(logging.DEBUG)
        else:
            file_handler.setLevel(logging.INFO)
        self.log.addHandler(file_handler)
        self.handlers[HandlerType.File] = file_handler
        self.log.info('log file path : "%s" ("%s")' % (self.log_path, os.path.abspath(self.log_path)))

    if self.gui:
        # GUI will only pop up a dialog box - it's important that GUI not try to output to stdout or stderr
        # since that would likely cause a permissions error.
        dialog_box_handler = DialogBoxHandler(self.rate_limits)
        if self.verbose:
            dialog_box_handler.setLevel(logging.WARNING)
        else:
            dialog_box_handler.setLevel(logging.ERROR)
        self.log.addHandler(dialog_box_handler)
        self.handlers[HandlerType.DialogBox] = dialog_box_handler
    else:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(self.log_formatter)
        if self.verbose:
            console_handler.setLevel(logging.INFO)
        else:
            console_handler.setLevel(logging.WARNING)
        self.log.addHandler(console_handler)
        self.handlers[HandlerType.Console] = console_handler

    # Keep the most recent log records in memory (e.g. for display in a UI).
    string_list_handler = BalsaStringListHandler(
        self.max_string_list_entries)
    string_list_handler.setFormatter(self.log_formatter)
    string_list_handler.setLevel(logging.INFO)
    self.log.addHandler(string_list_handler)
    self.handlers[HandlerType.StringList] = string_list_handler

    # setting up Sentry error handling
    # For the Client to work you need a SENTRY_DSN environmental variable set, or one must be provided.
    if self.use_sentry:
        # inhibit_cloud_services disables reporting by sampling nothing.
        sample_rate = 0.0 if self.inhibit_cloud_services else 1.0
        integrations = []
        # Import only the requested integrations so the optional frameworks
        # (Django, Flask, ...) are not hard dependencies.
        if self.use_sentry_django:
            from sentry_sdk.integrations.django import DjangoIntegration
            integrations.append(DjangoIntegration())
        if self.use_sentry_flask:
            from sentry_sdk.integrations.flask import FlaskIntegration
            integrations.append(FlaskIntegration())
        if self.use_sentry_lambda:
            from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
            integrations.append(AwsLambdaIntegration())
        if self.use_sentry_sqlalchemy:
            from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
            integrations.append(SqlalchemyIntegration())
        if self.use_sentry_celery:
            from sentry_sdk.integrations.celery import CeleryIntegration
            integrations.append(CeleryIntegration())
        if self.sentry_dsn is None:
            if 'SENTRY_DSN' not in os.environ:
                raise ValueError(f"Missing sentry_dsn")
            else:
                sentry_sdk.init(
                    dsn=os.environ['SENTRY_DSN'],
                    sample_rate=sample_rate,
                    integrations=integrations,
                )
        else:
            sentry_sdk.init(
                dsn=self.sentry_dsn,
                sample_rate=sample_rate,
                integrations=integrations,
            )

    # error handler for callback on error or above
    # (this is last since the user may do a sys.exit() in the error callback)
    if self.error_callback is not None:
        error_callback_handler = BalsaNullHandler(self.error_callback)
        error_callback_handler.setLevel(logging.ERROR)
        self.log.addHandler(error_callback_handler)
        self.handlers[HandlerType.Callback] = error_callback_handler
"""expedition-merman-wrapper celery application."""
import os

from expedition_merman_wrapper import celery, create_app  # noqa: F401

app = create_app()
# Push an app context so tasks can use app-bound extensions at import time.
app.app_context().push()

# Sentry is enabled only when a DSN is configured in the environment.
SENTRY_DSN = os.getenv('SENTRY_DSN')
if SENTRY_DSN:
    import sentry_sdk
    from sentry_sdk.integrations.celery import CeleryIntegration

    sentry_sdk.init(dsn=SENTRY_DSN, integrations=(CeleryIntegration(), ))
def celery(sentry_init):
    """Build an eagerly-executing Celery app with the Sentry Celery
    integration installed (via the provided ``sentry_init`` callable)."""
    # Install the integration before the app is created.
    sentry_init(integrations=[CeleryIntegration()])

    eager_app = Celery(__name__)
    # Run tasks synchronously in-process so tests need no broker.
    eager_app.conf.CELERY_ALWAYS_EAGER = True
    return eager_app
# Change this value if your application is running behind a proxy, # e.g. HTTP_CF_Connecting_IP for Cloudflare or X_FORWARDED_FOR REAL_IP_ENVIRON = os.environ.get("REAL_IP_ENVIRON", "REMOTE_ADDR") # Slugs for menus precreated in Django migrations DEFAULT_MENUS = {"top_menu_name": "navbar", "bottom_menu_name": "footer"} # Slug for channel precreated in Django migrations DEFAULT_CHANNEL_SLUG = os.environ.get("DEFAULT_CHANNEL_SLUG", "default-channel") # Sentry sentry_sdk.utils.MAX_STRING_LENGTH = 4096 SENTRY_DSN = os.environ.get("SENTRY_DSN") SENTRY_OPTS = {"integrations": [CeleryIntegration(), DjangoIntegration()]} def SENTRY_INIT(dsn: str, sentry_opts: dict): """Init function for sentry. Will only be called if SENTRY_DSN is not None, during core start, can be overriden in separate settings file. """ sentry_sdk.init(dsn, **sentry_opts) ignore_logger("graphql.execution.utils") GRAPHENE = { "RELAY_CONNECTION_ENFORCE_FIRST_OR_LAST": True,
# GLOBAL_WARNING = "This is a message to display throughout the application." DEFAULT_AUTO_FIELD = "django.db.models.AutoField" try: from .secret import * # noqa except ImportError: pass # In-memory sqlite3 databases significantly speed up running tests. if TESTING: DATABASES["default"]["ENGINE"] = "django.db.backends.sqlite3" DATABASES["default"]["NAME"] = ":memory:" # Horrible hack to suppress all migrations to speed up the tests. MIGRATION_MODULES = helpers.MigrationMock() # FIXME: we really shouldn't have to do this. LOGGING_VERBOSE = re.search("-v ?[2-3]|--verbosity [2-3]", " ".join(sys.argv)) is not None elif PRODUCTION or SECRET_DATABASE_URL is not None: DATABASES["default"].update(helpers.parse_db_url(SECRET_DATABASE_URL)) else: # Default testing db config. DATABASES["default"].update({"NAME": "ion", "USER": "******", "PASSWORD": "******"}) # Set up sentry logging if PRODUCTION: # This is implicitly set up but we do this just in case sentry_logging = LoggingIntegration( level=logging.INFO, event_level=logging.ERROR # Capture info and above as breadcrumbs # Send errors as events ) sentry_sdk.init(SENTRY_PUBLIC_DSN, integrations=[DjangoIntegration(), sentry_logging, CeleryIntegration()], send_default_pii=True)
from .recognize import recognize
from .tpl import get_template

# Load configuration from the local .env file.
env.read_envfile()

celery = Celery('app')
celery.conf.update(
    broker_url=env('CELERY_BROKER_URL'),
    task_always_eager=env('CELERY_ALWAYS_EAGER', cast=bool, default=False),
    task_serializer='pickle',  # we transfer binary data like photos or voice messages,
    accept_content=['pickle'],
)

# Enable Sentry error reporting only when a DSN is configured.
if env('SENTRY_DSN', default=None) is not None:
    sentry_sdk.init(env('SENTRY_DSN'), integrations=[CeleryIntegration()])


@celery.task
def send_confirmation_mail(user_id):
    # Send the email-confirmation message for the given user id.
    user = User.get(User.pk == user_id)

    send_mail(
        to=user.email,
        subject='[Selfmailbot] Confirm your email',
        user_id=user.id,
        text=get_template('email/confirmation.txt').render(user=user),
    )


@celery.task
def send_text(user_id, subject, text, variables=None):
"""
This module defines celery tasks to run tasks (as persisted in the database
by means of `pycroft.model.task`) by using implementations as defined in
`pycroft.lib.task` (see `TaskImpl`).
"""
# Sentry is opt-in via PYCROFT_SENTRY_DSN.
if dsn := os.getenv('PYCROFT_SENTRY_DSN'):
    logging_integration = LoggingIntegration(
        level=logging.INFO,  # INFO / WARN create breadcrumbs, just as SQL queries
        event_level=logging.ERROR,  # errors and above are sent as events
    )
    sentry_sdk.init(
        dsn=dsn,
        integrations=[CeleryIntegration(), logging_integration],
        traces_sample_rate=1.0,
    )

app = Celery('tasks',
             backend=os.environ['PYCROFT_CELERY_RESULT_BACKEND_URI'],
             broker=os.environ['PYCROFT_CELERY_BROKER_URI'])
logger = get_task_logger(__name__)


@with_transaction
def write_task_message(task, message, log=False):
    # Persist a message on the task; optionally also log it.
    message = str(message)
    if log:
# Change this value if your application is running behind a proxy, # e.g. HTTP_CF_Connecting_IP for Cloudflare or X_FORWARDED_FOR REAL_IP_ENVIRON = os.environ.get("REAL_IP_ENVIRON", "REMOTE_ADDR") # The maximum length of a graphql query to log in tracings OPENTRACING_MAX_QUERY_LENGTH_LOG = 2000 # Slugs for menus precreated in Django migrations DEFAULT_MENUS = {"top_menu_name": "navbar", "bottom_menu_name": "footer"} # Sentry SENTRY_DSN = os.environ.get("SENTRY_DSN") if SENTRY_DSN: sentry_sdk.init( dsn=SENTRY_DSN, integrations=[CeleryIntegration(), DjangoIntegration()] ) GRAPHENE = { "RELAY_CONNECTION_ENFORCE_FIRST_OR_LAST": True, "RELAY_CONNECTION_MAX_LIMIT": 100, "MIDDLEWARE": [ "saleor.graphql.middleware.OpentracingGrapheneMiddleware", "saleor.graphql.middleware.JWTMiddleware", "saleor.graphql.middleware.app_middleware", ], } PLUGINS_MANAGER = "saleor.plugins.manager.PluginsManager" PLUGINS = [
def configure_sdk():
    """Initialise the Sentry SDK with a multiplexing transport that mirrors
    events to the upstream DSN and, when configured, to a relay / internal
    project; each transport is wrapped with metrics instrumentation.
    """
    from sentry_sdk.integrations.celery import CeleryIntegration
    from sentry_sdk.integrations.django import DjangoIntegration
    from sentry_sdk.integrations.logging import LoggingIntegration
    from sentry_sdk.integrations.redis import RedisIntegration

    # Guard against double initialisation of the SDK.
    assert sentry_sdk.Hub.main.client is None

    sdk_options = dict(settings.SENTRY_SDK_CONFIG)
    relay_dsn = sdk_options.pop("relay_dsn", None)
    internal_project_key = get_project_key()
    upstream_dsn = sdk_options.pop("dsn", None)
    sdk_options["traces_sampler"] = traces_sampler

    if upstream_dsn:
        transport = make_transport(get_options(dsn=upstream_dsn, **sdk_options))
        upstream_transport = patch_transport_for_instrumentation(transport, "upstream")
    else:
        upstream_transport = None

    # Prefer an explicit relay DSN; otherwise fall back to the internal
    # project's private DSN when one exists.
    if relay_dsn:
        transport = make_transport(get_options(dsn=relay_dsn, **sdk_options))
        relay_transport = patch_transport_for_instrumentation(transport, "relay")
    elif internal_project_key and internal_project_key.dsn_private:
        transport = make_transport(get_options(dsn=internal_project_key.dsn_private, **sdk_options))
        relay_transport = patch_transport_for_instrumentation(transport, "relay")
    else:
        relay_transport = None

    _override_on_full_queue(relay_transport, "internal.uncaptured.events.relay")
    _override_on_full_queue(upstream_transport, "internal.uncaptured.events.upstream")

    class MultiplexingTransport(sentry_sdk.transport.Transport):
        def capture_envelope(self, envelope):
            # Temporarily capture envelope counts to compare to ingested
            # transactions.
            metrics.incr("internal.captured.events.envelopes")
            transaction = envelope.get_transaction_event()

            # Temporarily also capture counts for one specific transaction to check ingested amount
            if (
                transaction
                and transaction.get("transaction")
                == "/api/0/organizations/{organization_slug}/issues/"
            ):
                metrics.incr("internal.captured.events.envelopes.issues")

            if transaction:
                metrics.incr("internal.captured.events.transactions")

            # Assume only transactions get sent via envelopes
            if options.get("transaction-events.force-disable-internal-project"):
                return

            self._capture_anything("capture_envelope", envelope)

        def capture_event(self, event):
            if event.get("type") == "transaction" and options.get(
                "transaction-events.force-disable-internal-project"
            ):
                return

            self._capture_anything("capture_event", event)

        def _capture_anything(self, method_name, *args, **kwargs):
            # Upstream should get the event first because it is most isolated from
            # the this sentry installation.
            if upstream_transport:
                metrics.incr("internal.captured.events.upstream")
                # TODO(mattrobenolt): Bring this back safely.
                # from sentry import options
                # install_id = options.get('sentry:install-id')
                # if install_id:
                #     event.setdefault('tags', {})['install-id'] = install_id
                getattr(upstream_transport, method_name)(*args, **kwargs)

            if relay_transport and options.get("store.use-relay-dsn-sample-rate") == 1:
                if is_current_event_safe():
                    metrics.incr("internal.captured.events.relay")
                    getattr(relay_transport, method_name)(*args, **kwargs)
                else:
                    metrics.incr(
                        "internal.uncaptured.events.relay",
                        skip_internal=False,
                        tags={"reason": "unsafe"},
                    )

    sentry_sdk.init(
        transport=MultiplexingTransport(),
        integrations=[
            DjangoIntegration(),
            CeleryIntegration(),
            LoggingIntegration(event_level=None),
            RustInfoIntegration(),
            RedisIntegration(),
        ],
        **sdk_options,
    )
# -*- coding: utf-8 -*- from .base import * # noqa: F401 SENTRY_DSN = os.getenv('SENTRY_DSN') if SENTRY_DSN: import sentry_sdk from sentry_sdk.integrations.celery import CeleryIntegration from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.redis import RedisIntegration from sentry_sdk.integrations.tornado import TornadoIntegration sentry_sdk.init( SENTRY_DSN, traces_sample_rate=1.0, integrations=[CeleryIntegration(), DjangoIntegration(), RedisIntegration(), TornadoIntegration()], ) DEBUG = False INTERNAL_IPS = [ '127.0.0.1', ]
def ENV_SETTING(key, default=None):
    """Read *key* from the environment via django-environ's ``get_value``,
    falling back to *default* (None when omitted).

    BUG FIX: ``default`` previously had no default value, so the
    one-argument calls below (e.g. ``ENV_SETTING("ENABLE_SENTRY")``) raised
    TypeError at import time. Adding ``default=None`` is backward compatible
    with any two-argument callers.
    """
    return env.get_value(key, default=default)


# Sentry is opt-in through string-valued env flags ("True" enables it);
# the Redis and Celery integrations are imported lazily, only if requested.
if ENV_SETTING("ENABLE_SENTRY") == "True":
    integrations = [DjangoIntegration()]
    if ENV_SETTING("SENTRY_REDIS") == "True":
        from sentry_sdk.integrations.redis import RedisIntegration
        integrations.append(RedisIntegration())
    if ENV_SETTING("SENTRY_CELERY") == "True":
        from sentry_sdk.integrations.celery import CeleryIntegration
        integrations.append(CeleryIntegration())
    sentry_sdk.init(
        dsn=ENV_SETTING("SENTRY_DSN"),
        integrations=integrations,
        send_default_pii=True,
    )

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = Path(__file__).resolve().parents[2]

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "y#swzfnq4br$0vmcjs@yej^&qmv_tualw#(awwi=he@=!@#&8u"
"handlers": ["console"],
            "propagate": False,
        },
    },
}

# Sentry
# ------------------------------------------------------------------------------
SENTRY_DSN = env("SENTRY_DSN")
SENTRY_LOG_LEVEL = env.int("DJANGO_SENTRY_LOG_LEVEL", logging.INFO)

sentry_logging = LoggingIntegration(
    level=SENTRY_LOG_LEVEL,  # Capture info and above as breadcrumbs
    event_level=logging.ERROR,  # Send errors as events
)
integrations = [
    sentry_logging,
    DjangoIntegration(),
    CeleryIntegration(),
    RedisIntegration(),
]
sentry_sdk.init(
    dsn=SENTRY_DSN,
    integrations=integrations,
    environment=env("SENTRY_ENVIRONMENT", default="production"),
    # Tracing is off by default (sample rate 0.0) unless overridden via env.
    traces_sample_rate=env.float("SENTRY_TRACES_SAMPLE_RATE", default=0.0),
)

# Your stuff...
# ------------------------------------------------------------------------------
if local_env_file := os.getenv("LOCAL_ENV_FILE"): local_config_files = [local_env_file] else: local_config_files = glob.iglob("local.*.env") for local_config_file in glob.iglob("local.*.env"): logging.info(f'Importing environment from "{local_config_file}".') for line in open(local_config_file): clean_line = line.strip() eq_idx = clean_line.find("=") if 0 < eq_idx < len(clean_line) - 1: os.environ[clean_line[:eq_idx]] = clean_line[eq_idx + 1 :] # noqa sentry_sdk.init( dsn=os.getenv("SENTRY_DSN"), integrations=[CeleryIntegration(), SqlalchemyIntegration(), AioHttpIntegration(), RedisIntegration()], ) def getenv_boolean(var_name: str, default_value: bool = False) -> bool: result = default_value env_value = os.getenv(var_name) if env_value is not None: result = env_value.upper() in ("TRUE", "1") return result def getenv_int(var_name: str, default_value: int = 0) -> int: result = default_value env_value = os.getenv(var_name) if env_value: