示例#1
0
文件: balsa.py 项目: gh-mrice/balsa
    def init_logger(self):
        """
        Initialize the logger.  Call exactly once.

        Builds the underlying logging.Logger (root or named) and attaches, as
        configured on this instance: a rotating file handler, a GUI dialog-box
        or console handler, an in-memory string-list handler, Sentry error
        reporting, and an error-callback handler.  Each created handler is
        recorded in self.handlers keyed by HandlerType.
        """

        assert self.name is not None
        assert self.author is not None
        self.handlers = {}
        # root logger captures everything; a named logger keeps this app's
        # records separable from other libraries
        if self.is_root:
            self.log = logging.getLogger()
        else:
            self.log = logging.getLogger(self.name)
        if not self.propagate:
            self.log.propagate = False

        # set the root log level - handlers below may filter further
        self.log.setLevel(logging.DEBUG if self.verbose else logging.INFO)

        if self.log.hasHandlers():
            self.log.info("Logger already initialized.")

        # create file handler
        if self.log_directory is None:
            self.log_directory = appdirs.user_log_dir(self.name, self.author)
        if self.log_directory is not None:
            if self.delete_existing_log_files:
                # glob since the rotating handler may have produced many files
                for file_path in glob(
                        os.path.join(self.log_directory,
                                     "*%s" % self.log_extension)):
                    try:
                        os.remove(file_path)
                    except OSError:
                        # best effort - a locked/in-use log file is not fatal
                        pass
            os.makedirs(self.log_directory, exist_ok=True)
            self.log_path = os.path.join(
                self.log_directory, "%s%s" % (self.name, self.log_extension))
            file_handler = logging.handlers.RotatingFileHandler(
                self.log_path,
                maxBytes=self.max_bytes,
                backupCount=self.backup_count)
            file_handler.setFormatter(self.log_formatter)
            file_handler.setLevel(
                logging.DEBUG if self.verbose else logging.INFO)
            self.log.addHandler(file_handler)
            self.handlers[HandlerType.File] = file_handler
            self.log.info('log file path : "%s" ("%s")' %
                          (self.log_path, os.path.abspath(self.log_path)))

        if self.gui:
            # GUI will only pop up a dialog box - it's important that GUI apps not try to output to stdout or stderr
            # since that would likely cause a permissions error.
            dialog_box_handler = DialogBoxHandler(self.rate_limits)
            dialog_box_handler.setLevel(
                logging.WARNING if self.verbose else logging.ERROR)
            self.log.addHandler(dialog_box_handler)
            self.handlers[HandlerType.DialogBox] = dialog_box_handler
        else:
            console_handler = logging.StreamHandler()
            console_handler.setFormatter(self.log_formatter)
            console_handler.setLevel(
                logging.INFO if self.verbose else logging.WARNING)
            self.log.addHandler(console_handler)
            self.handlers[HandlerType.Console] = console_handler

        # keep the most recent log records in memory for programmatic access
        string_list_handler = BalsaStringListHandler(
            self.max_string_list_entries)
        string_list_handler.setFormatter(self.log_formatter)
        string_list_handler.setLevel(logging.INFO)
        self.log.addHandler(string_list_handler)
        self.handlers[HandlerType.StringList] = string_list_handler

        # setting up Sentry error handling
        # For the Client to work you need a SENTRY_DSN environmental variable set, or one must be provided.
        if self.use_sentry:
            # inhibit_cloud_services disables event upload without changing code paths
            sample_rate = 0.0 if self.inhibit_cloud_services else 1.0
            integrations = []
            # integrations are imported lazily so optional dependencies are
            # only required when their corresponding flag is enabled
            if self.use_sentry_django:
                from sentry_sdk.integrations.django import DjangoIntegration
                integrations.append(DjangoIntegration())
            if self.use_sentry_flask:
                from sentry_sdk.integrations.flask import FlaskIntegration
                integrations.append(FlaskIntegration())
            if self.use_sentry_lambda:
                from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
                integrations.append(AwsLambdaIntegration())
            if self.use_sentry_sqlalchemy:
                from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
                integrations.append(SqlalchemyIntegration())
            if self.use_sentry_celery:
                from sentry_sdk.integrations.celery import CeleryIntegration
                integrations.append(CeleryIntegration())

            if self.sentry_dsn is None:
                if 'SENTRY_DSN' not in os.environ:
                    # plain string: the original f-string had no placeholders
                    raise ValueError("Missing sentry_dsn")
                sentry_sdk.init(
                    dsn=os.environ['SENTRY_DSN'],
                    sample_rate=sample_rate,
                    integrations=integrations,
                )
            else:
                sentry_sdk.init(
                    dsn=self.sentry_dsn,
                    sample_rate=sample_rate,
                    integrations=integrations,
                )

        # error handler for callback on error or above
        # (this is last since the user may do a sys.exit() in the error callback)
        if self.error_callback is not None:
            error_callback_handler = BalsaNullHandler(self.error_callback)
            error_callback_handler.setLevel(logging.ERROR)
            self.log.addHandler(error_callback_handler)
            self.handlers[HandlerType.Callback] = error_callback_handler
示例#2
0
def test_orm_queries(sentry_init, capture_events):
    """ORM INSERT/SELECT statements must appear as 'query' breadcrumbs with params."""
    sentry_init(integrations=[SqlalchemyIntegration()],
                _experiments={"record_sql_params": True})
    events = capture_events()

    Base = declarative_base()

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    # in-memory database: nothing touches disk
    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    session = sessionmaker(bind=engine)()

    new_person = Person(name="Bob")
    session.add(new_person)

    # querying flushes the pending INSERT, then issues the SELECT
    assert session.query(Person).first() == new_person

    capture_message("hi")

    [event] = events

    # timestamps are nondeterministic; strip them before comparing
    for breadcrumb in event["breadcrumbs"]:
        del breadcrumb["timestamp"]

    assert event["breadcrumbs"][-2:] == [
        {
            "category": "query",
            "data": {"db.params": ["Bob"], "db.paramstyle": "qmark"},
            "message": "INSERT INTO person (name) VALUES (?)",
            "type": "default",
        },
        {
            "category": "query",
            "data": {"db.params": [1, 0], "db.paramstyle": "qmark"},
            "message": (
                "SELECT person.id AS person_id, person.name AS person_name \n"
                "FROM person\n"
                " LIMIT ? OFFSET ?"
            ),
            "type": "default",
        },
    ]
示例#3
0
def create_app():
    """Application factory: configure and return the module-global Flask ``app``.

    One-time work (blueprint/GraphQL registration, Sentry init) is guarded by
    the module-level ``app_created`` flag; the remaining configuration is
    (re)applied on every call.
    """
    global app_created
    if not app_created:
        # one-time registration of blueprints and GraphQL views
        BlueprintsManager.register(app)
        graphql_views.init_app(app)
    Migrate(app, db)

    # APP_CONFIG names the config object to load; production is the default
    app.config.from_object(env('APP_CONFIG',
                               default='config.ProductionConfig'))

    if not app.config['SECRET_KEY']:
        if app.config['PRODUCTION']:
            # refuse to run in production without an explicit secret
            app.logger.error(
                'SECRET_KEY must be set in .env or environment variables in production'
            )
            exit(1)
        else:
            # development fallback: per-process random secret
            # (sessions/tokens will not survive a restart)
            random_secret = secrets.token_hex()
            app.logger.warning(
                f'Using random secret "{ random_secret }" for development server. '
                'This is NOT recommended. Set proper SECRET_KEY in .env or environment variables'
            )
            app.config['SECRET_KEY'] = random_secret

    db.init_app(app)

    # cache backend: simple in-memory when caching is on, no-op otherwise
    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    # NOTE(review): placeholder Stripe key hard-coded here - confirm it is
    # replaced with a real key before any payment calls are made
    stripe.api_key = 'SomeStripeKey'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    # log errors and above to stdout
    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)

    # set up jwt
    app.config['JWT_HEADER_TYPE'] = 'JWT'
    app.config['JWT_ACCESS_TOKEN_EXPIRES'] = timedelta(days=1)
    app.config['JWT_REFRESH_TOKEN_EXPIRES'] = timedelta(days=365)
    app.config['JWT_ERROR_MESSAGE_KEY'] = 'error'
    app.config['JWT_TOKEN_LOCATION'] = ['cookies', 'headers']
    app.config['JWT_REFRESH_COOKIE_PATH'] = '/v1/auth/token/refresh'
    app.config['JWT_SESSION_COOKIE'] = False
    app.config['JWT_BLACKLIST_ENABLED'] = True
    app.config['JWT_BLACKLIST_TOKEN_CHECKS'] = ['refresh']
    _jwt = JWTManager(app)
    _jwt.user_loader_callback_loader(jwt_user_loader)
    _jwt.token_in_blacklist_loader(is_token_blacklisted)

    # setup celery
    app.config['CELERY_BROKER_URL'] = app.config['REDIS_URL']
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']
    app.config['CELERY_ACCEPT_CONTENT'] = ['json', 'application/text']

    app.config['MAIL_RECORDER'] = MailRecorder(use_env=True)

    # allow cross-origin requests from any origin on all routes
    CORS(app, resources={r"/*": {"origins": "*"}})
    AuthManager.init_login(app)

    if app.config['TESTING'] and app.config['PROFILE']:
        # Profiling
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # development api
    # imports are done inside the app context because these modules build
    # their blueprints/routes at import time
    with app.app_context():
        from app.api.admin_statistics_api.events import event_statistics
        from app.api.auth import auth_routes
        from app.api.custom.attendees import attendee_blueprint
        from app.api.bootstrap import api_v1
        from app.api.celery_tasks import celery_routes
        from app.api.event_copy import event_copy
        from app.api.exports import export_routes
        from app.api.imports import import_routes
        from app.api.uploads import upload_routes
        from app.api.users import user_misc_routes
        from app.api.orders import order_misc_routes
        from app.api.role_invites import role_invites_misc_routes
        from app.api.auth import authorised_blueprint
        from app.api.admin_translations import admin_blueprint
        from app.api.orders import alipay_blueprint
        from app.api.sessions import sessions_blueprint
        from app.api.settings import admin_misc_routes
        from app.api.server_version import info_route
        from app.api.custom.orders import ticket_blueprint
        from app.api.custom.orders import order_blueprint
        from app.api.custom.invoices import event_blueprint
        from app.api.custom.calendars import calendar_routes

        app.register_blueprint(api_v1)
        app.register_blueprint(event_copy)
        app.register_blueprint(upload_routes)
        app.register_blueprint(export_routes)
        app.register_blueprint(import_routes)
        app.register_blueprint(celery_routes)
        app.register_blueprint(auth_routes)
        app.register_blueprint(event_statistics)
        app.register_blueprint(user_misc_routes)
        app.register_blueprint(attendee_blueprint)
        app.register_blueprint(order_misc_routes)
        app.register_blueprint(role_invites_misc_routes)
        app.register_blueprint(authorised_blueprint)
        app.register_blueprint(admin_blueprint)
        app.register_blueprint(alipay_blueprint)
        app.register_blueprint(admin_misc_routes)
        app.register_blueprint(info_route)
        app.register_blueprint(ticket_blueprint)
        app.register_blueprint(order_blueprint)
        app.register_blueprint(event_blueprint)
        app.register_blueprint(sessions_blueprint)
        app.register_blueprint(calendar_routes)

        add_engine_pidguard(db.engine)

        # sqlite needs a datetime workaround (not applied for other backends)
        if app.config['SQLALCHEMY_DATABASE_URI'  # pytype: disable=attribute-error
                      ].startswith("sqlite://"):
            sqlite_datetime_fix()

    sa.orm.configure_mappers()

    if app.config['SERVE_STATIC']:
        app.add_url_rule('/static/<path:filename>',
                         endpoint='static',
                         view_func=app.send_static_file)

    # sentry
    # initialize only once per process and only when a DSN is configured
    if not app_created and 'SENTRY_DSN' in app.config:
        sentry_sdk.init(
            app.config['SENTRY_DSN'],
            integrations=[
                FlaskIntegration(),
                RedisIntegration(),
                CeleryIntegration(),
                SqlalchemyIntegration(),
            ],
            release=app.config['SENTRY_RELEASE_NAME'],
            traces_sample_rate=app.config['SENTRY_TRACES_SAMPLE_RATE'],
        )

    # redis
    redis_store.init_app(app)

    # Initialize Extensions
    shell.init_app(app)
    limiter.init_app(app)

    app_created = True
    return app
示例#4
0
文件: main.py 项目: levakin/netmon
from app.api.api_v1.api import api_router
from app.core.config import settings
from fastapi import FastAPI
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from starlette.middleware.cors import CORSMiddleware

# build the FastAPI app; the OpenAPI schema is served under the API prefix
app = FastAPI(
    title=settings.PROJECT_NAME, openapi_url=f"{settings.API_V1_STR}/openapi.json"
)

# Set all CORS enabled origins
if settings.BACKEND_CORS_ORIGINS:
    app.add_middleware(
        CORSMiddleware,
        allow_origins=[str(origin) for origin in settings.BACKEND_CORS_ORIGINS],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

# mount all versioned API routes under the configured prefix
app.include_router(api_router, prefix=settings.API_V1_STR)

# Sentry error reporting: only active when a DSN is configured
if settings.SENTRY_DSN:
    sentry_sdk.init(
        dsn=settings.SENTRY_DSN,
        integrations=[AioHttpIntegration(), SqlalchemyIntegration()],
    )
    # the ASGI middleware captures unhandled exceptions from request handling
    app.add_middleware(SentryAsgiMiddleware)
示例#5
0
                                    app_id=app.config['ONESIGNAL_CSH_APP_ID'])

# OneSignal push-notification client for the intro app
intro_onesignal_client = onesignal.Client(user_auth_key=app.config['ONESIGNAL_USER_AUTH_KEY'],
                                    app_auth_key=app.config['ONESIGNAL_INTRO_APP_AUTH_KEY'],
                                    app_id=app.config['ONESIGNAL_INTRO_APP_ID'])

# OIDC Auth
auth = OIDCAuthentication({'app': APP_CONFIG}, app)

# LDAP
_ldap = csh_ldap.CSHLDAP(app.config['LDAP_BIND_DN'], app.config['LDAP_BIND_PASS'])

# Sentry
sentry_sdk.init(
    dsn=app.config['SENTRY_DSN'],
    integrations=[FlaskIntegration(), SqlalchemyIntegration()]
)

app.logger.info('OIDCAuth and LDAP configured')

# these modules register routes/commands against the app at import time, so
# they must be imported after the objects above exist
# pylint: disable=wrong-import-position
from . import models
from . import context_processors
from . import commands
from .routes import api, shared

# realm-specific route sets: the 'csh' realm loads member/admin routes,
# any other realm loads the freshmen routes
if app.config['REALM'] == 'csh':
    from .routes import upperclassmen
    from .routes import admin
else:
    from .routes import freshmen
示例#6
0
def test_too_large_event_truncated(sentry_init, capture_events):
    """An oversized event must be trimmed to stay near MAX_EVENT_BYTES, deterministically."""
    sentry_init(
        traces_sample_rate=1,
        integrations=[SqlalchemyIntegration()],
        _experiments={"smart_transaction_trimming": True},
    )
    events = capture_events()

    oversized_message = "x" * (MAX_STRING_LENGTH + 10)

    with configure_scope() as scope:

        @scope.add_event_processor
        def processor(event, hint):
            event["message"] = oversized_message
            return event

    engine = create_engine("sqlite:///:memory:")
    # generate far more span data than fits into one event
    with start_transaction(name="test"):
        with engine.connect() as con:
            for _ in range(2000):
                con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))

    [event] = events

    # Because of attached metadata in the "_meta" key, we may send out a little
    # bit more than MAX_EVENT_BYTES.
    assert len(json_dumps(event)) < 1.2 * MAX_EVENT_BYTES

    # Some spans are discarded.
    assert len(event["spans"]) == 1000

    # Some spans have their descriptions truncated. Because the test always
    # generates the same amount of descriptions and truncation is deterministic,
    # the number here should never change across test runs.
    #
    # Which exact span descriptions are truncated depends on the span durations
    # of each SQL query and is non-deterministic.
    assert len(event["_meta"]["spans"]) == 537

    for index, span in enumerate(event["spans"]):
        description = span["description"]

        assert description.startswith("SELECT ")
        if str(index) in event["_meta"]["spans"]:
            # Description must have been truncated
            assert len(description) == 10
            assert description.endswith("...")
        else:
            # Description was not truncated, check for original length
            assert len(description) == 1583
            assert description.endswith("SELECT 98 UNION SELECT 99")

    # Smoke check the meta info for one of the spans.
    first_span_meta = next(iter(event["_meta"]["spans"].values()))
    assert first_span_meta == {
        "description": {"": {"len": 1583, "rem": [["!limit", "x", 7, 10]]}}
    }

    # Smoke check that truncation of other fields has not changed.
    assert len(event["message"]) == MAX_STRING_LENGTH

    # The _meta for other truncated fields should be there as well.
    assert event["_meta"]["message"] == {
        "": {"len": 522, "rem": [["!limit", "x", 509, 512]]}
    }
示例#7
0
    def init_logger(self):
        """
        Initialize the logger.  Call exactly once.

        Builds the underlying logging.Logger (root or named) and attaches, as
        configured on this instance: a rotating file handler, a GUI dialog-box
        or console handler, an in-memory string-list handler, and Sentry error
        reporting.  Each created handler is recorded in self.handlers keyed by
        HandlerType.
        """

        log_formatter = BalsaFormatter(self.log_formatter_string)

        assert self.name is not None
        assert self.author is not None
        self.handlers = {}
        # root logger captures everything; a named logger keeps this app's
        # records separable from other libraries
        if self.is_root:
            self.log = logging.getLogger()
        else:
            self.log = logging.getLogger(self.name)
        if not self.propagate:
            self.log.propagate = False

        # set the root log level
        if self.verbose:
            self.log.setLevel(logging.DEBUG)
        else:
            self.log.setLevel(logging.INFO)

        if self.log.hasHandlers():
            self.log.info("Logger already initialized.")

        # file logging can be turned off, e.g. for cloud environments where it's
        # not recommended and/or not possible to write to the local file system
        if self.use_file_logging:
            # create file handler
            if self.log_directory is None:
                self.log_directory = Path(
                    appdirs.user_log_dir(self.name, self.author))

            if self.log_directory is not None:

                if isinstance(self.log_directory, str):
                    self.log_directory = Path(self.log_directory)

                self.log_directory.mkdir(parents=True, exist_ok=True)
                if self.delete_existing_log_files:
                    # need to glob since there are potentially many files due to the "rotating" file handler
                    # (call .glob() on the Path instance rather than the unbound Path.glob)
                    for file_path in self.log_directory.glob(
                            f"*{self.log_extension}"):
                        try:
                            file_path.unlink()
                        except OSError:
                            # best effort - a locked/in-use log file is not fatal
                            pass

                # instance_name distinguishes log files of multiple instances
                if self.instance_name is None:
                    file_name = f"{self.name}{self.log_extension}"
                else:
                    file_name = f"{self.name}_{self.instance_name}{self.log_extension}"
                self.log_path = Path(self.log_directory, file_name)

                file_handler = logging.handlers.RotatingFileHandler(
                    self.log_path,
                    maxBytes=self.max_bytes,
                    backupCount=self.backup_count)
                file_handler.setFormatter(log_formatter)
                if self.verbose:
                    file_handler.setLevel(logging.DEBUG)
                else:
                    file_handler.setLevel(logging.INFO)
                self.log.addHandler(file_handler)
                self.handlers[HandlerType.File] = file_handler
                self.log.info(
                    f'log file path : "{self.log_path}" ("{self.log_path.absolute()}")'
                )

        if self.gui:
            # GUI will only pop up a dialog box - it's important that GUI apps not try to output to stdout or stderr
            # since that would likely cause a permissions error.
            dialog_box_handler = DialogBoxHandler(self.rate_limits)
            if self.verbose:
                dialog_box_handler.setLevel(logging.WARNING)
            else:
                dialog_box_handler.setLevel(logging.ERROR)
            self.log.addHandler(dialog_box_handler)
            self.handlers[HandlerType.DialogBox] = dialog_box_handler

            self.set_std()  # redirect stdout and stderr to log
        else:
            console_handler = logging.StreamHandler()
            # prefix for things like "\n" or "\r"
            console_handler.setFormatter(
                BalsaFormatter(
                    f"{self.log_console_prefix}{self.log_formatter_string}"))
            if self.verbose:
                console_handler.setLevel(logging.INFO)
            else:
                console_handler.setLevel(logging.WARNING)
            self.log.addHandler(console_handler)
            self.handlers[HandlerType.Console] = console_handler

        # keep the most recent log records in memory for programmatic access
        string_list_handler = BalsaStringListHandler(
            self.max_string_list_entries)
        string_list_handler.setFormatter(log_formatter)
        string_list_handler.setLevel(logging.INFO)
        self.log.addHandler(string_list_handler)
        self.handlers[HandlerType.StringList] = string_list_handler

        # setting up Sentry error handling
        # For the Client to work you need a SENTRY_DSN environmental variable set, or one must be provided.
        if self.use_sentry:

            # optionally raise Sentry's string truncation limit
            if self.sentry_max_string_len is not None:
                sentry_sdk.utils.MAX_STRING_LENGTH = self.sentry_max_string_len

            # inhibit_cloud_services disables event upload without changing code paths
            sample_rate = 0.0 if self.inhibit_cloud_services else 1.0
            integrations = []
            # integrations are imported lazily so optional dependencies are
            # only required when their corresponding flag is enabled
            if self.use_sentry_django:
                from sentry_sdk.integrations.django import DjangoIntegration

                integrations.append(DjangoIntegration())
            if self.use_sentry_flask:
                from sentry_sdk.integrations.flask import FlaskIntegration

                integrations.append(FlaskIntegration())
            if self.use_sentry_lambda:
                from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration

                integrations.append(AwsLambdaIntegration())
            if self.use_sentry_sqlalchemy:
                from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration

                integrations.append(SqlalchemyIntegration())
            if self.use_sentry_celery:
                from sentry_sdk.integrations.celery import CeleryIntegration

                integrations.append(CeleryIntegration())

            if self.sentry_dsn is None:
                if (sentry_dsn := self.get_sentry_dsn_via_env_var()) is None:
                    raise ValueError(
                        "Missing Sentry DSN - either set as an environmental variable or a parameter to the Balsa constructor"
                    )
                else:
                    sentry_sdk.init(
                        dsn=sentry_dsn,
                        sample_rate=sample_rate,
                        integrations=integrations,
                    )
            else:
                sentry_sdk.init(
                    dsn=self.sentry_dsn,
                    sample_rate=sample_rate,
                    integrations=integrations,
                )
示例#8
0
)
origins = ["*"]

# allow cross-origin requests from any origin, credentials included
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Sentry error reporting, tagged with the backend version and environment
# NOTE(review): DSN is hard-coded here rather than read from config - confirm
# this is intentional
sentry_sdk.init(
    release=f"{config.BACKEND_VERSION}",
    environment=f"{config.ENVIRONMENT}",
    dsn="https://[email protected]/2",
    integrations=[SqlalchemyIntegration(), RedisIntegration()],
)
# record this host's external IP on all subsequent Sentry events
sentry_sdk.set_tag('panel.ip', get_external_ip())


@app.middleware("http")
async def sentry_exception(request: Request, call_next):
    """HTTP middleware that reports unhandled exceptions to Sentry.

    Passes the request through; on an unhandled exception it attaches the
    request context and client IP to a Sentry scope (when Sentry is enabled)
    and re-raises so the framework can still produce its error response.
    """
    try:
        response = await call_next(request)
        return response
    except Exception as e:
        if config.ENABLE_SENTRY:
            with sentry_sdk.push_scope() as scope:
                scope.set_context("request", request)
                scope.user = {"ip_address": request.client.host}
                sentry_sdk.capture_exception(e)
        # the original swallowed the exception and implicitly returned None,
        # which violates the middleware contract; re-raise after capturing
        raise
示例#9
0
    def __init__(self, log):
        """Set up the bot: base Bot configuration, helpers, database, Sentry, extensions.

        :param log: logger instance used for all startup/progress messages
        """
        super().__init__(
            command_prefix=_prefix_callable,
            description=__description__,
            help_command=commands.DefaultHelpCommand(dm_help=True),
            case_insensitive=True,
            fetch_offline_members=True,
            max_messages=20000,
        )
        self.database = None
        self.version = __version__
        self.log = log
        self.log.info("/*********Starting App*********\\")
        self.log.info(f"App Name: {__botname__} | Version: {__version__}")
        self.started_time = datetime.utcnow()
        self.botconfig = botconfig
        self.constants = Constants()
        self.owner = None
        self.guild_settings = {}
        self.activity_index = {}
        self.cooldown_settings = None
        self.session = aiohttp.ClientSession(loop=self.loop)
        # plain strings below: the originals were f-strings with no placeholders
        self.log.debug("Initialized: Session Loop")
        self.database = DatabaseManager(self.botconfig)
        self.log.debug("Initialized: Database Manager")
        self.helpers = Helpers(self)
        self.log.debug("Initialized: Helpers")
        # Load the cooldown settings prior to loading Mod Mail or AntiSpam
        self.helpers.db_get_cooldown_settings()
        self.tasks = Tasks(self)
        self.log.debug("Initialized: Tasks")
        self.antispam = AntiSpam(self)
        self.log.debug("Initialized AntiSpam Feature")
        self.prompt = prompt.Prompt(self)
        self.log.debug("Initialized: Prompt")
        self.assignment = RoleAssignment(self)
        self.log.debug("Initialized: RoleAssignment")

        # Sets up the sentry_sdk integration:
        sentry_sdk.init(
            dsn=__dsn__,
            release=__version__,
            environment=__environment__,
            integrations=[SqlalchemyIntegration(), RedisIntegration()],
            before_send=self.sentry_before_send,
            traces_sample_rate=0.25,  # Sends 25% of transactions for performance sampling
            _experiments={
                "auto_enabling_integrations": True
            },  # Automatically enable all relevant transactions
        )
        self.log.debug("Initialized: Sentry SDK")

        # load each configured extension; a failure is logged but does not
        # abort startup of the remaining extensions
        for extension in initial_extensions:
            try:
                self.load_extension(extension)
                self.log.info(f"Loaded extension {extension}")
            except Exception as err:
                self.log.exception(
                    f"Failed to load extension {extension}. {sys.exc_info()[0].__name__}: {err}"
                )
        self.log.info("Done loading all extensions")
示例#10
0
from flask import g, Flask, request
from functools import wraps
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from types import SimpleNamespace
from typing import List

from elo import get_leaderboard, PlayerStats
from database import (datetime, get_session, get_display_name, get_team,
                      get_channel, get_app_user, insert_match, get_last_match)

# should not be configured when tests are running
SENTRY_DSN = os.getenv('SLACK_APP_PONG_SENTRY_DSN')
# original used "sentry_sdk.init(...) if SENTRY_DSN else None"; a plain
# if-statement is the idiomatic form and behaves identically
if SENTRY_DSN:
    sentry_sdk.init(dsn=SENTRY_DSN,
                    integrations=[FlaskIntegration(),
                                  SqlalchemyIntegration()])

app = Flask(__name__)

# required settings read from the environment - fail fast (KeyError) if unset
app.config['DATABASE_URL'] = os.environ['SLACK_APP_PONG_DATABASE_URL']
app.config['SIGNING_SECRET'] = os.environ['SLACK_APP_PONG_SIGNING_SECRET']

# app client credentials read from the environment
CLIENT_ID = os.environ['SLACK_APP_PONG_CLIENT_ID']
CLIENT_SECRET = os.environ['SLACK_APP_PONG_CLIENT_SECRET']


class AuthorizeException(Exception):
    """Raised to signal an authorization failure."""


class ValidateException(Exception):
示例#11
0
    CustomDomain,
    LifetimeCoupon,
    Directory,
    Mailbox,
    Referral,
    AliasMailbox,
    Notification,
    PublicDomain,
)
from app.monitor.base import monitor_bp
from app.oauth.base import oauth_bp

# Sentry error reporting: only enabled when a DSN is configured
if SENTRY_DSN:
    LOG.d("enable sentry")
    sentry_sdk.init(
        dsn=SENTRY_DSN, integrations=[FlaskIntegration(), SqlalchemyIntegration(),],
    )

# the app is served behind nginx which uses http and not https
os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"


def create_light_app() -> Flask:
    """Build a minimal Flask app wired only to the database.

    Configures the SQLAlchemy connection URI, disables modification
    tracking, binds the shared ``db`` object, and returns the app.
    """
    light_app = Flask(__name__)
    light_app.config.update(
        SQLALCHEMY_DATABASE_URI=DB_URI,
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )
    db.init_app(light_app)
    return light_app
示例#12
0
    def init_app(
            self,
            app,
            requires=[],
            ext_requires=[],
            bundle_js=None,
            bundle_css=None,
            assetenv=None,
            theme='bootstrap3',
            asset_modules=(),
    ):
        """
        Initialize an app and load necessary assets.

        :param requires: List of required assets. If an asset has both .js
            and .css components, both will be added to the requirement list.
            Loaded assets will be minified and concatenated into the app's
            ``static/js`` and ``static/css`` folders. If an asset has problems
            with either of these, it should be loaded pre-bundled via the
            ``bundle_js`` and ``bundle_css`` parameters.
        :param ext_requires: Same as requires, but will be loaded from
            an external cookiefree server if ``ASSET_SERVER`` is in config,
            before the regular requires list. Assets are loaded as part of
            ``requires`` if there is no asset server
        :param bundle_js: Bundle of additional JavaScript
        :param bundle_css: Bundle of additional CSS
        :param assetenv: Environment for assets (in case your app needs a custom environment)
        :param theme: Theme name; must be a key of ``THEME_FILES`` or
            ``ValueError`` is raised
        :param asset_modules: Importable module names whose ``blueprint``
            attribute provides additional assets; unimportable names are
            logged and skipped

        NOTE(review): ``requires``/``ext_requires`` use mutable list
        defaults; they are rebound (not mutated) below, so this is currently
        safe, but fragile if the body ever changes.
        """
        # Initialize Sentry logging
        if app.config.get('SENTRY_URL'):
            sentry_sdk.init(
                dsn=app.config['SENTRY_URL'],
                integrations=[
                    FlaskIntegration(),
                    RqIntegration(),
                    SqlalchemyIntegration(),
                ],
            )

        # Default .js and tracking file for Matomo
        if app.config.get('MATOMO_URL') and app.config.get('MATOMO_ID'):
            app.config.setdefault('MATOMO_JS', 'matomo.js')
            app.config.setdefault('MATOMO_FILE', 'matomo.php')

        statsd.init_app(app)

        # Since Flask 0.11, templates are no longer auto reloaded.
        # Setting the config alone doesn't seem to work, so we explicitly
        # set the jinja environment here.
        if app.config.get('TEMPLATES_AUTO_RELOAD') or (
                app.config.get('TEMPLATES_AUTO_RELOAD') is None
                and app.config.get('DEBUG')):
            app.jinja_env.auto_reload = True
        app.jinja_env.add_extension('jinja2.ext.do')
        app.jinja_env.autoescape = _select_jinja_autoescape
        app.jinja_env.globals['request_is_xhr'] = request_is_xhr
        if app.subdomain_matching:
            # Does this app want a static subdomain? (Default: yes, 'static').
            # Apps can disable this by setting STATIC_SUBDOMAIN = None.
            # Since Werkzeug internally uses '' instead of None, but takes None
            # as the default parameter, we remap '' to None in our config
            subdomain = app.config.get('STATIC_SUBDOMAIN', 'static') or None
            if subdomain:
                for rule in app.url_map.iter_rules('static'):
                    # For safety, seek out and update the static route added by Flask.
                    # Do not touch additional static routes added by the app or other
                    # blueprints
                    if not rule.subdomain:  # Will be '' not None
                        rule.subdomain = subdomain
                        rule.refresh()
                        break
        else:
            subdomain = None

        # jquery is registered as its own bundle further below, so keep it
        # out of the combined js_all bundle via the '!' exclusion syntax.
        ignore_js = ['!jquery.js']
        ignore_css = []
        ext_js = []
        ext_css = []
        if app.config.get('ASSET_SERVER'):
            # Serve ext_requires from the external asset server: collect the
            # matching .js/.css specs and exclude them from the local bundles.
            for itemgroup in ext_requires:
                sub_js = []
                sub_css = []
                if not isinstance(itemgroup, (list, tuple)):
                    itemgroup = [itemgroup]
                for item in itemgroup:
                    name, spec = split_namespec(item)
                    for alist, ilist, ext in [
                        (sub_js, ignore_js, '.js'),
                        (sub_css, ignore_css, '.css'),
                    ]:
                        if name + ext in assets:
                            alist.append(name + ext + six.text_type(spec))
                            ilist.append('!' + name + ext)
                if sub_js:
                    ext_js.append(sub_js)
                if sub_css:
                    ext_css.append(sub_css)
        else:
            # No asset server: fold ext_requires (flattened) in front of the
            # regular requires list so everything is bundled locally.
            requires = [
                item for itemgroup in ext_requires
                for item in (itemgroup if isinstance(itemgroup, (
                    list, tuple)) else [itemgroup])
            ] + requires

        app.config['ext_js'] = ext_js
        app.config['ext_css'] = ext_css

        # Resolve each required asset name into its .js/.css members.
        assets_js = []
        assets_css = []
        for item in requires:
            name, spec = split_namespec(item)
            for alist, ext in [(assets_js, '.js'), (assets_css, '.css')]:
                if name + ext in assets:
                    alist.append(name + ext + six.text_type(spec))
        # Minify and concatenate into single packed bundles.
        js_all = Bundle(
            assets.require(*(ignore_js + assets_js)),
            filters='uglipyjs',
            output='js/baseframe-packed.js',
        )
        css_all = Bundle(
            assets.require(*(ignore_css + assets_css)),
            filters=['cssrewrite', 'cssmin'],
            output='css/baseframe-packed.css',
        )
        if bundle_js:
            js_all = Bundle(js_all, bundle_js)
        if bundle_css:
            css_all = Bundle(css_all, bundle_css)

        if assetenv is None:
            app.assets = Environment(app)
        else:
            app.assets = assetenv
        app.assets.register('js_jquery', assets.require('jquery.js'))
        app.assets.register('js_all', js_all)
        app.assets.register('css_all', css_all)
        app.register_blueprint(self, static_subdomain=subdomain)

        # Optional extra asset modules; a failed import is logged, not fatal.
        for module_name in asset_modules:
            try:
                module = __import__(module_name)
                module.blueprint.init_app(app)
                app.register_blueprint(
                    module.blueprint,
                    url_prefix="/_baseframe",
                    static_subdomain=subdomain,
                )
            except ImportError:
                app.logger.warning("Unable to import asset module: %s",
                                   module_name)

        # Optional config for a client app to use a manifest file
        # to load fingerprinted assets
        # If used with webpack, the client app is expected to specify its own webpack.config.js
        # Set `ASSET_MANIFEST_PATH` in `app.config` to the path for `manifest.json`.
        # Eg: "static/build/manifest.json"
        # Set `ASSET_BASE_PATH` in `app.config` to the path in which the compiled assets are present.
        # Eg: "static/build"
        # NOTE(review): ASSET_BASE_PATH is documented here but not read in
        # this method — presumably consumed elsewhere; confirm.
        if app.config.get('ASSET_MANIFEST_PATH'):
            # Load assets into config from a manifest file
            with app.open_resource(app.config['ASSET_MANIFEST_PATH']) as f:
                asset_bundles = json.loads(f.read())
                if app.config.get('assets'):
                    raise ValueError(
                        "Loading assets via a manifest file needs the `ASSETS` config key to be unused"
                    )
                app.config['assets'] = {}
                for asset_key, asset_path in asset_bundles['assets'].items():
                    app.config['assets'][asset_key] = asset_path

        # Three caches share the app config but use distinct key prefixes so
        # their entries never collide.
        app.config.setdefault('CACHE_KEY_PREFIX',
                              'flask_cache_' + app.name + '/')
        nwcacheconfig = dict(app.config)
        nwcacheconfig['CACHE_KEY_PREFIX'] = 'networkbar_'
        if 'CACHE_TYPE' not in nwcacheconfig:
            nwcacheconfig['CACHE_TYPE'] = 'simple'

        acacheconfig = dict(app.config)
        acacheconfig['CACHE_KEY_PREFIX'] = 'asset_'
        if 'CACHE_TYPE' not in acacheconfig:
            acacheconfig['CACHE_TYPE'] = 'simple'

        networkbar_cache.init_app(app, config=nwcacheconfig)
        asset_cache.init_app(app, config=acacheconfig)
        cache.init_app(app)

        babel.init_app(app)
        # Debug toolbar is optional; only wire it up when the import
        # succeeded at module level (toolbar is not None).
        if toolbar is not None:
            if 'DEBUG_TB_PANELS' not in app.config:
                app.config['DEBUG_TB_PANELS'] = [
                    'flask_debugtoolbar.panels.versions.VersionDebugPanel',
                    'flask_debugtoolbar.panels.timer.TimerDebugPanel',
                    'flask_debugtoolbar.panels.headers.HeaderDebugPanel',
                    'flask_debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
                    'flask_debugtoolbar.panels.config_vars.ConfigVarsDebugPanel',
                    'flask_debugtoolbar.panels.template.TemplateDebugPanel',
                    'flask_debugtoolbar.panels.sqlalchemy.SQLAlchemyDebugPanel',
                    'flask_debugtoolbar.panels.logger.LoggingPanel',
                    'flask_debugtoolbar.panels.route_list.RouteListDebugPanel',
                    'flask_debugtoolbar.panels.profiler.ProfilerDebugPanel',
                ]
                if line_profile is not None:
                    app.config['DEBUG_TB_PANELS'].append(
                        'flask_debugtoolbar_lineprofilerpanel.panels.LineProfilerPanel'
                    )
            toolbar.init_app(app)

        app.json_encoder = JSONEncoder
        # If this app has a Lastuser extension registered, give it a cache
        lastuser = getattr(app, 'extensions', {}).get('lastuser')
        if lastuser and hasattr(lastuser, 'init_cache'):
            lastuser.init_cache(app=app, cache=cache)

        app.config['tz'] = timezone(app.config.get('TIMEZONE', 'UTC'))

        if theme not in THEME_FILES:
            raise ValueError("Unrecognised theme: %s" % theme)
        app.config['theme'] = theme

        if 'NETWORKBAR_DATA' not in app.config:
            app.config[
                'NETWORKBAR_DATA'] = 'https://api.hasgeek.com/1/networkbar/networkbar.json'

        # A list/tuple means inline link data rather than a URL to fetch.
        if isinstance(app.config.get('NETWORKBAR_DATA'), (list, tuple)):
            app.config['NETWORKBAR_LINKS'] = app.config['NETWORKBAR_DATA']

        app.config.setdefault('RECAPTCHA_DATA_ATTRS', {})
        app.config['RECAPTCHA_DATA_ATTRS'].setdefault(
            'callback', 'onInvisibleRecaptchaSubmit')
        app.config['RECAPTCHA_DATA_ATTRS'].setdefault('size', 'invisible')
示例#13
0
#!/usr/bin/env python3

import os

# Optional VS Code remote-debugging hook: set PTVSD to enable attaching,
# or PTVSD=wait to block startup until the debugger attaches.
env_ptvsd = os.getenv('PTVSD')
if env_ptvsd:
    import ptvsd
    print('Oczekiwanie na połączenie debuggera VS Code…')
    ptvsd.enable_attach(address=('0.0.0.0', 5678))
    if env_ptvsd.lower() == 'wait':
        ptvsd.wait_for_attach()

# These imports come after the ptvsd block — presumably so the debugger can
# attach before the bot's modules are loaded; keep this ordering.
import signal
from configuration import configuration
import sentry_sdk
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from core import somsiad
from version import __version__

if __name__ == '__main__':
    # Route Ctrl-C to the bot's own shutdown handler.
    signal.signal(signal.SIGINT, somsiad.signal_handler)
    # Sentry is optional: only initialized when a DSN is configured.
    if configuration['sentry_dsn']:
        print('Inicjowanie połączenia z Sentry...')
        sentry_sdk.init(
            configuration['sentry_dsn'], release=f'{configuration["sentry_proj"] or "somsiad"}@{__version__}',
            integrations=[SqlalchemyIntegration(), AioHttpIntegration()]
        )
    somsiad.load_and_run()
示例#14
0
def create_app(config_class: object):
    """Create Flask app.

    Args:
        config_class: configuration for Flask app

    Returns:
        The configured Flask application with all routes registered.
    """
    # Sentry is only enabled in production so test/dev runs report nothing.
    if os.environ.get("FLASK_ENV") == "production":  # pragma: no cover
        sentry_sdk.init(
            dsn="https://[email protected]/1513206",
            integrations=[FlaskIntegration(),
                          SqlalchemyIntegration()],
            traces_sample_rate=1.0,
        )
    app = Flask(__name__)
    app.config.from_object(config_class)
    db.init_app(app)

    from app.api.v1 import api_v1

    app.register_blueprint(api_v1)

    @app.route('/ping', methods=['GET', 'POST'])
    def ping():
        """Return string to show the server is alive."""
        return 'Server is here'

    @app.route('/', methods=['GET'])
    def root():
        """Root page with links to simple and CSV form."""
        return render_template('index.html')

    # NOTE(review): this view function shadows any module-level `json` name
    # inside create_app's scope; harmless here but worth renaming eventually.
    @app.route('/json', methods=['POST'])
    def json():
        """Get optimal next period budget shares for ad options."""
        # TODO: Remove this route in favor of api/v1/ads after informing users

        # Validate presence of both top-level keys before processing.
        if 'optimize' not in request.json:  # pragma: no cover
            if 'stats' not in request.json:
                return '"optimize" and "stats" keys missing in posted JSON object'
            return '"optimize" key missing in posted JSON object'
        if 'stats' not in request.json:  # pragma: no cover
            return '"stats" key missing in posted JSON object'

        if not request.json['optimize']:  # pragma: no cover
            if not request.json['stats']:
                return '"optimize" and "stats" keys are empty'
            return '"optimize" key is empty'
        if not request.json['stats']:  # pragma: no cover
            return '"stats" key is empty'

        # Metrics listed in 'optimize' get weight None (auto); others get 0.
        weights = {
            'impression_weight': 0,
            'engagement_weight': 0,
            'click_weight': 0,
            'conversion_weight': 0,
        }
        for metric in request.json['optimize']:
            # e.g. 'impressions' -> 'impression_weight' (strips trailing 's')
            weights[metric[:-1] + '_weight'] = None

        data = pd.DataFrame(request.json['stats'])
        data = pro.preprocess(data, **weights)
        data = pro.filter_dates(data, cutoff=CUTOFF)
        [options, data] = pro.reindex_options(data)

        bandit = ban.Bandit(
            num_options=len(options),
            memory=True,
            shape=SHAPE,
            cutoff=CUTOFF,
            cut_level=CUT_LEVEL,
        )
        bandit.add_daily_results(data)
        shares = bandit.calculate_shares(accelerate=True)
        options['ad_share'] = shares.tolist()

        return options.to_json(orient='records')

    @app.route('/form', methods=['GET', 'POST'])
    def form():
        """
        Provide form with cumulated trial and success inputs for multiple options,
        return options with suggested budget share for next period
        """

        if request.method == 'POST':
            # Form fields alternate trials/successes per option; empty
            # values are dropped before pairing.
            entries = [value for value in list(request.form.values()) if value]
            num_options = int(len(entries) / 2)
            options = pd.DataFrame([{
                'option': str(i + 1)
            } for i in range(num_options)])
            trials = [int(entries[i * 2]) for i in range(num_options)]
            successes = [int(entries[i * 2 + 1]) for i in range(num_options)]
            bandit = ban.Bandit(num_options=num_options, memory=False)
            for i in range(num_options):
                bandit.add_results(option_id=i,
                                   trials=trials[i],
                                   successes=successes[i])
            shares = bandit.calculate_shares(accelerate=False)
            options = format_results(options, shares)
            records = options.to_dict('records')
            columns = options.columns.values
            save_plot(bandit)
            return render_template(
                'form_result.html',
                records=records,
                columns=columns,
                plot='/static/images/plot.png',
            )

        return render_template('form.html')

    # NOTE(review): this view function shadows any module-level `csv` name
    # inside create_app's scope, same as `json` above.
    @app.route('/csv', methods=['GET', 'POST'])
    def csv():
        """
        Provide form to paste ads CSV with daily breakdown of channel (optional),
        ad_id, impressions, engagements, clicks and conversions;
        return options with suggested budget share or status for next period and
        provide direct upload to Facebook via API
        """

        if request.method == 'POST':
            # Second-phase POST: push previously computed statuses to Facebook.
            if request.form['update'] == 'true':  # pragma: no cover
                app_id = request.form['app_id']
                app_secret = request.form['app_secret']
                access_token = request.form['access_token']
                channels = ast.literal_eval(request.form['channels'])
                records = ast.literal_eval(request.form['records'])
                updatable = ['facebook', 'instagram']
                indices = []
                for channel in updatable:
                    if channel in channels:
                        indices.append(channels.index(channel))
                results = pd.DataFrame(columns=['ad_id', 'ad_status'])
                for index in indices:
                    for record in records[index]:
                        results.loc[len(results)] = [
                            record['ad_id'],
                            record['ad_status'],
                        ]
                updated = update_facebook(app_id, app_secret, access_token,
                                          results)
                records = updated.to_dict('records')
                columns = updated.columns.values
                return render_template('update_result.html',
                                       records=records,
                                       columns=columns)

            # Empty weight fields mean "auto" (None); otherwise integers.
            weights = {}
            for weight in [
                    'impression_weight',
                    'engagement_weight',
                    'click_weight',
                    'conversion_weight',
            ]:
                if request.form[weight] == '':
                    weights[weight] = None
                else:
                    weights[weight] = int(request.form[weight])

            # sep=None + engine='python' lets pandas sniff the delimiter.
            data = pd.read_csv(StringIO(request.form['ads']),
                               sep=None,
                               engine='python')

            try:
                data = pro.preprocess(
                    data,
                    weights['impression_weight'],
                    weights['engagement_weight'],
                    weights['click_weight'],
                    weights['conversion_weight'],
                )
            except Exception as error:  # pragma: no cover
                print(error)
                message = 'Cannot pre-process your data. \
                         Please check the CSV input format and try again.'

                return render_template(
                    'csv.html',
                    error=message,
                    output=request.form['output'],
                    impression_weight=request.form['impression_weight'],
                    engagement_weight=request.form['engagement_weight'],
                    click_weight=request.form['click_weight'],
                    conversion_weight=request.form['conversion_weight'],
                    ads=request.form['ads'],
                )

            try:
                data = pro.filter_dates(data, cutoff=CUTOFF)
            except Exception as error:  # pragma: no cover
                print(error)
                message = 'Please check your dates (format should be YYYY-MM-DD).'
                return render_template(
                    'csv.html',
                    error=message,
                    output=request.form['output'],
                    impression_weight=request.form['impression_weight'],
                    engagement_weight=request.form['engagement_weight'],
                    click_weight=request.form['click_weight'],
                    conversion_weight=request.form['conversion_weight'],
                    ads=request.form['ads'],
                )
            if data.empty:  # pragma: no cover
                error = ('Please include results with data from the past ' +
                         str(CUTOFF) + ' days.')
                return render_template(
                    'csv.html',
                    error=error,
                    output=request.form['output'],
                    impression_weight=request.form['impression_weight'],
                    engagement_weight=request.form['engagement_weight'],
                    click_weight=request.form['click_weight'],
                    conversion_weight=request.form['conversion_weight'],
                    ads=request.form['ads'],
                )

            [options, data] = pro.reindex_options(data)

            bandit = ban.Bandit(
                num_options=len(options),
                memory=True,
                shape=SHAPE,
                cutoff=CUTOFF,
                cut_level=CUT_LEVEL,
            )
            bandit.add_daily_results(data)
            shares = bandit.calculate_shares(accelerate=True)

            # NOTE(review): if 'output' is anything other than 'status' or
            # 'share', `results` is never bound and the code below raises
            # NameError — confirm the form restricts the value.
            output = request.form['output']
            if output == 'status':
                results = format_results(options, shares, status=True)
            elif output == 'share':
                results = format_results(options, shares,
                                         status=False).round(2)

            if 'channel' in options.columns:
                channel_shares = (format_results(
                    options, shares,
                    status=False).groupby('channel')['ad_share'].sum().round(2)
                                  )
                channels = []
                records = []
                for name, group in results.groupby('channel'):
                    channels.append(name)
                    group = group.drop(['channel'], axis=1)
                    columns = group.columns.values
                    records.append(group.to_dict('records'))
                return render_template(
                    'csv_result_channels.html',
                    channels=channels,
                    channel_shares=channel_shares,
                    records=records,
                    columns=columns,
                )

            records = results.to_dict('records')
            columns = results.columns.values
            return render_template('csv_result.html',
                                   records=records,
                                   columns=columns)

        return render_template('csv.html')

    @app.after_request
    def after_request(response):
        """
        Clear server cache after every request to display newest plot image
        """
        response.headers[
            'Cache-Control'] = 'no-cache, no-store, must-revalidate, public, max-age=0'
        response.headers['Expires'] = 0
        response.headers['Pragma'] = 'no-cache'
        return response

    def format_results(options, shares, status=False):
        """
        Return ACTIVE/PAUSED instead of numeric share for options if desired
        """
        results = options.copy()
        if status:
            status = shares > 0
            results['ad_status'] = status.tolist()
            results['ad_status'].replace(True, 'ACTIVE', inplace=True)
            results['ad_status'].replace(False, 'PAUSED', inplace=True)
        else:
            results['ad_share'] = shares.tolist()
            # NOTE(review): self-assignment below is a no-op — likely a
            # leftover from a removed rounding/formatting step.
            results['ad_share'] = results['ad_share']
        return results

    def save_plot(bandit):
        """
        Save plot with bandit options' PDFs (beta distributions)
        """
        x = np.linspace(0, 1, 100)
        for i in range(len(bandit.trials)):
            plt.plot(
                x,
                beta.pdf(x, bandit.successes[i],
                         bandit.trials[i] - bandit.successes[i]),
                label='option ' + str(i + 1),
            )
        plt.xlabel('Success rate')
        plt.ylabel('Probablity density')
        plt.grid()
        plt.yticks([])
        plt.legend()
        plt.savefig('app/static/images/plot.png')
        plt.clf()

    def update_facebook(app_id, app_secret, access_token,
                        options):  # pragma: no cover
        """
        Update status of ads on Facebook if different from respective suggestion;
        return dataframe with updated ads
        """
        api = FacebookAdsApi.init(app_id, app_secret, access_token)
        updated = []
        # Determine number of required batches since
        # Facebook only allows 50 API calls per batch
        num_options = len(options.index)
        batch_size = 50
        batches = int(num_options / batch_size) + 1
        # Split options into batches and loop through those
        i = 0
        for _ in range(batches):
            option_batch = options.loc[i:i + batch_size, :]
            i += batch_size
            update_batch = api.new_batch()
            # Loop through options within batch, compare current and suggested
            # ad status and update if changed
            for _, row in option_batch.iterrows():
                ad_id = str(row['ad_id'])
                ad = Ad(ad_id)
                ad.api_get(fields=[Ad.Field.status])
                old_status = ad[Ad.Field.status]
                new_status = row['ad_status']
                if old_status != new_status:
                    ad[Ad.Field.status] = new_status
                    updated.append([ad_id, old_status + ' -> ' + new_status])
                    ad.api_update(
                        batch=update_batch,
                        fields=[],
                        params={Ad.Field.status: new_status},
                    )
            update_batch.execute()
        return pd.DataFrame(updated, columns=['ad_id', 'updated'])

    return app
示例#15
0
            for key, value in event.items()
        }
    elif isinstance(event, list):
        return [
            strip_bot_token(value, hint)
            for value in event
        ]
    elif isinstance(event, str) and BOT_TOKEN_RE.find(event):
        return BOT_TOKEN_RE.sub(event, '<BOT_TOKEN>')
    else:
        return event


# Initialize Sentry with a before_send scrubber intended to strip bot tokens
# from outgoing events.
# NOTE(review): the strip_bot_token helper above calls BOT_TOKEN_RE.find
# (regex patterns have .search, not .find) and passes sub() arguments as
# (string, replacement) rather than (replacement, string) — both look wrong;
# confirm token scrubbing actually works before relying on it.
sentry_sdk.init(
    dsn='https://[email protected]/1568089',
    integrations=[SqlalchemyIntegration(), ThreadingIntegration()],
    before_send=strip_bot_token,
)


class LastUpdate(Base):
    # Bookkeeping table; per the column name it stores the id of the most
    # recently seen update — confirm against the code that writes it.
    __tablename__ = 'last_update'
    id = Column(Integer, primary_key=True)
    update_id = Column(Integer)


class User(Base):
    # User record keyed by a unique chat_id — presumably a messaging-platform
    # chat identifier; verify against callers. (This view may be truncated;
    # confirm no further columns exist.)
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    chat_id = Column(Integer, unique=True)
示例#16
0
def create_app(test_config=None):
    if dsn := os.getenv('SENTRY_DSN'):
        sentry_sdk.init(dsn=dsn, integrations=[FlaskIntegration(), SqlalchemyIntegration()])
示例#17
0
def create_app(config_name="default"):
    """Application factory: build and wire a Flask app for the named config.

    :param config_name: key into ``configs`` selecting the configuration
        object (defaults to "default")
    :return: the fully initialized Flask application
    """
    application = Flask(__name__, instance_relative_config=True)

    from jinja2 import select_autoescape

    # Autoescape all template types, including plain strings.
    application.jinja_options = {
        "autoescape":
        select_autoescape(
            enabled_extensions=("html", "html.j2", "xml"),
            default_for_string=True,
        )
    }

    # CONFIG
    from config import configs

    application.config.from_object(configs[config_name])

    print("DB INFO: using {}".format(application.config["INFO_USED_DB"]))

    # APPS
    mail.init_app(application)
    db.init_app(application)
    migrate.init_app(application, db)
    cache.init_app(application)
    # charts.init_app(application)

    # Sentry imports are local so the dependency is only needed when
    # monitoring is enabled in config.
    if application.config["SENTRY_MONITORING"]:
        import sentry_sdk
        from sentry_sdk.integrations.flask import FlaskIntegration
        from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration

        sentry_sdk.init(
            dsn=
            "https://[email protected]/5454190",
            integrations=[FlaskIntegration(),
                          SqlalchemyIntegration()],
            traces_sample_rate=0.2,
        )
    else:
        print("No Sentry monitoring.")

    # LOGGING
    from .config.config_logging import db_handler, gunicorn_logger

    application.logger.addHandler(gunicorn_logger)
    application.logger.addHandler(db_handler)

    # CONTROLLERS
    from .controllers import register_all_controllers  # noqa: F401

    register_all_controllers(application)

    from .controllers import register_error_handlers  # noqa: F401

    register_error_handlers(application)

    # MODULES

    from .auth import create_module as auth_create_module

    auth_create_module(application)

    return application
示例#18
0
def create_app(config_name="default"):
    """Application factory: build and wire a Flask app for the named config.

    :param config_name: key into ``configs`` selecting the configuration
        object (defaults to "default")
    :return: the fully initialized Flask application
    """
    application = Flask(__name__, instance_relative_config=True)

    # Imported here, used at the bottom when jinja_options is assigned.
    from jinja2 import select_autoescape

    # CONFIG
    from config import configs

    application.config.from_object(configs[config_name])

    print(f"DB INFO: using {application.config['INFO_USED_DB']}")

    # APPS
    db.init_app(application)
    migrate.init_app(application, db)
    security.init_app(application, user_datastore)
    babel.init_app(application)
    turbo.init_app(application)
    mail.init_app(application)
    dropzone.init_app(application)

    # Sentry imports are local so the dependency is only needed when
    # monitoring is enabled in config.
    if application.config["SENTRY_MONITORING"]:
        import sentry_sdk
        from sentry_sdk.integrations.flask import FlaskIntegration
        from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration

        sentry_sdk.init(
            dsn="https://[email protected]/5776351",
            integrations=[FlaskIntegration(), SqlalchemyIntegration()],
            traces_sample_rate=1.0,
        )
    else:
        print("No Sentry monitoring.")

    # LOGGING
    # from .config.config_logging import db_handler, gunicorn_logger

    # application.logger.addHandler(gunicorn_logger)
    # application.logger.addHandler(db_handler)

    # CONTROLLERS
    from .controllers import register_all_controllers  # noqa: F401

    register_all_controllers(application)

    from .controllers import register_error_handlers  # noqa: F401

    register_error_handlers(application)

    # MODULES
    from app.modules.auth.auth import blueprint as google_oauth_bp

    application.register_blueprint(google_oauth_bp, url_prefix="/oauth/")

    # In test configurations, create the schema up front.
    if application.config["APP_STATE"] in ("test", "testing"):
        with application.app_context():
            # from app.helpers.tests.fill_db import db_fill
            db.create_all()
            print("Created database")

            # db_fill()

    application.jinja_options = {
        "autoescape": select_autoescape(enabled_extensions=("html", "html.j2", "xml")),
        "line_statement_prefix": "#",
        "line_comment_prefix": "##",
    }

    return application
示例#19
0
def test_transactions(sentry_init, capture_events, render_span_tree):
    """Verify that nested SQLAlchemy sessions produce the expected db spans.

    The fixtures (sentry_init, capture_events, render_span_tree) are
    provided by the surrounding test suite's conftest.
    """

    sentry_init(
        integrations=[SqlalchemyIntegration()],
        _experiments={"record_sql_params": True},
        traces_sample_rate=1.0,
    )
    events = capture_events()

    Base = declarative_base()  # noqa: N806

    class Person(Base):
        __tablename__ = "person"
        id = Column(Integer, primary_key=True)
        name = Column(String(250), nullable=False)

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        street_name = Column(String(250))
        street_number = Column(String(250))
        post_code = Column(String(250), nullable=False)
        person_id = Column(Integer, ForeignKey("person.id"))
        person = relationship(Person)

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)  # noqa: N806
    session = Session()

    # One SELECT, two failed duplicate-key INSERTs, then another SELECT —
    # each inside its own nested (SAVEPOINT) transaction.
    with start_transaction(name="test_transaction", sampled=True):
        with session.begin_nested():
            session.query(Person).first()

        for _ in range(2):
            with pytest.raises(IntegrityError):
                with session.begin_nested():
                    session.add(Person(id=1, name="bob"))
                    session.add(Person(id=1, name="bob"))

        with session.begin_nested():
            session.query(Person).first()

    # Exactly one transaction event is expected.
    (event,) = events

    # The span tree must show the full SAVEPOINT/RELEASE (success) and
    # SAVEPOINT/ROLLBACK (failure) lifecycle for all four nested blocks.
    assert (
        render_span_tree(event)
        == """\
- op=null: description=null
  - op="db": description="SAVEPOINT sa_savepoint_1"
  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
  - op="db": description="RELEASE SAVEPOINT sa_savepoint_1"
  - op="db": description="SAVEPOINT sa_savepoint_2"
  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_2"
  - op="db": description="SAVEPOINT sa_savepoint_3"
  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_3"
  - op="db": description="SAVEPOINT sa_savepoint_4"
  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
  - op="db": description="RELEASE SAVEPOINT sa_savepoint_4"\
"""
    )
示例#20
0
def init_sentry(dsn):
    """Initialize the Sentry SDK when a DSN is given; do nothing otherwise."""
    if not dsn:
        return
    integrations = [PyramidIntegration(), SqlalchemyIntegration()]
    sentry_sdk.init(dsn, integrations=integrations)
示例#21
0
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)


def _require_env(name):
    """Return environment variable *name* with newlines stripped.

    Raises KeyError when the variable is unset.  (The original code called
    ``.replace`` on ``os.environ.get(...)``, which raised an uncaught
    AttributeError for a missing variable, bypassing the KeyError handler
    below.)
    """
    value = os.environ.get(name)
    if value is None:
        raise KeyError(name)
    return value.replace('\n', '')


# create postgresql connection string
try:
    DB_HOST = _require_env('DB_HOST')
    DB_PORT = _require_env('DB_PORT')
    DB_USER = _require_env('DB_USER')
    DB_PASS = _require_env('DB_PASS')
    DB_NAME = _require_env('DB_NAME')

    # Each component is URL-quoted so special characters in credentials
    # cannot corrupt the connection URI.
    SQLALCHEMY_DATABASE_URI = f"postgresql://{quote(DB_USER, safe='')}:{quote(DB_PASS, safe='')}@{quote(DB_HOST, safe='')}:{quote(DB_PORT, safe='')}/{quote(DB_NAME, safe='')}"

    SENTRY_DSN = _require_env('SENTRY_DSN')
    VERSION = _require_env('VERSION')
    ENVIRONMENT = _require_env('ENVIRONMENT')
    sentry_sdk.init(environment=ENVIRONMENT,
                    release=VERSION,
                    dsn=SENTRY_DSN,
                    integrations=[SqlalchemyIntegration()])

except KeyError:
    logger.warning('One or multiple necessary environment variables not set.')
    # Bare raise preserves the original traceback; the old ``exit(1)`` after
    # ``raise e`` was unreachable and has been dropped.
    raise
示例#22
0
def includeme(config: Optional[pyramid.config.Configurator] = None) -> None:
    """Initialize the Sentry integration.

    Builds the sentry_sdk client options from ``SENTRY_*`` environment
    variables and pyramid config, installs the logging/Pyramid/SQLAlchemy/
    Redis integrations, and records completion in the module-level
    ``_client_setup`` flag so initialization runs at most once.
    """
    global _client_setup
    sentry_url = config_utils.env_or_config(config, "SENTRY_URL",
                                            "c2c.sentry.url")
    if sentry_url is not None and not _client_setup:
        # Collect every SENTRY_CLIENT_* env var; the 14-char prefix is
        # stripped and the rest lower-cased to form the sentry_sdk option
        # name (e.g. SENTRY_CLIENT_DEBUG -> debug).
        client_info: MutableMapping[str, Any] = {
            key[14:].lower(): value
            for key, value in os.environ.items()
            if key.startswith("SENTRY_CLIENT_")
        }
        # Parse bool
        for key in (
                "with_locals",
                "default_integrations",
                "send_default_pii",
                "debug",
                "attach_stacktrace",
                "propagate_traces",
                "auto_enabling_integrations",
                "auto_session_tracking",
        ):
            if key in client_info:
                client_info[key] = client_info[key].lower() in ("1", "t",
                                                                "true")
        # Parse int
        for key in ("max_breadcrumbs", "shutdown_timeout",
                    "transport_queue_size"):
            if key in client_info:
                client_info[key] = int(client_info[key])
        # Parse float
        for key in ("sample_rate", "traces_sample_rate"):
            if key in client_info:
                client_info[key] = float(client_info[key])

        git_hash = config_utils.env_or_config(config, "GIT_HASH",
                                              "c2c.git_hash")
        # Use the git hash as the release unless an explicit, non-"latest"
        # release was already configured.
        if git_hash is not None and not ("release" in client_info and
                                         client_info["release"] != "latest"):
            client_info["release"] = git_hash
        # ignore_exceptions is a comma-separated list; sentry_sdk expects it
        # under the option name ignore_errors.
        client_info["ignore_errors"] = client_info.pop("ignore_exceptions",
                                                       "SystemExit").split(",")
        tags = {
            key[11:].lower(): value
            for key, value in os.environ.items()
            if key.startswith("SENTRY_TAG_")
        }

        # Breadcrumbs from DEBUG upward; events only from SENTRY_LEVEL
        # (default ERROR) upward.
        sentry_logging = LoggingIntegration(
            level=logging.DEBUG,
            event_level=config_utils.env_or_config(config, "SENTRY_LEVEL",
                                                   "c2c.sentry_level",
                                                   "ERROR").upper(),
        )
        traces_sample_rate = float(
            config_utils.env_or_config(config, "SENTRY_TRACES_SAMPLE_RATE",
                                       "c2c.sentry_traces_sample_rate", "0.0"))
        sentry_sdk.init(
            dsn=sentry_url,
            integrations=[
                sentry_logging,
                PyramidIntegration(),
                SqlalchemyIntegration(),
                RedisIntegration()
            ],
            traces_sample_rate=traces_sample_rate,
            before_send=_create_before_send_filter(tags),
            **client_info,
        )
        _client_setup = True

        # Loggers listed in SENTRY_EXCLUDES (default: sentry_sdk itself)
        # never produce Sentry events.
        excludes = config_utils.env_or_config(config, "SENTRY_EXCLUDES",
                                              "c2c.sentry.excludes",
                                              "sentry_sdk").split(",")
        for exclude in excludes:
            ignore_logger(exclude)

        LOG.info("Configured sentry reporting with client=%s and tags=%s",
                 repr(client_info), repr(tags))
示例#23
0
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Sentry from a config entry.

    Migrates the legacy environment setting from entry data to entry
    options, initializes the Sentry SDK with logging/aiohttp/sqlalchemy
    integrations, and schedules an hourly refresh of the system info
    attached to reported events.  Always returns True.
    """

    # Migrate environment from config entry data to config entry options
    if (CONF_ENVIRONMENT not in entry.options
            and CONF_ENVIRONMENT in entry.data
            and entry.data[CONF_ENVIRONMENT]):
        options = {
            **entry.options, CONF_ENVIRONMENT: entry.data[CONF_ENVIRONMENT]
        }
        data = entry.data.copy()
        data.pop(CONF_ENVIRONMENT)
        hass.config_entries.async_update_entry(entry,
                                               data=data,
                                               options=options)

    # https://docs.sentry.io/platforms/python/logging/
    sentry_logging = LoggingIntegration(
        level=entry.options.get(CONF_LOGGING_LEVEL, DEFAULT_LOGGING_LEVEL),
        event_level=entry.options.get(CONF_LOGGING_EVENT_LEVEL,
                                      DEFAULT_LOGGING_EVENT_LEVEL),
    )

    # Additional/extra data collection
    channel = get_channel(current_version)
    huuid = await hass.helpers.instance_id.async_get()
    system_info = await hass.helpers.system_info.async_get_system_info()
    custom_components = await async_get_custom_components(hass)

    # Performance tracing is opt-in: when disabled, no sample-rate kwarg is
    # passed to sentry_sdk.init at all.
    tracing = {}
    if entry.options.get(CONF_TRACING):
        tracing = {
            "traces_sample_rate":
            entry.options.get(CONF_TRACING_SAMPLE_RATE,
                              DEFAULT_TRACING_SAMPLE_RATE),
        }

    sentry_sdk.init(  # pylint: disable=abstract-class-instantiated
        dsn=entry.data[CONF_DSN],
        environment=entry.options.get(CONF_ENVIRONMENT),
        integrations=[
            sentry_logging,
            AioHttpIntegration(),
            SqlalchemyIntegration()
        ],
        release=current_version,
        before_send=lambda event, hint: process_before_send(
            hass,
            entry.options,
            channel,
            huuid,
            system_info,
            custom_components,
            event,
            hint,
        ),
        **tracing,
    )

    async def update_system_info(now):
        # Refresh the captured system info and reschedule ourselves so the
        # data attached to future events stays current.
        nonlocal system_info
        system_info = await hass.helpers.system_info.async_get_system_info()

        # Update system info every hour
        async_call_later(hass, 3600, update_system_info)

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, update_system_info)

    return True
示例#24
0
    def init_app(
            self,
            app: Flask,
            requires: Iterable[str] = (),
            ext_requires: Iterable[Union[str, List[str], Tuple[str,
                                                               ...]]] = (),
            bundle_js=None,
            bundle_css=None,
            assetenv=None,
            theme: str = 'bootstrap3',
            asset_modules=(),
    ):
        """
        Initialize an app with Baseframe and load necessary assets.

        :param requires: List of required assets. If an asset has both .js
            and .css components, both will be added to the requirement list.
            Loaded assets will be minified and concatenated into the app's
            ``static/js`` and ``static/css`` folders. If an asset has problems
            with either of these, it should be loaded pre-bundled via the
            ``bundle_js`` and ``bundle_css`` parameters
        :param ext_requires: Extended requirements, will be loaded first. These used to
            be served from a separate asset server, but that is deprecated
        :param bundle_js: Bundle of additional JavaScript
        :param bundle_css: Bundle of additional CSS
        :param theme: CSS theme, one of 'bootstrap3' (default) or 'mui'
        :param assetenv: Environment for assets (in case your app needs a custom
            environment)
        :param asset_modules: Modules providing additional assets
        :raises ValueError: if secret keys are misconfigured, the theme is
            unrecognised, or a manifest file conflicts with existing assets
        """
        # Initialize Sentry logging
        if app.config.get('SENTRY_URL'):
            # With `traces_sample_rate` option set, every transaction created will
            # have that percentage chance of being sent to Sentry. (So, for example,
            # if you set traces_sample_rate to 0.2, approximately 20% of your
            # transactions will get recorded and sent.) We're keeping it 100% by
            # default, but we can change it from app.config if needed.
            sentry_sdk.init(
                dsn=app.config['SENTRY_URL'],
                traces_sample_rate=app.config.get('SENTRY_SAMPLE_RATE', 1.0),
                integrations=[
                    FlaskIntegration(),
                    RqIntegration(),
                    SqlalchemyIntegration(),
                ],
            )

        # Setup secret key rotation.  SECRET_KEY and SECRET_KEYS[0] must end
        # up in agreement, whichever one was configured.
        if app.config['SECRET_KEY']:  # Always present as it's a default
            if not app.config.get('SECRET_KEYS'):
                app.logger.debug("Setting SECRET_KEYS from SECRET_KEY")
                app.config['SECRET_KEYS'] = [app.config['SECRET_KEY']]
        elif app.config.get('SECRET_KEYS'):
            app.logger.debug(
                "Setting SECRET_KEY from first item in SECRET_KEYS")
            app.config['SECRET_KEY'] = app.config['SECRET_KEYS'][0]
        if app.config.get('SECRET_KEY') != app.config.get(
                'SECRET_KEYS', [None])[0]:
            raise ValueError("App has misconfigured secret keys")
        app.session_interface = RotatingKeySecureCookieSessionInterface()

        # Default .js and tracking file for Matomo
        if app.config.get('MATOMO_URL') and app.config.get('MATOMO_ID'):
            app.config.setdefault('MATOMO_JS', 'matomo.js')
            app.config.setdefault('MATOMO_FILE', 'matomo.php')

        # Initialize statsd metrics reporting
        statsd.init_app(app)

        # Since Flask 0.11, templates are no longer auto reloaded.
        # Setting the config alone doesn't seem to work, so we explicitly
        # set the jinja environment here.
        if app.config.get('TEMPLATES_AUTO_RELOAD') or (
                app.config.get('TEMPLATES_AUTO_RELOAD') is None
                and app.config.get('DEBUG')):
            app.jinja_env.auto_reload = True
        app.jinja_env.add_extension('jinja2.ext.do')
        app.jinja_env.autoescape = _select_jinja_autoescape
        app.jinja_env.globals['request_is_xhr'] = request_is_xhr
        app.jinja_env.globals['get_locale'] = get_locale
        app.jinja_env.globals['csrf_token'] = generate_csrf
        if app.subdomain_matching:
            # Does this app want a static subdomain? (Default: yes, 'static').
            # Apps can disable this by setting STATIC_SUBDOMAIN = None.
            # Since Werkzeug internally uses '' instead of None, but takes None
            # as the default parameter, we remap '' to None in our config
            subdomain = app.config.get('STATIC_SUBDOMAIN', 'static') or None
            if subdomain:
                for rule in app.url_map.iter_rules('static'):
                    # For safety, seek out and update the static route added by Flask.
                    # Do not touch additional static routes added by the app or other
                    # blueprints
                    if not rule.subdomain:  # Will be '' not None
                        rule.subdomain = subdomain
                        rule.refresh()
                        break
        else:
            subdomain = None

        # Flatten ext_requires (which may mix strings with lists/tuples of
        # strings) and load it ahead of the app's own requires list.
        ignore_js: List[str] = ['!jquery.js']
        ignore_css: List[str] = []
        ext_js: List[List[str]] = []
        ext_css: List[List[str]] = []
        requires = [
            item for itemgroup in ext_requires
            for item in (itemgroup if isinstance(itemgroup, (
                list, tuple)) else [itemgroup])
        ] + list(requires)

        app.config['ext_js'] = ext_js
        app.config['ext_css'] = ext_css

        # Partition each requirement into its .js and .css components,
        # keeping any version spec attached.
        assets_js: List[str] = []
        assets_css: List[str] = []
        for item in requires:
            name, spec = split_namespec(item)
            for alist, ext in [(assets_js, '.js'), (assets_css, '.css')]:
                if name + ext in assets:
                    alist.append(name + ext + str(spec))
        # Minify and concatenate the resolved assets into single packed files
        js_all = Bundle(
            assets.require(*(ignore_js + assets_js)),
            filters='uglipyjs',
            output='js/baseframe-packed.js',
        )
        css_all = Bundle(
            assets.require(*(ignore_css + assets_css)),
            filters=['cssrewrite', 'cssmin'],
            output='css/baseframe-packed.css',
        )
        if bundle_js:
            js_all = Bundle(js_all, bundle_js)
        if bundle_css:
            css_all = Bundle(css_all, bundle_css)

        if assetenv is None:
            app.assets = Environment(app)
        else:
            app.assets = assetenv
        app.assets.register('js_jquery', assets.require('jquery.js'))
        app.assets.register('js_all', js_all)
        app.assets.register('css_all', css_all)
        app.register_blueprint(self, static_subdomain=subdomain)

        # Register any additional asset modules; a missing module is logged
        # and skipped rather than failing app startup.
        for module_name in asset_modules:
            try:
                module = __import__(module_name)
                module.blueprint.init_app(app)
                app.register_blueprint(
                    module.blueprint,
                    url_prefix="/_baseframe",
                    static_subdomain=subdomain,
                )
            except ImportError:
                app.logger.warning("Unable to import asset module: %s",
                                   module_name)

        # Optional config for a client app to use a manifest file
        # to load fingerprinted assets
        # If used with webpack, the client app is expected to specify its own
        # webpack.config.js
        # Set `ASSETS_MANIFEST_PATH` in `app.config` to the path for `manifest.json`.
        # Eg: "static/build/manifest.json"
        # Set `ASSET_BASE_PATH` in `app.config` to the path in which the compiled assets
        # are present.
        # Eg: "static/build"
        if app.config.get('ASSET_MANIFEST_PATH'):
            # Load assets into config from a manifest file
            with app.open_resource(app.config['ASSET_MANIFEST_PATH']) as f:
                asset_bundles = json.loads(f.read())
                if app.config.get('assets'):
                    raise ValueError(
                        "Loading assets via a manifest file needs the `ASSETS` config key to be unused"
                    )
                app.config['assets'] = {}
                for _asset_key, _asset_path in asset_bundles['assets'].items():
                    app.config['assets'][_asset_key] = _asset_path

        # Three caches share the app's config but get distinct key prefixes;
        # default to the simple in-memory backend when none is configured.
        app.config.setdefault('CACHE_KEY_PREFIX',
                              'flask_cache_' + app.name + '/')
        nwcacheconfig = dict(app.config)
        nwcacheconfig['CACHE_KEY_PREFIX'] = 'networkbar_'
        if 'CACHE_TYPE' not in nwcacheconfig:
            nwcacheconfig['CACHE_TYPE'] = 'simple'

        acacheconfig = dict(app.config)
        acacheconfig['CACHE_KEY_PREFIX'] = 'asset_'
        if 'CACHE_TYPE' not in acacheconfig:
            acacheconfig['CACHE_TYPE'] = 'simple'

        networkbar_cache.init_app(app, config=nwcacheconfig)
        asset_cache.init_app(app, config=acacheconfig)
        cache.init_app(app)

        babel.init_app(app)
        if toolbar is not None:
            if 'DEBUG_TB_PANELS' not in app.config:
                app.config['DEBUG_TB_PANELS'] = [
                    'flask_debugtoolbar.panels.versions.VersionDebugPanel',
                    'flask_debugtoolbar.panels.timer.TimerDebugPanel',
                    'flask_debugtoolbar.panels.headers.HeaderDebugPanel',
                    'flask_debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
                    'flask_debugtoolbar.panels.config_vars.ConfigVarsDebugPanel',
                    'flask_debugtoolbar.panels.template.TemplateDebugPanel',
                    'flask_debugtoolbar.panels.sqlalchemy.SQLAlchemyDebugPanel',
                    'flask_debugtoolbar.panels.logger.LoggingPanel',
                    'flask_debugtoolbar.panels.route_list.RouteListDebugPanel',
                    'flask_debugtoolbar.panels.profiler.ProfilerDebugPanel',
                ]
            toolbar.init_app(app)

        app.json_encoder = JSONEncoder
        # If this app has a Lastuser extension registered, give it a cache
        lastuser = getattr(app, 'extensions', {}).get('lastuser')
        if lastuser and hasattr(lastuser, 'init_cache'):
            lastuser.init_cache(app=app, cache=cache)

        app.config['tz'] = timezone(app.config.get('TIMEZONE', 'UTC'))

        if theme not in THEME_FILES:
            raise ValueError("Unrecognised theme: %s" % theme)
        app.config['theme'] = theme

        if 'NETWORKBAR_DATA' not in app.config:
            app.config[
                'NETWORKBAR_DATA'] = 'https://api.hasgeek.com/1/networkbar/networkbar.json'

        if isinstance(app.config.get('NETWORKBAR_DATA'), (list, tuple)):
            app.config['NETWORKBAR_LINKS'] = app.config['NETWORKBAR_DATA']

        app.config.setdefault('RECAPTCHA_DATA_ATTRS', {})
        app.config['RECAPTCHA_DATA_ATTRS'].setdefault(
            'callback', 'onInvisibleRecaptchaSubmit')
        app.config['RECAPTCHA_DATA_ATTRS'].setdefault('size', 'invisible')

        # New Relic is enabled only when both the package and its settings
        # file are present
        if newrelic and os.path.isfile('newrelic.ini'):
            newrelic.agent.initialize('newrelic.ini')
            app.logger.debug("Successfully initiated Newrelic")
        elif not newrelic:
            app.logger.debug("Did not find `newrelic` package, skipping it")
        else:
            app.logger.debug(
                "Did not find New Relic settings file newrelic.ini, skipping it"
            )
示例#25
0
def configure_logging(debug: bool, testing: bool, sentry_dsn: t.Optional[str],
                      cur_commit: t.Optional[str]) -> None:
    """Configure the structlog logger.

    :param debug: Are we in a debug environment.
    :param testing: Are we in a testing environment.
    :param sentry_dsn: DSN for Sentry error reporting; ``None`` disables it.
    :param cur_commit: Current git commit, used to build the Sentry release
        name when reporting is enabled.
    """

    json_renderer = structlog.processors.JSONRenderer()

    processors = [
        structlog.stdlib.add_logger_name,
        _add_log_level,
        _add_thread_name,
        _maybe_add_stacktrace,
        structlog.stdlib.PositionalArgumentsFormatter(),
        # NOTE: format_exc_info was previously listed twice; it pops
        # ``exc_info`` on first run, so the duplicate was a no-op and has
        # been removed.
        structlog.processors.format_exc_info,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.UnicodeDecoder(),
        _add_log_as_breadcrumb,
        _maybe_report_error_to_sentry,
        _call_log_callbacks,
        json_renderer,
    ]
    # In a local debug session, render human-readable colored output instead
    # of JSON lines.
    if debug and not testing:
        processors[-1] = structlog.dev.ConsoleRenderer(colors=True)

    structlog.configure(
        processors=processors,
        context_class=structlog.threadlocal.wrap_dict(dict),
        logger_factory=PrintLogger,
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    system_logging.basicConfig(
        format='%(message)s',
        stream=sys.stdout,
    )
    system_logging.getLogger('psef').setLevel(system_logging.DEBUG)
    system_logging.getLogger('werkzeug').setLevel(system_logging.ERROR)
    # Let the celery logger shut up
    # system_logging.getLogger('celery').propagate = False

    if sentry_dsn is not None:
        logger.info('Setting up sentry')
        release = None
        if cur_commit is not None:
            release = f'CodeGra.de@{cur_commit}'

        sentry_sdk.init(
            release=release,
            dsn=sentry_dsn,
            integrations=[
                FlaskIntegration(transaction_style='url'),
                SqlalchemyIntegration(),
                CeleryIntegration(),
            ],
            before_send=_sentry_before_send,
        )
示例#26
0
def init_sentry():
    """Set up Sentry error reporting for the current sms-event-log release."""
    dsn = 'https://[email protected]/5397255'
    sentry_sdk.init(
        dsn=dsn,
        integrations=[SqlalchemyIntegration()],
        release=f'sms-event-log@{VERSION}',
    )
示例#27
0

if __name__ == "__main__":
    listen = sys.argv[1:] or ["default"]
    logger.info("Worker: listening to queues %s", listen)

    log_redis_connection_status()
    log_database_connection_status()

    if settings.IS_DEV is False:
        sentry_sdk.init(
            dsn=settings.SENTRY_DSN,
            integrations=[
                RedisIntegration(),
                RqIntegration(),
                SqlalchemyIntegration()
            ],
            release=read_version_from_file(),
            environment=settings.ENV,
            traces_sample_rate=settings.SENTRY_SAMPLE_RATE,
        )
        logger.info("Worker : connection to sentry OK")

    while True:
        try:
            with app.app_context():
                # This sessions removals are meant to prevent open db connection
                # to spread through forked children and cause bugs in the jobs
                # https://python-rq.org/docs/workers/#the-worker-lifecycle
                # https://docs.sqlalchemy.org/en/13/core/connections.html?highlight=dispose#engine-disposal
                db.session.remove()
示例#28
0
def create_app():
    """Create and configure the Flask application.

    Returns a tuple of ``(app, _manager, db, _jwt)``.  Uses the module-level
    ``app_created`` flag to keep blueprint registration and Sentry setup
    idempotent across repeated calls.
    """
    global app_created
    if not app_created:
        BlueprintsManager.register(app)
    Migrate(app, db)

    app.config.from_object(env('APP_CONFIG',
                               default='config.ProductionConfig'))
    db.init_app(app)
    _manager = Manager(app)
    _manager.add_command('db', MigrateCommand)

    # 'null' cache type disables caching entirely when CACHING is off
    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    # NOTE(review): placeholder credentials hard-coded here — presumably
    # overridden elsewhere for real deployments; confirm before shipping.
    stripe.api_key = 'SomeStripeKey'
    app.secret_key = 'super secret key'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)

    # set up jwt
    app.config['JWT_HEADER_TYPE'] = 'JWT'
    app.config['JWT_ACCESS_TOKEN_EXPIRES'] = timedelta(days=1)
    app.config['JWT_REFRESH_TOKEN_EXPIRES'] = timedelta(days=365)
    app.config['JWT_ERROR_MESSAGE_KEY'] = 'error'
    app.config['JWT_TOKEN_LOCATION'] = ['cookies', 'headers']
    app.config['JWT_REFRESH_COOKIE_PATH'] = '/v1/auth/token/refresh'
    app.config['JWT_SESSION_COOKIE'] = False
    app.config['JWT_BLACKLIST_ENABLED'] = True
    app.config['JWT_BLACKLIST_TOKEN_CHECKS'] = ['refresh']
    _jwt = JWTManager(app)
    _jwt.user_loader_callback_loader(jwt_user_loader)
    _jwt.token_in_blacklist_loader(is_token_blacklisted)

    # setup celery
    app.config['CELERY_BROKER_URL'] = app.config['REDIS_URL']
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']
    app.config['CELERY_ACCEPT_CONTENT'] = ['json', 'application/text']

    CORS(app, resources={r"/*": {"origins": "*"}})
    AuthManager.init_login(app)

    if app.config['TESTING'] and app.config['PROFILE']:
        # Profiling
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # development api
    # Route modules are imported inside the app context because they touch
    # the app/db at import time.
    with app.app_context():
        from app.api.admin_statistics_api.events import event_statistics
        from app.api.auth import auth_routes
        from app.api.attendees import attendee_misc_routes
        from app.api.bootstrap import api_v1
        from app.api.celery_tasks import celery_routes
        from app.api.event_copy import event_copy
        from app.api.exports import export_routes
        from app.api.imports import import_routes
        from app.api.uploads import upload_routes
        from app.api.users import user_misc_routes
        from app.api.orders import order_misc_routes
        from app.api.role_invites import role_invites_misc_routes
        from app.api.auth import authorised_blueprint
        from app.api.admin_translations import admin_blueprint
        from app.api.orders import alipay_blueprint
        from app.api.settings import admin_misc_routes
        from app.api.server_version import info_route
        from app.api.custom.orders import ticket_blueprint

        app.register_blueprint(api_v1)
        app.register_blueprint(event_copy)
        app.register_blueprint(upload_routes)
        app.register_blueprint(export_routes)
        app.register_blueprint(import_routes)
        app.register_blueprint(celery_routes)
        app.register_blueprint(auth_routes)
        app.register_blueprint(event_statistics)
        app.register_blueprint(user_misc_routes)
        app.register_blueprint(attendee_misc_routes)
        app.register_blueprint(order_misc_routes)
        app.register_blueprint(role_invites_misc_routes)
        app.register_blueprint(ticket_blueprint)
        app.register_blueprint(authorised_blueprint)
        app.register_blueprint(admin_blueprint)
        app.register_blueprint(alipay_blueprint)
        app.register_blueprint(admin_misc_routes)
        app.register_blueprint(info_route)

        add_engine_pidguard(db.engine)

        if app.config['SQLALCHEMY_DATABASE_URI'].startswith("sqlite://"):
            sqlite_datetime_fix()

    sa.orm.configure_mappers()

    if app.config['SERVE_STATIC']:
        app.add_url_rule('/static/<path:filename>',
                         endpoint='static',
                         view_func=app.send_static_file)

    # sentry
    if not app_created and 'SENTRY_DSN' in app.config:
        sentry_sdk.init(app.config['SENTRY_DSN'],
                        integrations=[
                            FlaskIntegration(),
                            RedisIntegration(),
                            CeleryIntegration(),
                            SqlalchemyIntegration()
                        ])

    # redis
    redis_store.init_app(app)

    # elasticsearch
    if app.config['ENABLE_ELASTICSEARCH']:
        client.init_app(app)
        connections.add_connection('default', client.elasticsearch)
        with app.app_context():
            try:
                cron_rebuild_events_elasticsearch.delay()
            except Exception:
                # Best-effort kickoff of the reindex task; failures here
                # must not block app creation.
                pass

    app_created = True
    return app, _manager, db, _jwt
示例#29
0
def main():
    """Command-line entry point for Taguette.

    Sets up logging, locale and gettext translations, parses command-line
    arguments, builds the configuration (either from a server config file
    or from single-user defaults), optionally starts a Prometheus exporter
    and Sentry, then serves the Tornado web application on the IO loop.
    """
    # Reset any pre-existing handlers so basicConfig takes full effect.
    logging.root.handlers.clear()
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s %(levelname)s: %(message)s")
    # Install gettext translations matching the system locale; fall back
    # to untranslated strings if no catalog is found.
    locale.setlocale(locale.LC_ALL, '')
    lang = locale.getlocale()[0]
    lang = [lang] if lang else []
    d = pkg_resources.resource_filename('taguette', 'l10n')
    trans = gettext.translation('taguette_main', d, lang, fallback=True)
    taguette._trans = trans
    _ = trans.gettext

    if sys.platform == 'win32' and sys.version_info >= (3, 8):
        # Tornado needs the selector event loop on Windows (the proactor
        # loop became the default in Python 3.8).
        # https://github.com/tornadoweb/tornado/issues/2608
        try:
            from asyncio import WindowsSelectorEventLoopPolicy
        except ImportError:
            pass
        else:
            policy = asyncio.get_event_loop_policy()
            if not isinstance(policy, WindowsSelectorEventLoopPolicy):
                asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())

    # Compute a platform-appropriate default database location, plus a
    # shortened form (default_db_show) used in the --help text.
    if sys.platform == 'win32':
        import ctypes.wintypes

        CSIDL_PERSONAL = 5  # My Documents
        SHGFP_TYPE_CURRENT = 0  # Get current, not default value

        # Ask the Windows shell for the user's "My Documents" folder.
        buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
        ctypes.windll.shell32.SHGetFolderPathW(None, CSIDL_PERSONAL, None,
                                               SHGFP_TYPE_CURRENT, buf)

        default_db = os.path.join(buf.value, 'Taguette', 'taguette.sqlite3')
        default_db_show = os.path.join(os.path.basename(buf.value),
                                       'Taguette', 'taguette.sqlite3')
    else:
        # Follow the XDG base-directory convention elsewhere.
        data = os.environ.get('XDG_DATA_HOME')
        if not data:
            data = os.path.join(os.environ['HOME'], '.local', 'share')
            default_db_show = '$HOME/.local/share/taguette/taguette.sqlite3'
        else:
            default_db_show = '$XDG_DATA_HOME/taguette/taguette.sqlite3'
        default_db = os.path.join(data, 'taguette', 'taguette.sqlite3')

    # Build the command-line interface; help strings go through gettext.
    parser = argparse.ArgumentParser(
        description="Document tagger for qualitative analysis",
    )
    parser.add_argument('--version', action='version',
                        version='taguette version %s' % __version__)
    parser.add_argument('-p', '--port', default='7465',
                        help=_("Port number on which to listen"))
    parser.add_argument('-b', '--bind', default='127.0.0.1',
                        help=_("Address to bind on"))
    parser.add_argument('--browser', action='store_true', default=True,
                        help=_("Open web browser to the application"))
    parser.add_argument('--no-browser', action='store_false', dest='browser',
                        help=_("Don't open the web browser"))
    # --debug is intentionally hidden from --help output.
    parser.add_argument('--debug', action='store_true', default=False,
                        help=argparse.SUPPRESS)
    parser.add_argument('--database', action='store',
                        default=default_db,
                        help=_("Database location or connection string, for "
                               "example 'project.db' or "
                               "'postgresql://*****:*****@localhost/mydb' "
                               "(default: %(default)r)") %
                        dict(default=default_db_show))
    parser.add_argument('--set-umask', action='store', dest='umask',
                        default="077",
                        help=_("Set the file creation mask (umask) on systems "
                               "that support it."))
    parser.add_argument('--dont-set-umask', action='store_const', dest='umask',
                        const=None,
                        help=_("Don't change umask on startup"))
    parser.set_defaults(func=None)

    # Subcommands: migrate, default-config, server.
    subparsers = parser.add_subparsers(title=_("additional commands"),
                                       metavar='', dest='cmd')

    parser_migrate = subparsers.add_parser('migrate',
                                           help=_("Manually trigger a "
                                                  "database migration"))
    parser_migrate.add_argument('revision', action='store', default='head',
                                nargs=argparse.OPTIONAL)
    parser_migrate.set_defaults(
        func=lambda args: migrate(prepare_db(args.database), args.revision))

    parser_config = subparsers.add_parser(
        'default-config',
        help=_("Print the default server configuration"))
    parser_config.add_argument('--output', '-o', action='store', nargs=1,
                               help=_("Output to this file rather than "
                                      "stdout"))
    parser_config.set_defaults(func=lambda args: default_config(args.output))

    # 'server' sets no func: it is handled by the args.cmd check below.
    parser_server = subparsers.add_parser(
        'server',
        help=_("Run in server mode, suitable for a multi-user deployment"))
    parser_server.add_argument('config_file',
                               help=_("Configuration file for the server. The "
                                      "default configuration can be generated "
                                      "using the `default-config` command"))

    args = parser.parse_args()

    # Validate and apply the umask (exactly three octal digits), unless
    # --dont-set-umask cleared it.
    if args.umask is not None:
        if not re.match(r'^[0-7][0-7][0-7]$', args.umask):
            print(_("Invalid umask: %(arg)s") % dict(arg=args.umask),
                  file=sys.stderr, flush=True)
            sys.exit(2)
        logger.info("Setting umask to %s", args.umask)
        os.umask(int(args.umask, 8))

    # A subcommand with a handler (migrate / default-config) runs and exits.
    if args.func:
        args.func(args)
        sys.exit(0)

    if args.cmd == 'server':
        # Set configuration from config file
        config = {}
        # NOTE: the config file is executed as Python code and must be
        # trusted; this mirrors the format `default-config` emits.
        with open(args.config_file) as fp:
            exec(fp.read(), config)
        # File values override the defaults.
        config = dict(
            DEFAULT_CONFIG,
            **config
        )
        # Report every missing required key before exiting.
        missing = False
        for key in REQUIRED_CONFIG:
            if key not in config:
                print(_("Missing required configuration variable %(var)s") %
                      dict(var=key),
                      file=sys.stderr, flush=True)
                missing = True
        if missing:
            sys.exit(2)
    else:
        if args.debug:
            # Use a deterministic secret key, to avoid it changing during
            # auto-reload and such
            secret = 'debug'
        else:
            secret = os.urandom(30).decode('iso-8859-15')

        # Set configuration from command-line
        config = dict(
            DEFAULT_CONFIG,
            MULTIUSER=False,
            BIND_ADDRESS=args.bind,
            X_HEADERS=False,
            PORT=int(args.port),
            DATABASE=prepare_db(args.database),
            SECRET_KEY=secret,
            COOKIES_PROMPT=False,
            HTML_OUT_SIZE_LIMIT=5000000,  # 5 MB
        )

    # Optional Prometheus exporter; PROMETHEUS_LISTEN may be an int port,
    # a "port" string, or an "addr:port" string.
    if 'PROMETHEUS_LISTEN' in config:
        p_addr = None
        p_port = config['PROMETHEUS_LISTEN']
        if isinstance(p_port, str):
            if ':' in p_port:
                p_addr, p_port = p_port.split(':')
                p_addr = p_addr or None
            p_port = int(p_port)
        logger.info("Starting Prometheus exporter on port %d", p_port)
        prometheus_client.start_http_server(p_port, p_addr)

    # Derive the running version from git when available, falling back to
    # the packaged __version__.
    try:
        version = subprocess.check_output(
            ['git', '--git-dir=.git', 'describe'],
            cwd=os.path.dirname(os.path.dirname(__file__)),
            stderr=subprocess.PIPE,
        ).decode('utf-8').strip()
    except (OSError, subprocess.CalledProcessError):
        version = 'v%s' % __version__
        logger.info("Not a Git repository, using version=%s", version)
    else:
        logger.info("Running from Git repository, using version=%s",
                    version)
    PROM_VERSION.labels(version).set(1)

    # Optional Sentry error reporting (imported lazily, only when enabled).
    if 'SENTRY_DSN' in config:
        import sentry_sdk
        from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
        from sentry_sdk.integrations.tornado import TornadoIntegration
        logger.info("Initializing Sentry")
        sentry_sdk.init(
            dsn=config['SENTRY_DSN'],
            integrations=[TornadoIntegration(), SqlalchemyIntegration()],
            ignore_errors=[KeyboardInterrupt],
            release='taguette@%s' % version,
        )

    # Build and start the web application.
    app = make_app(config, debug=args.debug)
    app.listen(config['PORT'], address=config['BIND_ADDRESS'],
               xheaders=config.get('X_HEADERS', False))
    loop = tornado.ioloop.IOLoop.current()

    # In single-user mode, include the login token in the printed URL.
    token = app.single_user_token
    if token:
        url = 'http://localhost:%d/?token=%s' % (config['PORT'], token)
    else:
        url = 'http://localhost:%d/' % config['PORT']
    print(_("\n    Taguette %(version)s is now running. You can connect to it "
            "using this link:\n\n    %(url)s\n") %
          dict(url=url, version=__version__), flush=True)

    # Open the browser shortly after the loop starts (skipped in debug
    # mode to avoid re-opening on auto-reload).
    if args.browser and not args.debug:
        loop.call_later(0.01, webbrowser.open, url)

    loop.start()
示例#30
0
# Import environment variables from local env file(s).
# LOCAL_ENV_FILE, when set, names a single file to load; otherwise every
# file matching "local.*.env" in the working directory is loaded.
if local_env_file := os.getenv("LOCAL_ENV_FILE"):
    local_config_files = [local_env_file]
else:
    local_config_files = glob.iglob("local.*.env")

# BUG FIX: the loop previously re-globbed "local.*.env", silently ignoring
# local_config_files and making the LOCAL_ENV_FILE override dead code.
for local_config_file in local_config_files:
    logging.info(f'Importing environment from "{local_config_file}".')
    # Each line is KEY=VALUE; skip lines with no '=', an empty key, or an
    # empty value (find() returns -1 when '=' is absent).
    with open(local_config_file) as env_fp:
        for line in env_fp:
            clean_line = line.strip()
            eq_idx = clean_line.find("=")
            if 0 < eq_idx < len(clean_line) - 1:
                os.environ[clean_line[:eq_idx]] = clean_line[eq_idx + 1 :]  # noqa

# Initialize Sentry error reporting with Celery, SQLAlchemy, aiohttp and
# Redis integrations. NOTE(review): when SENTRY_DSN is unset, dsn is None,
# which presumably disables reporting — confirm against the SDK version in use.
sentry_sdk.init(
    dsn=os.getenv("SENTRY_DSN"),
    integrations=[CeleryIntegration(), SqlalchemyIntegration(), AioHttpIntegration(), RedisIntegration()],
)


def getenv_boolean(var_name: str, default_value: bool = False) -> bool:
    """Read an environment variable as a boolean.

    Returns ``default_value`` when the variable is unset; otherwise True
    exactly when the value is "TRUE" or "1" (case-insensitive).
    """
    raw = os.getenv(var_name)
    if raw is None:
        return default_value
    return raw.upper() in ("TRUE", "1")


def getenv_int(var_name: str, default_value: int = 0) -> int:
    result = default_value
    env_value = os.getenv(var_name)
    if env_value: