Code example #1
0
def _get_rules():
    """Return the current rules to be used with categorize.categorize.
    NB: Reloads the rules file at each call.
    """
    path = current_app.config.get('CATEGORIZE_RULES_FILE')
    if path:
        return categorize.load_rules(path)
    return None
Code example #2
0
def _get_rules():
    """Return the categorization rules currently configured for this app.

    The rules file named by ``CATEGORIZE_RULES_FILE`` is re-read on every
    call, so edits take effect without a restart. Returns ``None`` when no
    rules file is configured.
    """
    configured = current_app.config.get('CATEGORIZE_RULES_FILE')
    return categorize.load_rules(configured) if configured else None
Code example #3
0
File: config.py — Project: martyanov/changes
def create_app(_read_config=True, **config):
    """Create and configure the Changes Flask application.

    Sets built-in defaults for every supported config key, applies the
    ``**config`` keyword overrides, then (unless ``_read_config`` is False)
    layers the operator's config file on top (``$CHANGES_CONF`` if set,
    otherwise ``~/.changes/changes.conf.py``). Finally wires up extensions
    (Sentry, DB, mail, Celery queue, Redis, stats), routes, the v2 webapp
    blueprint, and background job configuration.

    :param _read_config: when True, read the external config file after
        applying ``**config``.
    :param config: keyword config overrides (applied before the file).
    :return: the fully configured ``flask.Flask`` application.
    :raises ValueError: if ``BASE_URI`` ends up unset after all overrides.
    """
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))

    # Trust X-Forwarded-* headers from the fronting proxy so url_for and
    # request.remote_addr reflect the original client.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    # app.wsgi_app = TracerMiddleware(app.wsgi_app, app)

    # This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'

    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    # required for flask-debugtoolbar and the db perf metrics we record
    app.config['SQLALCHEMY_RECORD_QUERIES'] = True

    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

    app.config['API_TRACEBACKS'] = True

    # Expiration delay between when a snapshot image becomes superseded and when
    # it becomes truly expired (and thus no longer included in the sync information
    # for any cluster that runs that particular image's plan)
    app.config['CACHED_SNAPSHOT_EXPIRATION_DELTA'] = timedelta(hours=1)

    # default snapshot ID to use when no project-specific active image available
    app.config['DEFAULT_SNAPSHOT'] = None
    app.config['SNAPSHOT_S3_BUCKET'] = None
    app.config['LXC_PRE_LAUNCH'] = None
    app.config['LXC_POST_LAUNCH'] = None

    # Location of artifacts server that is passed to changes-client
    # (include http:// or https://)
    app.config['ARTIFACTS_SERVER'] = None

    app.config['CHANGES_CLIENT_DEFAULT_BUILD_TYPE'] = 'legacy'

    # This is a hash from each build type (string identifiers used in
    # build step configuration) to a "build spec", a definition of
    # how to use changes-client to build. To use changes-client, the key
    # 'uses_client' must be set to True.
    #
    # Required build spec keys for client:
    #   adapter -> basic or lxc
    #   jenkins-command -> command to run from jenkins directly ($JENKINS_COMMAND)
    #   commands -> array of hash from script -> string that represents a script
    #
    # Optional keys (lxc-only)
    #   pre-launch -> lxc pre-launch script
    #   post-launch -> lxc post-launch script
    #   release -> lxc release
    app.config['CHANGES_CLIENT_BUILD_TYPES'] = {
        'legacy': {'uses_client': False},
    }

    # Celery transport/serialization settings; 'changes_json' is a custom
    # serializer registered elsewhere in the project.
    app.config['CELERY_ACCEPT_CONTENT'] = ['changes_json']
    app.config['CELERY_ACKS_LATE'] = True
    app.config['CELERY_BROKER_URL'] = 'redis://localhost/0'
    app.config['CELERY_DEFAULT_QUEUE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
    app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
    app.config['CELERY_DISABLE_RATE_LIMITS'] = True
    app.config['CELERY_IGNORE_RESULT'] = True
    app.config['CELERY_RESULT_BACKEND'] = None
    app.config['CELERY_RESULT_SERIALIZER'] = 'changes_json'
    app.config['CELERY_SEND_EVENTS'] = False
    app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
    app.config['CELERY_TASK_SERIALIZER'] = 'changes_json'
    app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
    app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000

    # By default, Celery logs writes to stdout/stderr as WARNING, which
    # is a bit harsh considering that some of the code is code we don't
    # own calling 'print'. This flips the default back to INFO, which seems
    # more appropriate. Can be overridden by the Changes config.
    app.config['CELERY_REDIRECT_STDOUTS_LEVEL'] = 'INFO'

    app.config['CELERY_QUEUES'] = (
        Queue('job.sync', routing_key='job.sync'),
        Queue('job.create', routing_key='job.create'),
        Queue('celery', routing_key='celery'),
        Queue('events', routing_key='events'),
        Queue('default', routing_key='default'),
        Queue('repo.sync', Exchange('fanout', 'fanout'), routing_key='repo.sync'),
    )
    # Map each named task onto the queue/routing key it should run on.
    app.config['CELERY_ROUTES'] = {
        'create_job': {
            'queue': 'job.create',
            'routing_key': 'job.create',
        },
        'sync_job': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_job_step': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_build': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'check_repos': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'sync_repo': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'run_event_listener': {
            'queue': 'events',
            'routing_key': 'events',
        },
        'fire_signal': {
            'queue': 'events',
            'routing_key': 'events',
        },
    }

    # (handler dotted path, signal name) pairs subscribed at startup.
    app.config['EVENT_LISTENERS'] = (
        ('changes.listeners.mail.build_finished_handler', 'build.finished'),
        ('changes.listeners.green_build.build_finished_handler', 'build.finished'),
        ('changes.listeners.build_revision.revision_created_handler', 'revision.created'),
        ('changes.listeners.build_finished_notifier.build_finished_handler', 'build.finished'),
        ('changes.listeners.phabricator_listener.build_finished_handler', 'build.finished'),
        ('changes.listeners.analytics_notifier.build_finished_handler', 'build.finished'),
        ('changes.listeners.analytics_notifier.job_finished_handler', 'job.finished'),
        ('changes.listeners.snapshot_build.build_finished_handler', 'build.finished'),
    )

    # restrict outbound notifications to the given domains
    app.config['MAIL_DOMAIN_WHITELIST'] = ()

    app.config['DEBUG_TB_ENABLED'] = True

    # celerybeat must be running for our cleanup tasks to execute
    # e.g. celery worker -B
    app.config['CELERYBEAT_SCHEDULE'] = {
        'cleanup-tasks': {
            'task': 'cleanup_tasks',
            'schedule': timedelta(minutes=1),
        },
        'check-repos': {
            'task': 'check_repos',
            'schedule': timedelta(minutes=2),
        },
        'aggregate-flaky-tests': {
            'task': 'aggregate_flaky_tests',
            # Hour 7 GMT is midnight PST, hopefully a time of low load
            'schedule': crontab(hour=7, minute=0),
        },
    }
    app.config['CELERY_TIMEZONE'] = 'UTC'

    app.config['SENTRY_DSN'] = None
    app.config['SENTRY_INCLUDE_PATHS'] = [
        'changes',
    ]

    app.config['JENKINS_URL'] = None
    app.config['JENKINS_CLUSTERS'] = {}

    app.config['KOALITY_URL'] = None
    app.config['KOALITY_API_KEY'] = None

    app.config['GOOGLE_CLIENT_ID'] = None
    app.config['GOOGLE_CLIENT_SECRET'] = None
    app.config['GOOGLE_DOMAIN'] = None

    # must be a URL-safe base64-encoded 32-byte key
    app.config['COOKIE_ENCRYPTION_KEY'] = 'theDefaultKeyIs32BytesLongAndTotallyURLSafe='

    app.config['REPO_ROOT'] = None

    app.config['DEFAULT_FILE_STORAGE'] = 'changes.storage.s3.S3FileStorage'
    app.config['S3_ACCESS_KEY'] = None
    app.config['S3_SECRET_KEY'] = None
    app.config['S3_BUCKET'] = None

    app.config['PHABRICATOR_HOST'] = None
    app.config['PHABRICATOR_USERNAME'] = None
    app.config['PHABRICATOR_CERT'] = None

    app.config['MAIL_DEFAULT_SENDER'] = 'changes@localhost'
    app.config['BASE_URI'] = 'http://localhost:5000'

    # if set to a string, most (all?) of the frontend js will make API calls
    # to the host this string is set to (e.g. http://changes.bigcompany.com)
    # THIS IS JUST FOR EASIER TESTING IN DEVELOPMENT. Although it won't even
    # work in prod: you'll have to start chrome with --disable-web-security to
    # make this work. Override this in your changes.conf.py file
    app.config['WEBAPP_USE_ANOTHER_HOST'] = None

    # Custom changes content unique to your deployment. This is intended to
    # customize the look and feel, provide contextual help and add custom links
    # to other internal tools. You should put your files in webapp/custom and
    # link them here.
    #
    # e.g. /acmecorp-changes/changes.js
    #
    # Some of the custom_content hooks can show images. Assume that the webserver
    # is willing to serve any file within the directory of the js file
    app.config['WEBAPP_CUSTOM_JS'] = None
    # This can be a .less file. We import it after the variables.less,
    # so you can override them in your file
    # Note: if you change this and nothing seems to happen, try deleting
    # webapp/.webassets-cache and bundled.css. This probably won't happen, though
    app.config['WEBAPP_CUSTOM_CSS'] = None

    # In minutes, the timeout applied to jobs without a timeout specified at build time.
    # A timeout should nearly always be specified; this is just a safeguard so that
    # unspecified timeout doesn't mean "is allowed to run indefinitely".
    app.config['DEFAULT_JOB_TIMEOUT_MIN'] = 60

    # Number of milliseconds a transaction can run before triggering a warning.
    app.config['TRANSACTION_MS_WARNING_THRESHOLD'] = 2500

    # Maximum number of jobsteps to retry for a given job
    app.config['JOBSTEP_RETRY_MAX'] = 1

    # we opt these users into the new ui...redirecting them if they
    # hit the homepage
    app.config['NEW_UI_OPTIN_USERS'] = set([])

    # the PHID of the user creating quarantine tasks. We can use this to show
    # the list of open quarantine tasks inline
    app.config['QUARANTINE_PHID'] = None

    # Apply caller overrides first, then (optionally) the external config
    # file, which therefore wins over both the defaults and **config.
    app.config.update(config)
    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            path = os.path.normpath(os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)

    # default the DSN for changes-client to the server's DSN
    app.config.setdefault('CLIENT_SENTRY_DSN', app.config['SENTRY_DSN'])

    if not app.config['BASE_URI']:
        raise ValueError('You must set ``BASE_URI`` in your configuration.')

    # Derive Flask's SERVER_NAME / URL scheme from BASE_URI unless already set.
    parsed_url = urlparse(app.config['BASE_URI'])
    app.config.setdefault('SERVER_NAME', parsed_url.netloc)
    app.config.setdefault('PREFERRED_URL_SCHEME', parsed_url.scheme)

    # Static assets: no caching in debug so edits show up immediately.
    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30

    app.url_map.converters['uuid'] = UUIDConverter

    # now that config is set up, let's ensure the CUSTOM_JS / CUSTOM_CSS
    # variables are safe (within the changes directory) and convert them to
    # absolute paths
    if app.config['WEBAPP_CUSTOM_CSS']:
        app.config['WEBAPP_CUSTOM_CSS'] = os.path.join(
            PROJECT_ROOT, 'webapp/custom/', app.config['WEBAPP_CUSTOM_CSS'])

        enforce_is_subdir(
            app.config['WEBAPP_CUSTOM_CSS'],
            os.path.join(PROJECT_ROOT, 'webapp/custom'))

    if app.config['WEBAPP_CUSTOM_JS']:
        app.config['WEBAPP_CUSTOM_JS'] = os.path.join(
            PROJECT_ROOT, 'webapp/custom/', app.config['WEBAPP_CUSTOM_JS'])

        enforce_is_subdir(
            app.config['WEBAPP_CUSTOM_JS'],
            os.path.join(PROJECT_ROOT, 'webapp/custom'))

    # init sentry first
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        # Attach the logged-in user (if any) to Sentry error reports.
        if 'uid' in session:
            sentry.client.user_context({
                'id': session['uid'],
                'email': session['email'],
            })

    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    statsreporter.init_app(app)

    configure_debug_toolbar(app)

    # Route Celery task errors and WARNING+ log records to Sentry.
    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client, loglevel=logging.WARNING)

    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)

    configure_templates(app)

    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    app_static_root = configure_web_routes(app)

    # blueprint for our new v2 webapp
    blueprint = create_v2_blueprint(app, app_static_root)
    app.register_blueprint(blueprint, url_prefix='/v2')

    configure_jobs(app)
    configure_transaction_logging(app)

    rules_file = app.config.get('CATEGORIZE_RULES_FILE')
    if rules_file:
        # Fail at startup if we have a bad rules file.
        categorize.load_rules(rules_file)

    return app
Code example #4
0
File: config.py — Project: gregorynicholas/changes
def create_app(_read_config=True, **config):
    """Create and configure the Changes Flask application.

    Sets built-in defaults for all supported config keys, applies the
    ``**config`` keyword overrides, then (unless ``_read_config`` is False)
    layers the operator's config file on top (``$CHANGES_CONF`` if set,
    otherwise ``~/.changes/changes.conf.py``). Finally initializes the
    extensions (Sentry, DB, mail, Celery queue, Redis, stats), routes, and
    background job configuration.

    :param _read_config: when True, read the external config file after
        applying ``**config``.
    :param config: keyword config overrides (applied before the file).
    :return: the fully configured ``flask.Flask`` application.
    :raises ValueError: if ``BASE_URI`` ends up unset after all overrides.
    """
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))

    # Trust X-Forwarded-* headers from the fronting proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    # app.wsgi_app = TracerMiddleware(app.wsgi_app, app)

    # This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'

    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    # required for flask-debugtoolbar
    app.config['SQLALCHEMY_RECORD_QUERIES'] = True

    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

    app.config['API_TRACEBACKS'] = True

    # default snapshot ID to use when no project-specific active image available
    app.config['DEFAULT_SNAPSHOT'] = None
    app.config['SNAPSHOT_S3_BUCKET'] = None
    app.config['LXC_PRE_LAUNCH'] = None
    app.config['LXC_POST_LAUNCH'] = None

    # Celery transport/serialization settings; 'changes_json' is a custom
    # serializer registered elsewhere in the project.
    app.config['CELERY_ACCEPT_CONTENT'] = ['changes_json']
    app.config['CELERY_ACKS_LATE'] = True
    app.config['CELERY_BROKER_URL'] = 'redis://localhost/0'
    app.config['CELERY_DEFAULT_QUEUE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
    app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
    app.config['CELERY_DISABLE_RATE_LIMITS'] = True
    app.config['CELERY_IGNORE_RESULT'] = True
    app.config['CELERY_RESULT_BACKEND'] = None
    app.config['CELERY_RESULT_SERIALIZER'] = 'changes_json'
    app.config['CELERY_SEND_EVENTS'] = False
    app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
    app.config['CELERY_TASK_SERIALIZER'] = 'changes_json'
    app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
    app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000

    # By default, Celery logs writes to stdout/stderr as WARNING, which
    # is a bit harsh considering that some of the code is code we don't
    # own calling 'print'. This flips the default back to INFO, which seems
    # more appropriate. Can be overridden by the Changes config.
    app.config['CELERY_REDIRECT_STDOUTS_LEVEL'] = 'INFO'

    app.config['CELERY_QUEUES'] = (
        Queue('job.sync', routing_key='job.sync'),
        Queue('job.create', routing_key='job.create'),
        Queue('celery', routing_key='celery'),
        Queue('events', routing_key='events'),
        Queue('default', routing_key='default'),
        Queue('repo.sync', Exchange('fanout', 'fanout'), routing_key='repo.sync'),
    )
    # Map each named task onto the queue/routing key it should run on.
    app.config['CELERY_ROUTES'] = {
        'create_job': {
            'queue': 'job.create',
            'routing_key': 'job.create',
        },
        'sync_job': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_job_step': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_build': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'check_repos': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'sync_repo': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'run_event_listener': {
            'queue': 'events',
            'routing_key': 'events',
        },
        'fire_signal': {
            'queue': 'events',
            'routing_key': 'events',
        },
    }

    # (handler dotted path, signal name) pairs subscribed at startup.
    app.config['EVENT_LISTENERS'] = (
        ('changes.listeners.mail.build_finished_handler', 'build.finished'),
        ('changes.listeners.green_build.build_finished_handler', 'build.finished'),
        ('changes.listeners.build_revision.revision_created_handler', 'revision.created'),
        ('changes.listeners.phabricator_listener.build_finished_handler', 'build.finished'),
        ('changes.listeners.analytics_notifier.build_finished_handler', 'build.finished'),
        ('changes.listeners.log_processing.job_finished_handler', 'job.finished'),
    )

    # restrict outbound notifications to the given domains
    app.config['MAIL_DOMAIN_WHITELIST'] = ()

    app.config['DEBUG_TB_ENABLED'] = True

    # celerybeat must be running for our cleanup tasks to execute
    # e.g. celery worker -B
    app.config['CELERYBEAT_SCHEDULE'] = {
        'cleanup-tasks': {
            'task': 'cleanup_tasks',
            'schedule': timedelta(minutes=1),
        },
        'check-repos': {
            'task': 'check_repos',
            'schedule': timedelta(minutes=2),
        },
    }
    app.config['CELERY_TIMEZONE'] = 'UTC'

    app.config['SENTRY_DSN'] = None
    app.config['SENTRY_INCLUDE_PATHS'] = [
        'changes',
    ]

    app.config['JENKINS_AUTH'] = None
    app.config['JENKINS_URL'] = None
    app.config['JENKINS_TOKEN'] = None
    app.config['JENKINS_CLUSTERS'] = {}

    app.config['KOALITY_URL'] = None
    app.config['KOALITY_API_KEY'] = None

    app.config['GOOGLE_CLIENT_ID'] = None
    app.config['GOOGLE_CLIENT_SECRET'] = None
    app.config['GOOGLE_DOMAIN'] = None

    app.config['REPO_ROOT'] = None

    app.config['DEFAULT_FILE_STORAGE'] = 'changes.storage.s3.S3FileStorage'
    app.config['S3_ACCESS_KEY'] = None
    app.config['S3_SECRET_KEY'] = None
    app.config['S3_BUCKET'] = None

    app.config['PHABRICATOR_HOST'] = None
    app.config['PHABRICATOR_USERNAME'] = None
    app.config['PHABRICATOR_CERT'] = None

    app.config['MAIL_DEFAULT_SENDER'] = 'changes@localhost'
    app.config['BASE_URI'] = 'http://localhost:5000'

    # In minutes, the timeout applied to jobs without a timeout specified at build time.
    # A timeout should nearly always be specified; this is just a safeguard so that
    # unspecified timeout doesn't mean "is allowed to run indefinitely".
    app.config['DEFAULT_JOB_TIMEOUT_MIN'] = 60

    # Apply caller overrides first, then (optionally) the external config
    # file, which therefore wins over both the defaults and **config.
    app.config.update(config)
    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            path = os.path.normpath(os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)

    # default the DSN for changes-client to the server's DSN
    app.config.setdefault('CLIENT_SENTRY_DSN', app.config['SENTRY_DSN'])

    if not app.config['BASE_URI']:
        raise ValueError('You must set ``BASE_URI`` in your configuration.')

    # Derive Flask's SERVER_NAME / URL scheme from BASE_URI unless already set.
    parsed_url = urlparse(app.config['BASE_URI'])
    app.config.setdefault('SERVER_NAME', parsed_url.netloc)
    app.config.setdefault('PREFERRED_URL_SCHEME', parsed_url.scheme)

    # Static assets: no caching in debug so edits show up immediately.
    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30

    app.url_map.converters['uuid'] = UUIDConverter

    # init sentry first
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        # Attach the logged-in user (if any) to Sentry error reports.
        if 'uid' in session:
            sentry.client.user_context({
                'id': session['uid'],
                'email': session['email'],
            })

    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    statsreporter.init_app(app)

    configure_debug_toolbar(app)

    # Route Celery task errors and WARNING+ log records to Sentry.
    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client, loglevel=logging.WARNING)

    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)

    configure_templates(app)

    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    configure_web_routes(app)

    configure_jobs(app)

    rules_file = app.config.get('CATEGORIZE_RULES_FILE')
    if rules_file:
        # Fail at startup if we have a bad rules file.
        categorize.load_rules(rules_file)

    return app
Code example #5
0
File: config.py — Project: jhance/changes
def create_app(_read_config=True, **config):
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))

    # app.wsgi_app = TracerMiddleware(app.wsgi_app, app)

    # This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'

    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    # required for flask-debugtoolbar and the db perf metrics we record
    app.config['SQLALCHEMY_RECORD_QUERIES'] = True

    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

    app.config['API_TRACEBACKS'] = True

    # Expiration delay between when a snapshot image becomes superceded and when
    # it becomes truly expired (and thus no longer included in the sync information
    # for any cluster that runs that particular image's plan)
    app.config['CACHED_SNAPSHOT_EXPIRATION_DELTA'] = timedelta(hours=1)

    # default snapshot ID to use when no project-specific active image available
    app.config['DEFAULT_SNAPSHOT'] = None
    app.config['SNAPSHOT_S3_BUCKET'] = None
    app.config['LXC_PRE_LAUNCH'] = None
    app.config['LXC_POST_LAUNCH'] = None

    # APT mirror URLs to use for new LXC containers created by changes-client.
    # NB: these aren't currently supported in the public changes-client repo.
    app.config['LXC_APT_MIRROR'] = None
    app.config['LXC_APT_SECURITY_MIRROR'] = None

    # name of the template to use for LXC (usually the name of a particular
    # Linux distro). Defaults to ubuntu.
    app.config['LXC_TEMPLATE'] = 'ubuntu'

    # Location of artifacts server that is passed to changes-client
    # (include http:// or https://)
    #
    # The default artifact server url uses a random uri which is expected to fail
    # without being overridden. This value is referenced in test code.
    app.config['ARTIFACTS_SERVER'] = 'http://localhost:1234'

    # the binary to use for running changes-client. Default is just
    # "changes-client", but can also be specified as e.g. a full path.
    app.config['CHANGES_CLIENT_BINARY'] = 'changes-client'

    app.config['CHANGES_CLIENT_DEFAULT_BUILD_TYPE'] = 'legacy'

    # Base URI to use for git repos that we want to clone (currently only used
    # for the "other_repos" buildstep config). The repo name is appended
    # directly to this, so it should already contain necessary colons and
    # slashes, etc. For example, if GIT_DEFAULT_BASE_URI is `[email protected]:`
    # and a repo is specified as `changes.git`, the clone url will be
    # `[email protected]:changes.git`
    app.config['GIT_DEFAULT_BASE_URI'] = None
    # Same as GIT_DEFAULT_BASE_URI but used for mercurial repos.
    app.config['MERCURIAL_DEFAULT_BASE_URI'] = None

    # This is a hash from each build type (string identifiers used in
    # build step configuration) to a "build spec", a definition of
    # how to use changes-client to build. To use changes-client, the key
    # 'uses_client' must be set to True.
    #
    # Required build spec keys for client:
    #   adapter -> basic or lxc
    #   jenkins-command -> command to run from jenkins directly ($JENKINS_COMMAND)
    #   commands -> array of hash from script -> string that represents a script
    #
    # Optional keys (lxc-only)
    #   pre-launch -> lxc pre-launch script
    #   post-launch -> lxc post-launch script
    #   release -> lxc release
    app.config['CHANGES_CLIENT_BUILD_TYPES'] = {
        'legacy': {'uses_client': False},
    }

    app.config['CELERY_ACCEPT_CONTENT'] = ['changes_json']
    app.config['CELERY_ACKS_LATE'] = True
    app.config['CELERY_BROKER_URL'] = 'redis://localhost/0'
    app.config['CELERY_DEFAULT_QUEUE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
    app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
    app.config['CELERY_DISABLE_RATE_LIMITS'] = True
    app.config['CELERY_IGNORE_RESULT'] = True
    app.config['CELERY_RESULT_BACKEND'] = None
    app.config['CELERY_RESULT_SERIALIZER'] = 'changes_json'
    app.config['CELERY_SEND_EVENTS'] = False
    app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
    app.config['CELERY_TASK_SERIALIZER'] = 'changes_json'
    app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
    app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000

    # By default, Celery logs writes to stdout/stderr as WARNING, which
    # is a bit harsh considering that some of the code is code we don't
    # own calling 'print'. This flips the default back to INFO, which seems
    # more appropriate. Can be overridden by the Changes config.
    app.config['CELERY_REDIRECT_STDOUTS_LEVEL'] = 'INFO'

    app.config['CELERY_QUEUES'] = (
        Queue('job.sync', routing_key='job.sync'),
        Queue('job.create', routing_key='job.create'),
        Queue('celery', routing_key='celery'),
        Queue('events', routing_key='events'),
        Queue('default', routing_key='default'),
        Queue('repo.sync', Exchange('fanout', 'fanout'), routing_key='repo.sync'),
        Broadcast('repo.update'),
    )
    app.config['CELERY_ROUTES'] = {
        'create_job': {
            'queue': 'job.create',
            'routing_key': 'job.create',
        },
        'sync_job': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_job_step': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_build': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'check_repos': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'sync_repo': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'run_event_listener': {
            'queue': 'events',
            'routing_key': 'events',
        },
        'fire_signal': {
            'queue': 'events',
            'routing_key': 'events',
        },
        'update_local_repos': {
            'queue': 'repo.update',
        }
    }

    app.config['EVENT_LISTENERS'] = (
        ('changes.listeners.mail.build_finished_handler', 'build.finished'),
        ('changes.listeners.green_build.build_finished_handler', 'build.finished'),
        ('changes.listeners.build_revision.revision_created_handler', 'revision.created'),
        ('changes.listeners.build_finished_notifier.build_finished_handler', 'build.finished'),
        ('changes.listeners.phabricator_listener.build_finished_handler', 'build.finished'),
        ('changes.listeners.analytics_notifier.build_finished_handler', 'build.finished'),
        ('changes.listeners.analytics_notifier.job_finished_handler', 'job.finished'),
        ('changes.listeners.stats_notifier.build_finished_handler', 'build.finished'),
        ('changes.listeners.snapshot_build.build_finished_handler', 'build.finished'),
    )

    # restrict outbound notifications to the given domains
    app.config['MAIL_DOMAIN_WHITELIST'] = ()

    app.config['DEBUG_TB_ENABLED'] = True

    # celerybeat must be running for our cleanup tasks to execute
    # e.g. celery worker -B
    app.config['CELERYBEAT_SCHEDULE'] = {
        'cleanup-tasks': {
            'task': 'cleanup_tasks',
            'schedule': timedelta(minutes=1),
        },
        'check-repos': {
            'task': 'check_repos',
            'schedule': timedelta(minutes=2),
        },
        'aggregate-flaky-tests': {
            'task': 'aggregate_flaky_tests',
            # Hour 7 GMT is midnight PST, hopefully a time of low load
            'schedule': crontab(hour=7, minute=0),
        },
        'delete-old-data': {
            'task': 'delete_old_data',
            'schedule': timedelta(minutes=60),
        },
        'update-local-repos': {
            'task': 'update_local_repos',
            'schedule': timedelta(minutes=1),
        }
    }
    app.config['CELERY_TIMEZONE'] = 'UTC'

    app.config['SENTRY_DSN'] = None
    app.config['SENTRY_INCLUDE_PATHS'] = [
        'changes',
    ]

    app.config['KOALITY_URL'] = None
    app.config['KOALITY_API_KEY'] = None

    app.config['GOOGLE_CLIENT_ID'] = None
    app.config['GOOGLE_CLIENT_SECRET'] = None
    app.config['GOOGLE_DOMAIN'] = None

    # must be a URL-safe base64-encoded 32-byte key
    app.config['COOKIE_ENCRYPTION_KEY'] = 'theDefaultKeyIs32BytesLongAndTotallyURLSafe='

    app.config['REPO_ROOT'] = None

    app.config['DEFAULT_FILE_STORAGE'] = 'changes.storage.s3.S3FileStorage'
    app.config['S3_ACCESS_KEY'] = None
    app.config['S3_SECRET_KEY'] = None
    app.config['S3_BUCKET'] = None

    app.config['PHABRICATOR_LINK_HOST'] = None
    app.config['PHABRICATOR_API_HOST'] = None
    app.config['PHABRICATOR_USERNAME'] = None
    app.config['PHABRICATOR_CERT'] = None

    # Configuration to access Zookeeper - currently used to discover mesos master leader instance
    # E.g., if mesos master is configured to talk to zk://zk1:2181,zk2:2181/mesos,
    # set ZOOKEEPER_HOSTS = 'zk1:2181,zk2:2181'
    #     ZOOKEEPER_MESOS_MASTER_PATH = '/mesos'
    #
    # This is only used to control mesos slave offline/online status from within Changes

    # Comma-separated list of host:port (or ip:port) to Zookeeper instances.
    app.config['ZOOKEEPER_HOSTS'] = 'zk:2181'
    # Namespace within zookeeper where mesos master election is performed.
    app.config['ZOOKEEPER_MESOS_MASTER_PATH'] = '/mesos'

    # List of valid tables to be written to when reporting project analytics.
    # Analytics artifacts targeting tables not listed here will be considered invalid.
    app.config['ANALYTICS_PROJECT_TABLES'] = []
    # URL any project analytics JSON entries will be posted to.
    # Entries will be posted as JSON, with the intended table specified as 'source' in the URL params.
    app.config['ANALYTICS_PROJECT_POST_URL'] = None

    app.config['SUPPORT_CONTACT'] = 'support'

    app.config['MAIL_DEFAULT_SENDER'] = 'changes@localhost'
    app.config['BASE_URI'] = 'http://localhost:5000'

    # if set to a string, most (all?) of the frontend js will make API calls
    # to the host this string is set to (e.g. http://changes.bigcompany.com)
    # THIS IS JUST FOR EASIER TESTING IN DEVELOPMENT. Although it won't even
    # work in prod: you'll have to start chrome with --disable-web-security to
    # make this work. Override this in your changes.conf.py file
    app.config['WEBAPP_USE_ANOTHER_HOST'] = None

    # Custom changes content unique to your deployment. This is intended to
    # customize the look and feel, provide contextual help and add custom links
    # to other internal tools. You should put your files in webapp/custom and
    # link them here.
    #
    # e.g. /acmecorp-changes/changes.js
    #
    # Some of the custom_content hooks can show images. Assume that the webserver
    # is willing to serve any file within the directory of the js file
    app.config['WEBAPP_CUSTOM_JS'] = None
    # This can be a .less file. We import it after the variables.less,
    # so you can override them in your file
    # Note: if you change this and nothing seems to happen, try deleting
    # webapp/.webassets-cache and bundled.css. This probably won't happen, though
    # If not specified, we will search for CUSTOM_CSS_FILE in the custom dir.
    app.config['WEBAPP_CUSTOM_CSS'] = None

    # In minutes, the timeout applied to jobs without a timeout specified at build time.
    # A timeout should nearly always be specified; this is just a safeguard so that
    # unspecified timeout doesn't mean "is allowed to run indefinitely".
    app.config['DEFAULT_JOB_TIMEOUT_MIN'] = 60

    # Number of milliseconds a transaction can run before triggering a warning.
    app.config['TRANSACTION_MS_WARNING_THRESHOLD'] = 2500

    # Hard maximum number of jobsteps to retry for a given job
    app.config['JOBSTEP_RETRY_MAX'] = 6
    # Maximum number of machines that we'll retry jobsteps for. This allows us
    # to retry more jobsteps if it's always the same machine failing.
    app.config['JOBSTEP_MACHINE_RETRY_MAX'] = 2

    # the PHID of the user creating quarantine tasks. We can use this to show
    # the list of open quarantine tasks inline
    app.config['QUARANTINE_PHID'] = None

    # The max length a test's output to be stored. If it is longer, the it will
    # be truncated.
    app.config['TEST_MESSAGE_MAX_LEN'] = 64 * 1024

    # sources.list entry, format is:
    # deb uri distribution [component1] [component2] [...]
    # Used for bazel setup, so source should have bazel package.
    app.config['APT_SPEC'] = None

    # rsync source for encap
    # Example: rsync://example.com/encap/
    app.config['ENCAP_RSYNC_URL'] = None

    # In some configurations, build slaves might not have access to the Changes API via the
    # normal address; if PATCH_BASE_URI is specified, it'll be used as the base URI for
    # PATCH_URI variables provided to build slaves.
    app.config['PATCH_BASE_URI'] = None

    # name of default cluster to use for autogenerated jobs
    app.config['DEFAULT_CLUSTER'] = None

    app.config.update(config)
    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            path = os.path.normpath(os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)

    # default the DSN for changes-client to the server's DSN
    app.config.setdefault('CLIENT_SENTRY_DSN', app.config['SENTRY_DSN'])

    # Backwards compatibility with old configs containing BASE_URI
    if 'WEB_BASE_URI' not in app.config and 'BASE_URI' in app.config:
        app.config['WEB_BASE_URI'] = app.config['BASE_URI']
    if 'INTERNAL_BASE_URI' not in app.config and 'BASE_URI' in app.config:
        app.config['INTERNAL_BASE_URI'] = app.config['BASE_URI']

    parsed_url = urlparse(app.config['WEB_BASE_URI'])
    app.config.setdefault('PREFERRED_URL_SCHEME', 'https')

    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30

    app.url_map.converters['uuid'] = UUIDConverter

    # now that config is set up, let's ensure the CUSTOM_JS / CUSTOM_CSS
    # variables are safe (within the changes directory) and convert them to
    # absolute paths
    if app.config['WEBAPP_CUSTOM_CSS']:
        app.config['WEBAPP_CUSTOM_CSS'] = os.path.join(
            PROJECT_ROOT, 'webapp/custom/', app.config['WEBAPP_CUSTOM_CSS'])

        enforce_is_subdir(
            app.config['WEBAPP_CUSTOM_CSS'],
            os.path.join(PROJECT_ROOT, 'webapp/custom'))
    else:
        app.config['WEBAPP_CUSTOM_CSS'] = _find_custom_css()

    if app.config['WEBAPP_CUSTOM_JS']:
        app.config['WEBAPP_CUSTOM_JS'] = os.path.join(
            PROJECT_ROOT, 'webapp/custom/', app.config['WEBAPP_CUSTOM_JS'])

        enforce_is_subdir(
            app.config['WEBAPP_CUSTOM_JS'],
            os.path.join(PROJECT_ROOT, 'webapp/custom'))

    # init sentry first
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        from changes.api.auth import get_current_user
        user = get_current_user()
        if user is not None:
            sentry.client.user_context({
                'id': user.id,
                'email': user.email,
            })

    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    statsreporter.init_app(app)

    configure_debug_toolbar(app)

    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client, loglevel=logging.WARNING)

    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)

    configure_templates(app)

    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    configure_web_routes(app)

    configure_jobs(app)
    configure_transaction_logging(app)

    rules_file = app.config.get('CATEGORIZE_RULES_FILE')
    if rules_file:
        # Fail at startup if we have a bad rules file.
        categorize.load_rules(rules_file)

    import jinja2
    webapp_template_folder = os.path.join(PROJECT_ROOT, 'webapp/html')
    template_folder = os.path.join(PROJECT_ROOT, 'templates')
    template_loader = jinja2.ChoiceLoader([
                app.jinja_loader,
                jinja2.FileSystemLoader([webapp_template_folder, template_folder])
                ])
    app.jinja_loader = template_loader

    return app
コード例 #6
0
def create_app(_read_config=True, **config):
    """Create and configure the Changes Flask application.

    Sets built-in default configuration, overlays caller-supplied
    overrides, optionally loads an external config file, validates the
    derived settings, initializes extensions (Sentry, DB, mail, queue,
    Redis, stats reporting) and registers routes, jobs and template
    loaders.

    Args:
        _read_config: when True, also read settings from the file named
            by the CHANGES_CONF environment variable, falling back to
            ~/.changes/changes.conf.py if the variable is unset.
        **config: keyword overrides applied on top of the built-in
            defaults (before the external config file is read, so the
            file can still override them).

    Returns:
        The fully configured flask.Flask instance.

    Raises:
        ValueError: if ``BASE_URI`` ends up unset after configuration.
    """
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))

    # Trust X-Forwarded-* headers when running behind a reverse proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    # app.wsgi_app = TracerMiddleware(app.wsgi_app, app)

    # This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'

    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    # required for flask-debugtoolbar and the db perf metrics we record
    app.config['SQLALCHEMY_RECORD_QUERIES'] = True

    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

    app.config['API_TRACEBACKS'] = True

    # Expiration delay between when a snapshot image becomes superseded and when
    # it becomes truly expired (and thus no longer included in the sync information
    # for any cluster that runs that particular image's plan)
    app.config['CACHED_SNAPSHOT_EXPIRATION_DELTA'] = timedelta(hours=1)

    # default snapshot ID to use when no project-specific active image available
    app.config['DEFAULT_SNAPSHOT'] = None
    app.config['SNAPSHOT_S3_BUCKET'] = None
    app.config['LXC_PRE_LAUNCH'] = None
    app.config['LXC_POST_LAUNCH'] = None

    # Location of artifacts server that is passed to changes-client
    # (include http:// or https://)
    #
    # The default artifact server url uses a random uri which is expected to fail
    # without being overridden. This value is referenced in test code.
    # NOTE(review): the value below appears garbled/censored (literal '*****'
    # placeholders) — confirm the intended default against the original source.
    app.config['ARTIFACTS_SERVER'] = 'http://*****:*****@localhost'
    app.config['BASE_URI'] = 'http://localhost:5000'

    # if set to a string, most (all?) of the frontend js will make API calls
    # to the host this string is set to (e.g. http://changes.bigcompany.com)
    # THIS IS JUST FOR EASIER TESTING IN DEVELOPMENT. Although it won't even
    # work in prod: you'll have to start chrome with --disable-web-security to
    # make this work. Override this in your changes.conf.py file
    app.config['WEBAPP_USE_ANOTHER_HOST'] = None

    # Custom changes content unique to your deployment. This is intended to
    # customize the look and feel, provide contextual help and add custom links
    # to other internal tools. You should put your files in webapp/custom and
    # link them here.
    #
    # e.g. /acmecorp-changes/changes.js
    #
    # Some of the custom_content hooks can show images. Assume that the webserver
    # is willing to serve any file within the directory of the js file
    app.config['WEBAPP_CUSTOM_JS'] = None
    # This can be a .less file. We import it after the variables.less,
    # so you can override them in your file
    # Note: if you change this and nothing seems to happen, try deleting
    # webapp/.webassets-cache and bundled.css. This probably won't happen, though
    app.config['WEBAPP_CUSTOM_CSS'] = None

    # In minutes, the timeout applied to jobs without a timeout specified at build time.
    # A timeout should nearly always be specified; this is just a safeguard so that
    # unspecified timeout doesn't mean "is allowed to run indefinitely".
    app.config['DEFAULT_JOB_TIMEOUT_MIN'] = 60

    # Number of milliseconds a transaction can run before triggering a warning.
    app.config['TRANSACTION_MS_WARNING_THRESHOLD'] = 2500

    # Maximum number of jobsteps to retry for a given job
    app.config['JOBSTEP_RETRY_MAX'] = 2

    # we opt these users into the new ui...redirecting them if they
    # hit the homepage
    app.config['NEW_UI_OPTIN_USERS'] = set([])

    # the PHID of the user creating quarantine tasks. We can use this to show
    # the list of open quarantine tasks inline
    app.config['QUARANTINE_PHID'] = None

    # The max length of a test's output to be stored. If it is longer, then it
    # will be truncated.
    app.config['TEST_MESSAGE_MAX_LEN'] = 64 * 1024

    app.config['USE_OLD_UI'] = False

    # Caller-supplied overrides take effect before the external config file,
    # so the file can still override them.
    app.config.update(config)
    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            path = os.path.normpath(os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)

    # default the DSN for changes-client to the server's DSN
    app.config.setdefault('CLIENT_SENTRY_DSN', app.config['SENTRY_DSN'])

    if not app.config['BASE_URI']:
        raise ValueError('You must set ``BASE_URI`` in your configuration.')

    # Derive Flask's SERVER_NAME / PREFERRED_URL_SCHEME from BASE_URI unless
    # the config explicitly set them.
    parsed_url = urlparse(app.config['BASE_URI'])
    app.config.setdefault('SERVER_NAME', parsed_url.netloc)
    app.config.setdefault('PREFERRED_URL_SCHEME', parsed_url.scheme)

    # Disable static-file caching in debug; allow brief caching otherwise.
    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30

    app.url_map.converters['uuid'] = UUIDConverter

    # now that config is set up, let's ensure the CUSTOM_JS / CUSTOM_CSS
    # variables are safe (within the changes directory) and convert them to
    # absolute paths
    if app.config['WEBAPP_CUSTOM_CSS']:
        app.config['WEBAPP_CUSTOM_CSS'] = os.path.join(
            PROJECT_ROOT, 'webapp/custom/', app.config['WEBAPP_CUSTOM_CSS'])

        # reject paths that escape webapp/custom (e.g. via '..')
        enforce_is_subdir(
            app.config['WEBAPP_CUSTOM_CSS'],
            os.path.join(PROJECT_ROOT, 'webapp/custom'))

    if app.config['WEBAPP_CUSTOM_JS']:
        app.config['WEBAPP_CUSTOM_JS'] = os.path.join(
            PROJECT_ROOT, 'webapp/custom/', app.config['WEBAPP_CUSTOM_JS'])

        enforce_is_subdir(
            app.config['WEBAPP_CUSTOM_JS'],
            os.path.join(PROJECT_ROOT, 'webapp/custom'))

    # init sentry first
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        # Attach the logged-in user (from the session cookie) to Sentry
        # events reported during this request.
        if 'uid' in session:
            sentry.client.user_context({
                'id': session['uid'],
                'email': session['email'],
            })

    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    statsreporter.init_app(app)

    configure_debug_toolbar(app)

    # Route Celery task errors and warning-level logs through Sentry.
    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client, loglevel=logging.WARNING)

    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)

    configure_templates(app)

    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    configure_web_routes(app)

    configure_jobs(app)
    configure_transaction_logging(app)

    rules_file = app.config.get('CATEGORIZE_RULES_FILE')
    if rules_file:
        # Fail at startup if we have a bad rules file.
        categorize.load_rules(rules_file)

    # Search the app's own loader first, then the webapp/html and
    # templates directories, so deployment-specific templates can win.
    import jinja2
    webapp_template_folder = os.path.join(PROJECT_ROOT, 'webapp/html')
    template_folder = os.path.join(PROJECT_ROOT, 'templates')
    template_loader = jinja2.ChoiceLoader([
                app.jinja_loader,
                jinja2.FileSystemLoader([webapp_template_folder, template_folder])
                ])
    app.jinja_loader = template_loader

    return app
コード例 #7
0
def create_app(_read_config=True, **config):
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))

    # app.wsgi_app = TracerMiddleware(app.wsgi_app, app)

    # This key is insecure and you should override it on the server
    app.config[
        'SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'

    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    # required for flask-debugtoolbar and the db perf metrics we record
    app.config['SQLALCHEMY_RECORD_QUERIES'] = True

    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['GROUPER_API_URL'] = 'https://localhost/'
    app.config['GROUPER_PERMISSIONS_ADMIN'] = 'changes.prod.admin'
    app.config[
        'GROUPER_PERMISSIONS_PROJECT_ADMIN'] = 'changes.prod.project.admin'
    app.config['GROUPER_EXCLUDED_ROLES'] = ['np-owner']
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0

    app.config['BAZEL_ARTIFACT_SUFFIX'] = '.bazel'

    app.config['BAZEL_TEST_OUTPUT_RELATIVE_PATH'] = 'bazel-testlogs/'

    app.config['API_TRACEBACKS'] = True

    # Expiration delay between when a snapshot image becomes superceded and when
    # it becomes truly expired (and thus no longer included in the sync information
    # for any cluster that runs that particular image's plan)
    app.config['CACHED_SNAPSHOT_EXPIRATION_DELTA'] = timedelta(hours=1)

    # default snapshot ID to use when no project-specific active image available
    app.config['DEFAULT_SNAPSHOT'] = None
    app.config['SNAPSHOT_S3_BUCKET'] = None
    app.config['LXC_PRE_LAUNCH'] = None
    app.config['LXC_POST_LAUNCH'] = None

    # APT mirror URLs to use for new LXC containers created by changes-client.
    # NB: these aren't currently supported in the public changes-client repo.
    app.config['LXC_APT_MIRROR'] = None
    app.config['LXC_APT_SECURITY_MIRROR'] = None

    # name of the template to use for LXC (usually the name of a particular
    # Linux distro). Defaults to ubuntu.
    app.config['LXC_TEMPLATE'] = 'ubuntu'

    # Location of artifacts server that is passed to changes-client
    # (include http:// or https://)
    #
    # The default artifact server url uses a random uri which is expected to fail
    # without being overridden. This value is referenced in test code.
    app.config['ARTIFACTS_SERVER'] = 'http://localhost:1234'

    # The default max artifact size handlers should be capable of processing.
    app.config['MAX_ARTIFACT_BYTES'] = 200 * 1024 * 1024
    # The max artifact size the analytics json handler should be capable of processing.
    app.config['MAX_ARTIFACT_BYTES_ANALYTICS_JSON'] = 70 * 1024 * 1024

    # the binary to use for running changes-client. Default is just
    # "changes-client", but can also be specified as e.g. a full path.
    app.config['CHANGES_CLIENT_BINARY'] = 'changes-client'

    app.config['CHANGES_CLIENT_DEFAULT_BUILD_TYPE'] = 'legacy'

    # Base URI to use for git repos that we want to clone (currently only used
    # for the "other_repos" buildstep config). The repo name is appended
    # directly to this, so it should already contain necessary colons and
    # slashes, etc. For example, if GIT_DEFAULT_BASE_URI is `[email protected]:`
    # and a repo is specified as `changes.git`, the clone url will be
    # `[email protected]:changes.git`
    app.config['GIT_DEFAULT_BASE_URI'] = None
    # Same as GIT_DEFAULT_BASE_URI but used for mercurial repos.
    app.config['MERCURIAL_DEFAULT_BASE_URI'] = None

    # This is a hash from each build type (string identifiers used in
    # build step configuration) to a "build spec", a definition of
    # how to use changes-client to build. To use changes-client, the key
    # 'uses_client' must be set to True.
    #
    # Required build spec keys for client:
    #   adapter -> basic or lxc
    #   jenkins-command -> command to run from jenkins directly ($JENKINS_COMMAND)
    #   commands -> array of hash from script -> string that represents a script
    #
    # Optional keys (lxc-only)
    #   pre-launch -> lxc pre-launch script
    #   post-launch -> lxc post-launch script
    #   release -> lxc release
    app.config['CHANGES_CLIENT_BUILD_TYPES'] = {
        'legacy': {
            'uses_client': False
        },
    }

    app.config['CELERY_ACCEPT_CONTENT'] = ['changes_json']
    app.config['CELERY_ACKS_LATE'] = True
    app.config['CELERY_BROKER_URL'] = 'redis://localhost/0'
    app.config['CELERY_DEFAULT_QUEUE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
    app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
    app.config['CELERY_DISABLE_RATE_LIMITS'] = True
    app.config['CELERY_IGNORE_RESULT'] = True
    app.config['CELERY_RESULT_BACKEND'] = None
    app.config['CELERY_RESULT_SERIALIZER'] = 'changes_json'
    app.config['CELERY_SEND_EVENTS'] = False
    app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
    app.config['CELERY_TASK_SERIALIZER'] = 'changes_json'
    app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
    app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000

    # By default, Celery logs writes to stdout/stderr as WARNING, which
    # is a bit harsh considering that some of the code is code we don't
    # own calling 'print'. This flips the default back to INFO, which seems
    # more appropriate. Can be overridden by the Changes config.
    app.config['CELERY_REDIRECT_STDOUTS_LEVEL'] = 'INFO'

    app.config['CELERY_QUEUES'] = (
        Queue('job.sync', routing_key='job.sync'),
        Queue('job.create', routing_key='job.create'),
        Queue('celery', routing_key='celery'),
        Queue('events', routing_key='events'),
        Queue('default', routing_key='default'),
        Queue('delete', routing_key='delete'),
        Queue('repo.sync',
              Exchange('fanout', 'fanout'),
              routing_key='repo.sync'),
        Queue('grouper.sync', routing_key='grouper.sync'),
        Broadcast('repo.update'),
    )
    app.config['CELERY_ROUTES'] = {
        'create_job': {
            'queue': 'job.create',
            'routing_key': 'job.create',
        },
        'sync_job': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_job_step': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_build': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'check_repos': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'sync_grouper': {
            'queue': 'grouper.sync',
            'routing_key': 'grouper.sync',
        },
        'sync_repo': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'run_event_listener': {
            'queue': 'events',
            'routing_key': 'events',
        },
        'fire_signal': {
            'queue': 'events',
            'routing_key': 'events',
        },
        'update_local_repos': {
            'queue': 'repo.update',
        },
        'delete_old_data': {
            'queue': 'delete',
            'routing_key': 'delete',
        },
        'delete_old_data_10m': {
            'queue': 'delete',
            'routing_key': 'delete',
        },
        'delete_old_data_5h_delayed': {
            'queue': 'delete',
            'routing_key': 'delete',
        },
    }

    app.config['EVENT_LISTENERS'] = (
        ('changes.listeners.mail.build_finished_handler', 'build.finished'),
        ('changes.listeners.green_build.revision_result_updated_handler',
         'revision_result.updated'),
        ('changes.listeners.build_revision.revision_created_handler',
         'revision.created'),
        ('changes.listeners.build_finished_notifier.build_finished_handler',
         'build.finished'),
        ('changes.listeners.phabricator_listener.build_finished_handler',
         'build.finished'),
        ('changes.listeners.analytics_notifier.build_finished_handler',
         'build.finished'),
        ('changes.listeners.analytics_notifier.job_finished_handler',
         'job.finished'),
        ('changes.listeners.revision_result.revision_result_build_finished_handler',
         'build.finished'),
        ('changes.listeners.stats_notifier.build_finished_handler',
         'build.finished'),
        ('changes.listeners.snapshot_build.build_finished_handler',
         'build.finished'),
    )

    # restrict outbound notifications to the given domains
    app.config['MAIL_DOMAIN_WHITELIST'] = ()

    app.config['DEBUG_TB_ENABLED'] = True

    app.config['DEBUG_TB_PANELS'] = (
        'flask_debugtoolbar.panels.versions.VersionDebugPanel',
        'flask_debugtoolbar.panels.timer.TimerDebugPanel',
        'flask_debugtoolbar.panels.headers.HeaderDebugPanel',
        'flask_debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
        # Disable the config vars panel by default; it can contain sensitive information.
        # 'flask_debugtoolbar.panels.config_vars.ConfigVarsDebugPanel',
        'flask_debugtoolbar.panels.template.TemplateDebugPanel',
        'flask_debugtoolbar.panels.sqlalchemy.SQLAlchemyDebugPanel',
        'flask_debugtoolbar.panels.logger.LoggingPanel',
        'flask_debugtoolbar.panels.profiler.ProfilerDebugPanel')

    # celerybeat must be running for our cleanup tasks to execute
    # e.g. celery worker -B
    app.config['CELERYBEAT_SCHEDULE'] = {
        'cleanup-tasks': {
            'task': 'cleanup_tasks',
            'schedule': timedelta(minutes=1),
        },
        'check-repos': {
            'task': 'check_repos',
            'schedule': timedelta(minutes=2),
        },
        'sync-grouper': {
            'task': 'sync_grouper',
            'schedule': timedelta(minutes=1),
        },
        'aggregate-flaky-tests': {
            'task': 'aggregate_flaky_tests',
            # Hour 7 GMT is midnight PST, hopefully a time of low load
            'schedule': crontab(hour=7, minute=0),
        },
        'delete-old-data-10m': {
            'task': 'delete_old_data_10m',
            'schedule': timedelta(minutes=10),
        },
        'delete-old-data-5h-delayed': {
            'task': 'delete_old_data_5h_delayed',
            # This task runs every 4 hours but looks at 5 hours worth of tests
            # so consecutive runs will look at sets of tests that will overlap.
            # This is to make it unlikely to miss tests in between.
            #
            # While this is looking at 5 hours worth of tests, this should not be long running
            # as the shorter delete tasks will catch most cases and this checks
            # a time frame that should've been cleaned by them already.
            'schedule': crontab(hour='*/4'),
        },
        'update-local-repos': {
            'task': 'update_local_repos',
            'schedule': timedelta(minutes=1),
        }
    }
    app.config['CELERY_TIMEZONE'] = 'UTC'

    app.config['SENTRY_DSN'] = None
    app.config['SENTRY_INCLUDE_PATHS'] = [
        'changes',
    ]

    app.config['KOALITY_URL'] = None
    app.config['KOALITY_API_KEY'] = None

    app.config['GOOGLE_CLIENT_ID'] = None
    app.config['GOOGLE_CLIENT_SECRET'] = None
    app.config['GOOGLE_DOMAIN'] = None

    # must be a URL-safe base64-encoded 32-byte key
    app.config[
        'COOKIE_ENCRYPTION_KEY'] = 'theDefaultKeyIs32BytesLongAndTotallyURLSafe='

    app.config['REPO_ROOT'] = None

    app.config['DEFAULT_FILE_STORAGE'] = 'changes.storage.s3.S3FileStorage'
    app.config['S3_ACCESS_KEY'] = None
    app.config['S3_SECRET_KEY'] = None
    app.config['S3_BUCKET'] = None

    app.config['PHABRICATOR_LINK_HOST'] = None
    app.config['PHABRICATOR_API_HOST'] = None
    app.config['PHABRICATOR_USERNAME'] = None
    app.config['PHABRICATOR_CERT'] = None

    # Configuration to access Zookeeper - currently used to discover mesos master leader instance
    # E.g., if mesos master is configured to talk to zk://zk1:2181,zk2:2181/mesos,
    # set ZOOKEEPER_HOSTS = 'zk1:2181,zk2:2181'
    #     ZOOKEEPER_MESOS_MASTER_PATH = '/mesos'
    #
    # This is only used to control mesos slave offline/online status from within Changes

    # Comma-separated list of host:port (or ip:port) to Zookeeper instances.
    app.config['ZOOKEEPER_HOSTS'] = 'zk:2181'
    # Namespace within zookeeper where mesos master election is performed.
    app.config['ZOOKEEPER_MESOS_MASTER_PATH'] = '/mesos'

    # List of valid tables to be written to when reporting project analytics.
    # Analytics artifacts targeting tables not listed here will be considered invalid.
    app.config['ANALYTICS_PROJECT_TABLES'] = []
    # URL any project analytics JSON entries will be posted to.
    # Entries will be posted as JSON, with the intended table specified as 'source' in the URL params.
    app.config['ANALYTICS_PROJECT_POST_URL'] = None

    app.config['SUPPORT_CONTACT'] = 'support'

    app.config['MAIL_DEFAULT_SENDER'] = 'changes@localhost'
    app.config['BASE_URI'] = 'http://localhost:5000'

    # if set to a string, most (all?) of the frontend js will make API calls
    # to the host this string is set to (e.g. http://changes.bigcompany.com)
    # THIS IS JUST FOR EASIER TESTING IN DEVELOPMENT. Although it won't even
    # work in prod: you'll have to start chrome with --disable-web-security to
    # make this work. Override this in your changes.conf.py file
    app.config['WEBAPP_USE_ANOTHER_HOST'] = None

    # Custom changes content unique to your deployment. This is intended to
    # customize the look and feel, provide contextual help and add custom links
    # to other internal tools. You should put your files in webapp/custom and
    # link them here.
    #
    # e.g. /acmecorp-changes/changes.js
    #
    # Some of the custom_content hooks can show images. Assume that the webserver
    # is willing to serve any file within the directory of the js file
    app.config['WEBAPP_CUSTOM_JS'] = None
    # This can be a .less file. We import it after the variables.less,
    # so you can override them in your file
    # Note: if you change this and nothing seems to happen, try deleting
    # webapp/.webassets-cache and bundled.css. This probably won't happen, though
    # If not specified, we will search for CUSTOM_CSS_FILE in the custom dir.
    app.config['WEBAPP_CUSTOM_CSS'] = None

    # In minutes, the timeout applied to jobs without a timeout specified at build time.
    # A timeout should nearly always be specified; this is just a safeguard so that
    # unspecified timeout doesn't mean "is allowed to run indefinitely".
    app.config['DEFAULT_JOB_TIMEOUT_MIN'] = 60

    # Number of milliseconds a transaction can run before triggering a warning.
    app.config['TRANSACTION_MS_WARNING_THRESHOLD'] = 2500

    # Hard maximum number of jobsteps to retry for a given job
    app.config['JOBSTEP_RETRY_MAX'] = 6
    # Maximum number of machines that we'll retry jobsteps for. This allows us
    # to retry more jobsteps if it's always the same machine failing.
    app.config['JOBSTEP_MACHINE_RETRY_MAX'] = 2

    # the PHID of the user creating quarantine tasks. We can use this to show
    # the list of open quarantine tasks inline
    app.config['QUARANTINE_PHID'] = None

    # The max length a test's output to be stored. If it is longer, the it will
    # be truncated.
    app.config['TEST_MESSAGE_MAX_LEN'] = 64 * 1024

    # List of packages needed to install bazel and any environment.
    # APT packages expected to provide the bazel binary on build machines.
    app.config['BAZEL_APT_PKGS'] = ['bazel']

    # rsync source for encap
    # Example: rsync://example.com/encap/
    app.config['ENCAP_RSYNC_URL'] = None

    # In some configurations, build slaves might not have access to the Changes API via the
    # normal address; if PATCH_BASE_URI is specified, it'll be used as the base URI for
    # PATCH_URI variables provided to build slaves.
    app.config['PATCH_BASE_URI'] = None

    # name of default cluster to use for autogenerated jobs
    app.config['DEFAULT_CLUSTER'] = None

    # Maximum number of cpus allowed for a bazel executor. Since we expose `bazel.cpus` to
    # the user, this number needs to be bounded to avoid runaway resource allocation (by always
    # allocating large chunks of resources, like 12-16 cores), and to avoid invalid configuration
    # (like, requesting more cpus than available on a single slave, typically 32)
    app.config['MAX_CPUS_PER_EXECUTOR'] = 16

    # Minimum memory allowed per executor (in MB)
    app.config['MIN_MEM_MB_PER_EXECUTOR'] = 1024

    # Maximum memory allowed per executor (in MB)
    app.config['MAX_MEM_MB_PER_EXECUTOR'] = 16384

    # Maximum number of bazel executors allowed.
    app.config['MAX_EXECUTORS'] = 10

    # Absolute path to Bazel root (passed via --output_root to Bazel)
    # Storing bazel cache in tmpfs could be a bad idea because:
    #  - tmpfs means any files stored here will be stored purely in RAM and will eat into container limits
    #  - these containers are not persisted from the snapshot
    #
    # Bazel will create parent directories (if the user has appropriate permissions), if missing.
    app.config['BAZEL_ROOT_PATH'] = '/tmp/bazel_changes'

    # List of mandatory flags to be passed to `bazel test`
    app.config['BAZEL_MANDATORY_TEST_FLAGS'] = [
        '--spawn_strategy=sandboxed',
        '--genrule_strategy=sandboxed',
        '--keep_going',
    ]

    # Regex whitelist for additional, user-supplied `bazel test` flags
    # (presumably flags not matching any pattern are rejected by the
    # consumer of this setting -- verify there).
    app.config['BAZEL_ADDITIONAL_TEST_FLAGS_WHITELIST_REGEX'] = [
        r'^--test_env=[A-Za-z0-9=]+',
        r'^--test_arg=[A-Za-z0-9=]+',
        r'^--define=[A-Za-z0-9=]+',
    ]

    # NOTE(review): exact semantics of this limit live in the selective
    # testing code that reads it; name suggests a cap on how far results
    # may be propagated.
    app.config['SELECTIVE_TESTING_PROPAGATION_LIMIT'] = 30

    # Global switch for selective testing; off by default.
    app.config['SELECTIVE_TESTING_ENABLED'] = False

    # Debug config entries passed to every autobazel jobstep
    app.config['BAZEL_DEBUG_CONFIG'] = {}

    # Extra test setup commands to be executed before collect-targets or `bazel test` invocations.
    app.config['BAZEL_EXTRA_SETUP_CMD'] = ['exit 0']

    # Jobsteps go from 'pending_allocation' to 'allocated' once an external scheduler claims them, and
    # once they begin running they're updated to 'in_progress'. If the scheduler somehow fails or drops
    # the task, this value is used to time out the 'allocated' status and revert back to 'pending_allocation'.
    # For current and expected schedulers, we don't allocate unless we think we can execute immediately, so
    # a 3 minute timeout is conservative and should be safe.
    app.config['JOBSTEP_ALLOCATION_TIMEOUT_SECONDS'] = 3 * 60

    # Apply keyword overrides passed directly to create_app(); these take
    # precedence over the defaults above, but the config file loaded below
    # is applied after and can still override them.
    app.config.update(config)

    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            # (silent=True: a missing file is not an error)
            path = os.path.normpath(
                os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)

    # default the DSN for changes-client to the server's DSN
    app.config.setdefault('CLIENT_SENTRY_DSN', app.config['SENTRY_DSN'])

    # Backwards compatibility with old configs containing BASE_URI
    if 'WEB_BASE_URI' not in app.config and 'BASE_URI' in app.config:
        app.config['WEB_BASE_URI'] = app.config['BASE_URI']
    if 'INTERNAL_BASE_URI' not in app.config and 'BASE_URI' in app.config:
        app.config['INTERNAL_BASE_URI'] = app.config['BASE_URI']

    # NOTE(review): parsed_url is never used after this point -- possibly it
    # was meant to derive PREFERRED_URL_SCHEME from WEB_BASE_URI. It does
    # raise KeyError if WEB_BASE_URI is unset; confirm intent before removing.
    parsed_url = urlparse(app.config['WEB_BASE_URI'])
    app.config.setdefault('PREFERRED_URL_SCHEME', 'https')

    # Disable static-file caching in debug; otherwise cache for 30 seconds.
    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30
    # Allow route rules to use <uuid:...> path parameters.
    app.url_map.converters['uuid'] = UUIDConverter

    # now that config is set up, let's ensure the CUSTOM_JS / CUSTOM_CSS
    # variables are safe (within the changes directory) and convert them to
    # absolute paths
    if app.config['WEBAPP_CUSTOM_CSS']:
        app.config['WEBAPP_CUSTOM_CSS'] = os.path.join(
            PROJECT_ROOT, 'webapp/custom/', app.config['WEBAPP_CUSTOM_CSS'])

        # Reject configured paths that escape webapp/custom (e.g. via '..').
        enforce_is_subdir(app.config['WEBAPP_CUSTOM_CSS'],
                          os.path.join(PROJECT_ROOT, 'webapp/custom'))
    else:
        app.config['WEBAPP_CUSTOM_CSS'] = _find_custom_css()

    if app.config['WEBAPP_CUSTOM_JS']:
        app.config['WEBAPP_CUSTOM_JS'] = os.path.join(
            PROJECT_ROOT, 'webapp/custom/', app.config['WEBAPP_CUSTOM_JS'])

        # Reject configured paths that escape webapp/custom (e.g. via '..').
        enforce_is_subdir(app.config['WEBAPP_CUSTOM_JS'],
                          os.path.join(PROJECT_ROOT, 'webapp/custom'))

    # init sentry first (presumably so problems during the remaining
    # initialization can be reported)
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        """Attach the authenticated user (if any) to the Sentry error context."""
        from changes.api.auth import get_current_user

        current = get_current_user()
        if current is None:
            return
        sentry.client.user_context({'id': current.id, 'email': current.email})

    # Bind the Flask extensions to this app instance.
    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    statsreporter.init_app(app)

    configure_debug_toolbar(app)

    # Forward Celery task failures and WARNING+ log records to Sentry.
    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client, loglevel=logging.WARNING)

    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)

    configure_templates(app)

    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    configure_web_routes(app)

    configure_jobs(app)
    configure_transaction_logging(app)

    rules_file = app.config.get('CATEGORIZE_RULES_FILE')
    if rules_file:
        # Fail at startup if we have a bad rules file.
        categorize.load_rules(rules_file)

    # Let templates resolve from webapp/html and templates/ in addition to
    # whatever loader Flask set up by default (the default loader wins on
    # name collisions since it is listed first).
    import jinja2
    webapp_template_folder = os.path.join(PROJECT_ROOT, 'webapp/html')
    template_folder = os.path.join(PROJECT_ROOT, 'templates')
    template_loader = jinja2.ChoiceLoader([
        app.jinja_loader,
        jinja2.FileSystemLoader([webapp_template_folder, template_folder])
    ])
    app.jinja_loader = template_loader

    return app