def pytest_configure(config):
    """Pytest hook: configure the app and databases for the test run.

    Points the Elasticsearch mappings at test-only indexes, switches the
    app into TESTING mode against a throwaway database, patches in
    sqlite shims for postgres-only SQL, rebuilds the schema, installs
    fixtures/mocks, and (when ES is configured) reindexes everything.
    """
    from rockpack.mainsite.core.es import mappings
    # Use dedicated test indexes so a real ES cluster is never touched.
    mappings.CHANNEL_ALIAS = mappings.CHANNEL_INDEX = 'test_channel'
    mappings.VIDEO_ALIAS = mappings.VIDEO_INDEX = 'test_video'
    mappings.USER_ALIAS = mappings.USER_INDEX = 'test_user'
    from rockpack.mainsite import app, init_app
    app.config['TESTING'] = True
    app.config['FORCE_INDEX_INSERT_REFRESH'] = True
    app.config['DATABASE_URL'] = app.config.get('TEST_DATABASE_URL', 'sqlite://')
    # import after setting DATABASE_URL
    from rockpack.mainsite.core import dbapi
    if app.config.get('ELASTICSEARCH_URL'):
        from rockpack.mainsite.core.es import helpers
        helpers.Indexing.create_all_indexes(rebuild=True)
        helpers.Indexing.create_all_mappings()
    if 'sqlite:' in app.config['DATABASE_URL']:
        connection = dbapi.db.engine.raw_connection().connection
        # Seems to be required for sub-transaction support:
        connection.isolation_level = None
        # Use group_concat instead of string_agg
        from sqlalchemy import func
        func.string_agg = func.group_concat
        # For compatibility with postgres. XXX: can't return timedelta :-(
        connection.create_function('age', 1, lambda d: None)
        # substitute postgres-specific "interval" expression
        from rockpack.mainsite.services.user import api
        from sqlalchemy import text
        api.SUBSCRIPTION_VIDEO_FEED_THRESHOLD = text("datetime('now')")
        api.ACTIVITY_LAST_ACTION_COMPARISON = "action = '%s'"
    dbapi.sync_database(drop_all=True)
    # Silence timing logs (50 == logging.CRITICAL) during tests.
    from wonder.common import timing
    timing.log.level = 50
    from test.test_helpers import install_mocks
    from test.fixtures import install, all_data
    install_mocks()
    init_app()
    # Explicitly load admin tables after app is loaded.
    dbapi.sync_database(custom_modules=('rockpack.mainsite.admin', 'rockpack.mainsite.admin.auth', ))
    install(*all_data)
    if app.config.get('ELASTICSEARCH_URL'):
        helpers.full_user_import()
        helpers.full_channel_import()
        helpers.full_video_import()
def create_app():
    """Return the WSGI app, initialising it on first use.

    This is the entry point a uwsgi mule executes, so initialisation
    must be idempotent: only run init_app() when no blueprints have
    been registered yet.
    """
    # uwsgi mule will execute here
    if not app.blueprints:
        init_app()
    sentry_enabled = 'SENTRY_DSN' in app.config
    if sentry_enabled:
        from raven.contrib.flask import Sentry
        enable_logging = app.config.get('SENTRY_ENABLE_LOGGING')
        Sentry(app, logging=enable_logging, level=logging.WARN)
    # Use ES cluster nodes directly for batch jobs
    discover_cluster_nodes()
    return app
def run(*args):
    """Initialise the app, then dispatch to the CLI manager.

    With no arguments the manager parses sys.argv itself; with
    arguments they are handed to the manager directly.
    """
    init_app()
    if not args:
        return manager.run()
    return manager.handle(sys.argv[0], args)
# access to the values within the .ini file in use. config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. fileConfig(config.config_file_name) # Comment by Neebone: # Initialise the app so alembic can detect what tables we have import os, sys sys.path.append(os.getcwd()) from rockpack.mainsite import app from rockpack.mainsite import init_app init_app() from rockpack.mainsite.core.dbapi import db # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata target_metadata = db.metadata # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def run_migrations_offline():