def run(self):
    config = app.config
    # prefer "hostname" over "host", and fall back to "localhost"
    hostname = config.get("hostname") or config.get("host") or "localhost"
    self.server = CLSXMLRPC((hostname, config['port']))
    self.server.allow_none = True
    self.server.daemon_threads = True
    self.server.register_introspection_functions()
    self.server.register_instance(api.ServerAPI(self.sync))
    self.server.serve_forever()

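# A minimal sketch of exercising a server like the one above from a client,
# using only the standard library. The host/port are hypothetical (they come
# from config in the snippet above); system.listMethods() is available
# because register_introspection_functions() was called.
import xmlrpc.client

proxy = xmlrpc.client.ServerProxy("http://localhost:8000/", allow_none=True)
print(proxy.system.listMethods())  # lists methods exposed by ServerAPI
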
def setup_application(app):
    config = app.config

    # We want our low level middleware to get to the request ASAP. We don't
    # need any stack middleware in them - especially no StatusCodeRedirect
    # buffering.
    app = SimpleHg(app, config)
    app = SimpleGit(app, config)

    # Enable https redirects based on HTTP_X_URL_SCHEME set by proxy
    if any(asbool(config.get(x)) for x in ['https_fixup', 'force_https', 'use_htsts']):
        app = HttpsFixup(app, config)

    app = PermanentRepoUrl(app, config)

    # Optional and undocumented wrapper - gives more verbose request/response
    # logging, but has a slight overhead.
    if str2bool(config.get('use_wsgi_wrapper')):
        app = RequestWrapper(app, config)

    return app

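# Rough approximation of the string-to-bool coercion that asbool/str2bool
# perform in the config checks above. The real helpers live elsewhere (asbool
# comes from paste.deploy.converters) and accept a few more spellings; this
# sketch only clarifies why string config values like "true"/"0" work.
def _asbool_sketch(value):
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ('true', 'yes', 'on', 'y', 't', '1')
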
def skip_audit_log(request):
    """
    Conditionally skip tests marked with 'audit_log' based on the
    USE_AUDIT_LOG config value.
    """
    config = make_config()
    if request.node.get_closest_marker("audit_log"):
        use_audit_log = config.get("USE_AUDIT_LOG", False)
        if not use_audit_log:
            pytest.skip("audit log feature flag disabled")

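# A hypothetical test that opts into the conditional skip above; the marker
# name matches the one checked via request.node.get_closest_marker().
import pytest

@pytest.mark.audit_log
def test_audit_entry_written():
    ...  # runs only when USE_AUDIT_LOG is truthy in the config
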
def _setup(config):
    # disable delayed execution
    # FIXME: still do this with rq instead of rabbitmq
    # NOTE: this is called from tests so it may have side effects
    if config.get('adhocracy.setup.drop', "OH_NOES") == "KILL_EM_ALL":
        meta.data.drop_all(bind=meta.engine)
        meta.engine.execute("DROP TABLE IF EXISTS alembic_version")

    alembic_cfg = alembic.config.Config(config['config_filename'])
    if meta.engine.has_table('alembic_version'):
        alembic.command.upgrade(alembic_cfg, 'head')
        initial_setup = False
    else:
        # Create the tables
        meta.data.create_all(bind=meta.engine)
        alembic.command.stamp(alembic_cfg, 'head')
        initial_setup = True

    install.setup_entities(config, initial_setup)

def init_app(self, app: FastAPI, plugins: List[AExtensionDesc] = []):
    _log.info("Initializing DBMigrationManager")
    auto_upgrade_default = manager.settings.tdp_core.migrations.autoUpgrade

    for p in plugins:
        _log.info("DBMigration found: %s", p.id)

        # Check if configKey is set, otherwise use the plugin configuration
        config = manager.settings.get_nested(p.configKey, {}) if hasattr(p, "configKey") else {}

        # Priority of assignments: Configuration File -> Plugin Definition
        id = config.get("id") or (p.id if hasattr(p, "id") else None)
        db_key = config.get("dbKey") or (p.dbKey if hasattr(p, "dbKey") else None)
        db_url = config.get("dbUrl") or (p.dbUrl if hasattr(p, "dbUrl") else None)
        script_location = config.get("scriptLocation") or (
            p.scriptLocation if hasattr(p, "scriptLocation") else None)
        version_table_schema = config.get("versionTableSchema") or (
            p.versionTableSchema if hasattr(p, "versionTableSchema") else None)
        auto_upgrade = (
            config.get("autoUpgrade")
            if isinstance(config.get("autoUpgrade"), bool)
            else (p.autoUpgrade
                  if hasattr(p, "autoUpgrade") and isinstance(p.autoUpgrade, bool)
                  else auto_upgrade_default))

        # Validate the plugin description
        missing_fields = []
        if not id:
            missing_fields.append("id")
        if not script_location:
            missing_fields.append("scriptLocation")
        if not db_key and not db_url:
            missing_fields.append("dbUrl or dbKey")

        if missing_fields:
            _log.error(
                "No {} defined for DBMigration {} - is your configuration up to date?"
                .format(", ".join(missing_fields), id or "<UNKNOWN>"))
            continue

        if db_key and db_url:
            _log.info(
                f"Both dbKey and dbUrl defined for DBMigration {id} - falling back to dbUrl")
        elif db_key:
            # Check if engine exists
            if db_key not in manager.db.connectors:
                _log.error(
                    f"No engine called {db_key} found for DBMigration {id} - is your configuration up to date?")
                continue

            # Retrieve engine and store string as db url
            try:
                db_url = str(manager.db.engine(db_key).url)
            except Exception:
                _log.exception(f"Error retrieving URL from engine {db_key}")
                continue

        # Create new migration
        migration = DBMigration(
            id,
            db_url,
            script_location,
            auto_upgrade=auto_upgrade,
            version_table_schema=version_table_schema,
        )

        # Store migration
        self._migrations[migration.id] = migration

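# Hypothetical shape of the per-plugin configuration resolved via
# manager.settings.get_nested(p.configKey, {}) above. Every key is optional
# and falls back to the plugin definition; all values here are made up.
example_migration_config = {
    "id": "my_migration",
    "dbKey": "primary",                  # if dbUrl were also set, the code above falls back to dbUrl
    "scriptLocation": "my_pkg/migration",
    "autoUpgrade": True,
}
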
def setup_configuration(app):
    config = app.config

    if not kallithea.lib.locale.current_locale_is_valid():
        log.error("Terminating ...")
        sys.exit(1)

    # Mercurial sets encoding at module import time, so we have to monkey patch it
    hgencoding = config.get('hgencoding')
    if hgencoding:
        mercurial.encoding.encoding = hgencoding

    if config.get('ignore_alembic_revision', False):
        log.warn('database alembic revision checking is disabled')
    else:
        dbconf = config['sqlalchemy.url']
        alembic_cfg = alembic.config.Config()
        alembic_cfg.set_main_option('script_location', 'kallithea:alembic')
        alembic_cfg.set_main_option('sqlalchemy.url', dbconf)
        script_dir = ScriptDirectory.from_config(alembic_cfg)
        available_heads = sorted(script_dir.get_heads())

        engine = create_engine(dbconf)
        with engine.connect() as conn:
            context = MigrationContext.configure(conn)
            current_heads = sorted(str(s) for s in context.get_current_heads())
        if current_heads != available_heads:
            log.error('Failed to run Kallithea:\n\n'
                      'The database version does not match the Kallithea version.\n'
                      'Please read the documentation on how to upgrade or downgrade the database.\n'
                      'Current database version id(s): %s\n'
                      'Expected database version id(s): %s\n'
                      'If you are a developer and you know what you are doing, you can add `ignore_alembic_revision = True` '
                      'to your .ini file to skip the check.\n'
                      % (' '.join(current_heads), ' '.join(available_heads)))
            sys.exit(1)

    # store some globals into kallithea
    kallithea.DEFAULT_USER_ID = db.User.get_default_user().user_id

    if str2bool(config.get('use_celery')):
        kallithea.CELERY_APP = celerypylons.make_app()
    kallithea.CONFIG = config

    load_rcextensions(root_path=config['here'])

    set_app_settings(config)

    instance_id = kallithea.CONFIG.get('instance_id', '*')
    if instance_id == '*':
        instance_id = '%s-%s' % (platform.uname()[1], os.getpid())
    kallithea.CONFIG['instance_id'] = instance_id

    # update kallithea.CONFIG with the meanwhile changed 'config'
    kallithea.CONFIG.update(config)

    # Configure the vcs and indexer libraries. They are supposed to be as
    # independent as possible and thus avoid importing tg.config or
    # kallithea.CONFIG.
    set_vcs_config(kallithea.CONFIG)
    set_indexer_config(kallithea.CONFIG)

    check_git_version()

    kallithea.model.meta.Session.remove()

def full_path(filename):
    output_folder = os.environ.get('OUTPUT_FOLDER', None)
    if not output_folder:
        output_folder = config.get('output_folder', 'output')
    return os.path.join(output_folder, filename)

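# Sketch of the lookup order implemented above: the OUTPUT_FOLDER environment
# variable wins over the 'output_folder' config key, which itself falls back
# to 'output'. Assumes the full_path() and config objects from the snippet
# above; the paths are made up for illustration.
import os

os.environ['OUTPUT_FOLDER'] = '/tmp/run'
assert full_path('report.csv') == '/tmp/run/report.csv'
del os.environ['OUTPUT_FOLDER']
full_path('report.csv')  # now resolves via config, e.g. 'output/report.csv'
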
def define_api_sync(main_dag_name):
    dag = QuarterlySubDAG(main_dag_name, 'api_v1_sync')

    output_folder = config.get('output_folder', 'output')

    table_files = {
        'jobs_master': 'job_titles_master_table.tsv',
        'skills_master': 'skills_master_table.tsv',
        'interesting_jobs': 'interesting_job_titles.tsv',
        'skill_importance': 'ksas_importance.tsv',
        'geo_title_count': 'geo_title_count_{}.csv',
        'title_count': 'title_count_{}.csv',
    }

    def full_path(filename):
        output_folder = os.environ.get('OUTPUT_FOLDER', None)
        if not output_folder:
            output_folder = config.get('output_folder', 'output')
        return os.path.join(output_folder, filename)

    class JobMaster(BaseOperator):
        def execute(self, context):
            engine = get_db()
            load_jobs_master(full_path(table_files['jobs_master']), engine)

    class SkillMaster(BaseOperator):
        def execute(self, context):
            engine = get_db()
            load_skills_master(full_path(table_files['skills_master']), engine)

    class JobAlternateTitles(BaseOperator):
        def execute(self, context):
            engine = get_db()
            load_alternate_titles(full_path(table_files['jobs_master']), engine)

    class JobUnusualTitles(BaseOperator):
        def execute(self, context):
            engine = get_db()
            load_jobs_unusual_titles(full_path(table_files['interesting_jobs']), engine)

    class SkillImportance(BaseOperator):
        def execute(self, context):
            engine = get_db()
            load_skills_importance(full_path(table_files['skill_importance']), engine)

    class GeoTitleCounts(BaseOperator):
        def execute(self, context):
            year, quarter = datetime_to_year_quarter(context['execution_date'])
            quarter_string = datetime_to_quarter(context['execution_date'])
            engine = get_db()
            load_geo_title_counts(
                filename=full_path(table_files['geo_title_count']).format(quarter_string),
                year=year,
                quarter=quarter,
                db_engine=engine,
            )

    class TitleCounts(BaseOperator):
        def execute(self, context):
            year, quarter = datetime_to_year_quarter(context['execution_date'])
            quarter_string = datetime_to_quarter(context['execution_date'])
            engine = get_db()
            load_title_counts(
                filename=full_path(table_files['title_count']).format(quarter_string),
                year=year,
                quarter=quarter,
                db_engine=engine,
            )

    class SchemaUpgrade(BaseOperator):
        def execute(self, context):
            alembic.config.main(argv=['--raiseerr', 'upgrade', 'head'])

    schema_upgrade = SchemaUpgrade(task_id='schema_upgrade', dag=dag)
    job_master = JobMaster(task_id='job_master', dag=dag)
    skill_master = SkillMaster(task_id='skill_master', dag=dag)
    alternate_titles = JobAlternateTitles(task_id='alternate_titles', dag=dag)
    unusual_titles = JobUnusualTitles(task_id='unusual_titles', dag=dag)
    skill_importance = SkillImportance(task_id='skill_importance', dag=dag)
    geo_title_counts = GeoTitleCounts(task_id='geo_title_counts', dag=dag)
    title_counts = TitleCounts(task_id='title_counts', dag=dag)

    alternate_titles.set_upstream(job_master)
    unusual_titles.set_upstream(job_master)
    skill_importance.set_upstream(job_master)
    skill_importance.set_upstream(skill_master)

    all_tasks = [
        job_master, skill_master, alternate_titles, unusual_titles,
        skill_importance, geo_title_counts, title_counts
    ]
    for task in all_tasks:
        task.set_upstream(schema_upgrade)

    return dag

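# Illustration of how the per-quarter table files above get their names: the
# '{}' placeholder is filled with the quarter string derived from the DAG's
# execution date. The quarter string value below is made up.
files = {'geo_title_count': 'geo_title_count_{}.csv'}
quarter_string = '2016Q1'
files['geo_title_count'].format(quarter_string)  # -> 'geo_title_count_2016Q1.csv'
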
from os import environ

import alembic.config
import pytest
from asgi_lifespan import LifespanManager
from asyncpg.pool import Pool
from fastapi import FastAPI
from httpx import AsyncClient
from starlette.config import Config

from app.db.repositories.auth0_users import Auth0UsersRepository
from app.db.repositories.users import UsersRepository
from app.models.domain.users import UserInDB, Auth0User
from app.services import jwt

config = Config(".env")

TEST_DB_CONNECTION = config.get("TEST_DB_CONNECTION", cast=str)


@pytest.fixture
async def setup_test_db() -> None:
    environ["DB_CONNECTION"] = TEST_DB_CONNECTION


@pytest.fixture(autouse=True)
async def apply_migrations(setup_test_db: None) -> None:
    alembic.config.main(argv=["upgrade", "head"])
    yield
    alembic.config.main(argv=["downgrade", "base"])


@pytest.fixture