def env_import(args):
    """
    Restore the database and media files from a tar archive.

    Reverses env_export: unpacks the archive into a cache directory,
    re-imports the database dump and copies the media files back into
    storage, then removes the cache directory.
    """
    if not args.cache_path:
        args.cache_path = tempfile.mkdtemp()

    # Load the configuration exactly once.  (The original called
    # setup_global_and_app_config twice, discarding the first result.)
    global_config, app_config = setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    db = setup_connection_and_db_from_config(app_config)

    # Unpack the gzipped tarball into the cache directory; the context
    # manager closes the archive instead of leaking the file handle.
    with tarfile.open(args.tar_file, mode='r|gz') as tf:
        tf.extractall(args.cache_path)

    args.cache_path = os.path.join(
        args.cache_path, 'mediagoblin-data')
    args = _setup_paths(args)

    # Import database from extracted data
    _import_database(db, args)

    _import_media(db, args)

    _clean(args)
def setup_database(app):
    """Open the database for *app*, migrating (tests) or verifying it."""
    conf = app.app_config
    gconf = app.global_config
    migrate = conf['run_migrations']

    # Media-type plugins must register their models before the
    # connection is established.
    load_models(conf)

    db = setup_connection_and_db_from_config(
        conf, migrate, app=app)

    if not migrate:
        # Normal operation: only verify the schema is current.
        check_db_migrations_current(db)
    else:
        # run_migrations is used for tests.  Unit tests build the schema
        # with the alembic migrations alone; the legacy migrations are
        # not needed there.
        from mediagoblin.gmg_commands.dbupdate import (
            run_alembic_migrations, run_foundations)
        run_alembic_migrations(db, conf, gconf)
        run_foundations(db, gconf)

    setup_globals(database=db)
    return db
def env_import(args):
    """
    Restore the database and media files from a tar archive.

    Unpacks the archive produced by env_export into a cache directory,
    imports the database dump, copies media into storage and cleans up.
    """
    if not args.cache_path:
        args.cache_path = tempfile.mkdtemp()

    # Configuration is loaded once; the original invoked
    # setup_global_and_app_config a second time and threw away the
    # first call's result.
    global_config, app_config = setup_global_and_app_config(args.conf_file)

    # Creates mg_globals.public_store and mg_globals.queue_store
    setup_storage()

    db = setup_connection_and_db_from_config(app_config)

    # Extract the archive; use a context manager so the tarfile is
    # closed rather than leaked.
    with tarfile.open(args.tar_file, mode='r|gz') as tf:
        tf.extractall(args.cache_path)

    args.cache_path = os.path.join(
        args.cache_path, 'mediagoblin-data')
    args = _setup_paths(args)

    # Import database from extracted data
    _import_database(db, args)

    _import_media(db, args)

    _clean(args)
def run_dbupdate(app_config, global_config):
    """
    Initialize or migrate the database as specified by the config file.

    Will also initialize or migrate all extensions (media types, and
    in the future, plugins)
    """
    db = setup_connection_and_db_from_config(app_config, migrations=True)

    has_legacy_table = db.engine.has_table("core__migrations")

    # Legacy (sqlalchemy-migrate) migrations only apply when their
    # bookkeeping table exists and there is outstanding work.
    legacy_pending = has_legacy_table and sqam_migrations_to_run(
        db, app_config, global_config)

    # Record "freshness" *before* migrating: the migration runs below
    # create both tracking tables and would hide that the DB was empty.
    fresh_database = (
        not has_legacy_table
        and not db.engine.has_table("alembic_version"))

    if legacy_pending:
        run_all_migrations(db, app_config, global_config)

    # Alembic migrations always run.
    run_alembic_migrations(db, app_config, global_config)

    # A brand-new database also needs its foundation rows laid down.
    if fresh_database:
        run_foundations(db, global_config)
def run_dbupdate(app_config, global_config):
    """
    Initialize or migrate the database as specified by the config file.

    Will also initialize or migrate all extensions (media types, and
    in the future, plugins)
    """
    # Open the database in migration mode.
    db = setup_connection_and_db_from_config(app_config, migrations=True)

    sqam_table_present = db.engine.has_table("core__migrations")

    # Old sqlalchemy-migrate migrations run only if their tracking
    # table exists and reports pending work.
    run_sqam = sqam_table_present and sqam_migrations_to_run(
        db, app_config, global_config)

    # Decide whether this is a brand-new database before any migration
    # below creates the tracking tables.
    is_fresh = (not sqam_table_present
                and not db.engine.has_table("alembic_version"))

    if run_sqam:
        run_all_migrations(db, app_config, global_config)

    # The alembic migrations run unconditionally.
    run_alembic_migrations(db, app_config, global_config)

    if is_fresh:
        # First-time initialization: lay down the foundations.
        run_foundations(db, global_config)
def env_export(args):
    """
    Export the database and media files to a tar archive.

    Refuses to run when the requested cache directory already exists,
    because the cache directory is deleted at the end of the export.
    """
    commands_util.check_unrecognized_args(args)
    if args.cache_path:
        if os.path.exists(args.cache_path):
            _log.error('The cache directory must not exist '
                       'before you run this script')
            _log.error('Cache directory: {0}'.format(args.cache_path))
            return False
    else:
        args.cache_path = tempfile.mkdtemp()

    args = _setup_paths(args)

    if not _export_check(args):
        _log.error('Checks did not pass, exiting')
        # Fix: exit non-zero on failure so calling scripts can detect
        # it (previously exited with status 0).
        sys.exit(1)

    # Fix: variable was misspelled `globa_config`.
    global_config, app_config = setup_global_and_app_config(args.conf_file)

    setup_storage()

    db = setup_connection_and_db_from_config(app_config)

    _export_database(db, args)

    _export_media(db, args)

    _create_archive(args)

    _clean(args)
def env_export(args):
    """
    Export the database and media files to a tar archive.

    Aborts when the requested cache directory already exists, since the
    cache directory is removed once the archive has been created.
    """
    if args.cache_path:
        if os.path.exists(args.cache_path):
            _log.error('The cache directory must not exist '
                       'before you run this script')
            _log.error('Cache directory: {0}'.format(args.cache_path))
            return False
    else:
        args.cache_path = tempfile.mkdtemp()

    args = _setup_paths(args)

    if not _export_check(args):
        _log.error('Checks did not pass, exiting')
        # Fix: signal failure to the shell (was sys.exit(0)).
        sys.exit(1)

    # Fix: variable was misspelled `globa_config`.
    global_config, app_config = setup_global_and_app_config(args.conf_file)

    setup_storage()

    db = setup_connection_and_db_from_config(app_config)

    _export_database(db, args)

    _export_media(db, args)

    _create_archive(args)

    _clean(args)
def setup_database(app):
    """Connect *app* to its database and publish it via setup_globals."""
    app_config = app.app_config
    global_config = app.global_config
    migrating = app_config['run_migrations']

    # Register every model (plugins, media types, ...) before the
    # engine is created.
    load_models(app_config)

    db = setup_connection_and_db_from_config(app_config,
                                             migrating,
                                             app=app)

    # `run_migrations` is only turned on by the test harness.
    if migrating:
        # For unit tests the schema is produced by alembic alone;
        # the legacy migrations need not run.
        from mediagoblin.gmg_commands.dbupdate import (
            run_alembic_migrations, run_foundations)
        run_alembic_migrations(db, app_config, global_config)
        run_foundations(db, global_config)
    else:
        # Production path: just assert the schema is up to date.
        check_db_migrations_current(db)

    setup_globals(database=db)
    return db
def run_dbupdate(app_config, global_config):
    """
    Initialize or migrate the database as specified by the config file.

    Will also initialize or migrate all extensions (media types, and
    in the future, plugins)
    """
    # Open a migration-capable connection, then bring the schema up to
    # date.
    database = setup_connection_and_db_from_config(app_config, migrations=True)
    run_all_migrations(database, app_config, global_config)
def run_dbupdate(app_config, global_config):
    """
    Initialize or migrate the database as specified by the config file.

    Will also initialize or migrate all extensions (media types, and
    in the future, plugins)
    """
    # Single step: connect in migration mode and run every migration.
    run_all_migrations(
        setup_connection_and_db_from_config(app_config, migrations=True),
        app_config, global_config)
def setup_database():
    """Open the configured database, verify its schema, publish globally."""
    conf = mg_globals.app_config

    # Media-type plugins register their models before we connect.
    load_models(conf)

    database = setup_connection_and_db_from_config(conf)
    # Refuse to run against an out-of-date schema.
    check_db_migrations_current(database)
    setup_globals(database=database)
    return database
def get_test_app(dump_old_app=True): suicide_if_bad_celery_environ() # Leave this imported as it sets up celery. from mediagoblin.celery_setup import from_tests global MGOBLIN_APP # Just return the old app if that exists and it's okay to set up # and return if MGOBLIN_APP and not dump_old_app: return MGOBLIN_APP # Remove and reinstall user_dev directories if os.path.exists(TEST_USER_DEV): shutil.rmtree(TEST_USER_DEV) for directory in USER_DEV_DIRECTORIES_TO_SETUP: full_dir = os.path.join(TEST_USER_DEV, directory) os.makedirs(full_dir) # Get app config global_config, validation_result = read_mediagoblin_config(TEST_APP_CONFIG) app_config = global_config['mediagoblin'] # Wipe database # @@: For now we're dropping collections, but we could also just # collection.remove() ? connection, db = setup_connection_and_db_from_config(app_config) assert db.name == MEDIAGOBLIN_TEST_DB_NAME collections_to_wipe = [ collection for collection in db.collection_names() if not collection.startswith('system.')] for collection in collections_to_wipe: db.drop_collection(collection) # TODO: Drop and recreate indexes # setup app and return test_app = loadapp( 'config:' + TEST_SERVER_CONFIG) app = TestApp(test_app) MGOBLIN_APP = app return app
def run_dbupdate(app_config, global_config):
    """
    Initialize or migrate the database as specified by the config file.

    Will also initialize or migrate all extensions (media types, and
    in the future, plugins)
    """
    database = setup_connection_and_db_from_config(app_config, migrations=True)

    # Legacy migrations always run.
    run_all_migrations(database, app_config, global_config)

    # TODO: Make this happen regardless of python 2 or 3 once ensured
    # to be "safe"!
    if six.PY3:
        run_alembic_migrations(database, app_config, global_config)
def run_dbupdate(app_config, global_config):
    """
    Initialize or migrate the database as specified by the config file.

    Will also initialize or migrate all extensions (media types, and
    in the future, plugins)
    """
    db = setup_connection_and_db_from_config(app_config, migrations=True)
    run_all_migrations(db, app_config, global_config)

    if not six.PY3:
        # TODO: Make this happen regardless of python 2 or 3 once
        # ensured to be "safe"!
        return
    run_alembic_migrations(db, app_config, global_config)
def setup_database(run_migrations=False):
    """
    Open the database described by mg_globals config and publish it.

    When run_migrations is true, initialize/update the schema in
    process; otherwise just check that the schema is current.
    """
    conf = mg_globals.app_config
    gconf = mg_globals.global_config

    # Plugins and media types register their models before connecting.
    load_models(conf)

    db = setup_connection_and_db_from_config(conf, run_migrations)

    if not run_migrations:
        check_db_migrations_current(db)
    else:
        # Deferred import — mirrors the module's local-import style
        # (presumably to avoid an import cycle with dbupdate).
        from mediagoblin.gmg_commands.dbupdate import run_all_migrations
        run_all_migrations(db, conf, gconf)

    setup_globals(database=db)
    return db
def setup_database(run_migrations=False):
    """Connect to the configured database, migrating it when asked to."""
    app_config = mg_globals.app_config
    global_config = mg_globals.global_config

    # All models (media types, plugins, ...) must be loaded first.
    load_models(app_config)

    database = setup_connection_and_db_from_config(app_config, run_migrations)

    if run_migrations:
        # Initialize/update the schema in place (deferred import;
        # presumably avoids a circular import — matches module style).
        from mediagoblin.gmg_commands.dbupdate import run_all_migrations
        run_all_migrations(database, app_config, global_config)
    else:
        # Only verify; refuse to run on an outdated schema.
        check_db_migrations_current(database)

    setup_globals(database=database)

    return database
def run_dbupdate(app_config, global_config):
    """
    Initialize or migrate the database as specified by the config file.

    Will also initialize or migrate all extensions (media types, and
    in the future, plugins)
    """
    # Collect database info from every media manager / plugin project.
    dbdatas = gather_database_data(
        app_config['media_types'],
        global_config.get('plugins', {}).keys())

    database = setup_connection_and_db_from_config(app_config, migrations=True)

    session_factory = sessionmaker(bind=database.engine)

    # Create or migrate each component's tables in turn.
    for component in dbdatas:
        manager = component.make_migration_manager(session_factory())
        manager.init_or_migrate()
def raw_alembic_cli(args):
    """Hand control to alembic's command line against our database."""
    global_config, app_config = setup_global_and_app_config(args.conf_file)
    database = setup_connection_and_db_from_config(app_config,
                                                   migrations=False)
    cli = FudgedCommandLine()
    cli.main(args, database, global_config)
def __init__(self, config_path, setup_celery=True):
    """
    Initialize the application based on a configuration file.

    Arguments:
     - config_path: path to the configuration file we're opening.
     - setup_celery: whether or not to setup celery during init.
       (Note: setting 'celery_setup_elsewhere' also disables
       setting up celery.)
    """
    ##############
    # Setup config
    ##############

    # Open and setup the config; abort with ImproperlyConfigured when
    # validation reports any problem.
    global_config, validation_result = read_mediagoblin_config(config_path)
    app_config = global_config['mediagoblin']
    # report errors if necessary
    validation_report = generate_validation_report(global_config, validation_result)
    if validation_report:
        raise ImproperlyConfigured(validation_report)

    ##########################################
    # Setup other connections / useful objects
    ##########################################

    # Set up the database
    self.connection, self.db = setup_connection_and_db_from_config(
        app_config)

    # Get the template environment
    self.template_loader = util.get_jinja_loader(
        app_config.get('user_template_path'))

    # Set up storage systems (public files vs. processing queue)
    self.public_store = storage.storage_system_from_config(
        app_config, 'publicstore')
    self.queue_store = storage.storage_system_from_config(
        app_config, 'queuestore')

    # set up routing
    self.routing = routing.get_mapper()

    # set up staticdirector tool
    # NOTE(review): has_key() is Python-2-only dict API; this module
    # predates Python 3 support.
    if app_config.has_key('direct_remote_path'):
        self.staticdirector = staticdirect.RemoteStaticDirect(
            app_config['direct_remote_path'].strip())
    elif app_config.has_key('direct_remote_paths'):
        # One "name path" mapping per line.
        direct_remote_path_lines = app_config['direct_remote_paths'].strip(
            ).splitlines()
        self.staticdirector = staticdirect.MultiRemoteStaticDirect(
            dict([
                line.strip().split(' ', 1)
                for line in direct_remote_path_lines
            ]))
    else:
        raise ImproperlyConfigured("One of direct_remote_path or "
                                   "direct_remote_paths must be provided")

    # Setup celery, if appropriate
    if setup_celery and not app_config.get('celery_setup_elsewhere'):
        if os.environ.get('CELERY_ALWAYS_EAGER'):
            # Eager mode: tasks run inline (used by the environment
            # variable, typically under tests).
            setup_celery_from_config(app_config, global_config,
                                     force_celery_always_eager=True)
        else:
            setup_celery_from_config(app_config, global_config)

    #######################################################
    # Insert appropriate things into mediagoblin.mg_globals
    #
    # certain properties need to be accessed globally eg from
    # validators, etc, which might not access to the request
    # object.
    #######################################################
    setup_globals(
        app_config=app_config,
        global_config=global_config,

        # TODO: No need to set these two up as globals, we could
        # just read them out of mg_globals.app_config
        email_sender_address=app_config['email_sender_address'],
        email_debug_mode=app_config['email_debug_mode'],

        # Actual, useful to everyone objects
        app=self,
        db_connection=self.connection,
        database=self.db,
        public_store=self.public_store,
        queue_store=self.queue_store,
        workbench_manager=WorkbenchManager(app_config['workbench_path']))
def clean_orphan_tags(commit=True):
    """Search for unused MediaTags and delete them"""
    # Left outer join: Tags with no matching MediaTag row are orphans.
    q1 = Session.query(Tag).outerjoin(MediaTag).filter(MediaTag.id==None)
    for t in q1:
        Session.delete(t)

    # The "let the db do all the work" version:
    # q1 = Session.query(Tag.id).outerjoin(MediaTag).filter(MediaTag.id==None)
    # q2 = Session.query(Tag).filter(Tag.id.in_(q1))
    # q2.delete(synchronize_session = False)

    # commit=False lets the caller batch this into a larger transaction.
    if commit:
        Session.commit()


def check_collection_slug_used(creator_id, slug, ignore_c_id):
    """Return True if *creator_id* already has a collection with *slug*.

    ignore_c_id: a collection id to exclude from the check (pass the
    collection's own id when renaming), or None.
    """
    filt = (Collection.creator == creator_id) \
        & (Collection.slug == slug)
    if ignore_c_id is not None:
        filt = filt & (Collection.id != ignore_c_id)
    does_exist = Session.query(Collection.id).filter(filt).first() is not None
    return does_exist


if __name__ == '__main__':
    # Ad-hoc maintenance entry point: open a local sqlite database and
    # purge orphaned tags.
    from mediagoblin.db.open import setup_connection_and_db_from_config

    db = setup_connection_and_db_from_config({'sql_engine':'sqlite:///mediagoblin.db'})

    clean_orphan_tags()
def __init__(self, config_path, setup_celery=True):
    """
    Initialize the application based on a configuration file.

    Arguments:
     - config_path: path to the configuration file we're opening.
     - setup_celery: whether or not to setup celery during init.
       (Note: setting 'celery_setup_elsewhere' also disables
       setting up celery.)
    """
    ##############
    # Setup config
    ##############

    # Open and setup the config; bail out early if validation fails.
    global_config, validation_result = read_mediagoblin_config(config_path)
    app_config = global_config['mediagoblin']
    # report errors if necessary
    validation_report = generate_validation_report(
        global_config, validation_result)
    if validation_report:
        raise ImproperlyConfigured(validation_report)

    ##########################################
    # Setup other connections / useful objects
    ##########################################

    # Set up the database
    self.connection, self.db = setup_connection_and_db_from_config(
        app_config)

    # Get the template environment
    self.template_loader = util.get_jinja_loader(
        app_config.get('user_template_path'))

    # Set up storage systems (public files vs. processing queue)
    self.public_store = storage.storage_system_from_config(
        app_config, 'publicstore')
    self.queue_store = storage.storage_system_from_config(
        app_config, 'queuestore')

    # set up routing
    self.routing = routing.get_mapper()

    # set up staticdirector tool
    # NOTE(review): has_key() is Python-2-only dict API; this module
    # predates Python 3 support.
    if app_config.has_key('direct_remote_path'):
        self.staticdirector = staticdirect.RemoteStaticDirect(
            app_config['direct_remote_path'].strip())
    elif app_config.has_key('direct_remote_paths'):
        # One "name path" mapping per line.
        direct_remote_path_lines = app_config[
            'direct_remote_paths'].strip().splitlines()
        self.staticdirector = staticdirect.MultiRemoteStaticDirect(
            dict([line.strip().split(' ', 1)
                  for line in direct_remote_path_lines]))
    else:
        raise ImproperlyConfigured(
            "One of direct_remote_path or "
            "direct_remote_paths must be provided")

    # Setup celery, if appropriate
    if setup_celery and not app_config.get('celery_setup_elsewhere'):
        if os.environ.get('CELERY_ALWAYS_EAGER'):
            # Eager mode: tasks execute inline (environment-driven,
            # typically under tests).
            setup_celery_from_config(
                app_config, global_config,
                force_celery_always_eager=True)
        else:
            setup_celery_from_config(app_config, global_config)

    #######################################################
    # Insert appropriate things into mediagoblin.mg_globals
    #
    # certain properties need to be accessed globally eg from
    # validators, etc, which might not access to the request
    # object.
    #######################################################
    setup_globals(
        app_config=app_config,
        global_config=global_config,

        # TODO: No need to set these two up as globals, we could
        # just read them out of mg_globals.app_config
        email_sender_address=app_config['email_sender_address'],
        email_debug_mode=app_config['email_debug_mode'],

        # Actual, useful to everyone objects
        app=self,
        db_connection=self.connection,
        database=self.db,
        public_store=self.public_store,
        queue_store=self.queue_store,
        workbench_manager=WorkbenchManager(app_config['workbench_path']))
    # NOTE(review): the four lines below are the tail of
    # check_collection_slug_used; its `def` line lies above this chunk.
    if ignore_c_id is not None:
        filt = filt & (Collection.id != ignore_c_id)
    does_exist = Session.query(Collection.id).filter(filt).first() is not None
    return does_exist


def check_db_up_to_date():
    """Check if the database is up to date and quit if not"""
    dbdatas = gather_database_data(mgg.global_config.get('plugins', {}).keys())

    for dbdata in dbdatas:
        session = Session()
        try:
            migration_manager = dbdata.make_migration_manager(session)
            # Unknown current migration or pending migrations both mean
            # the schema is stale — abort startup with a helpful message.
            if migration_manager.database_current_migration is None or \
                    migration_manager.migrations_to_run():
                sys.exit("Your database is not up to date. Please run "
                         "'gmg dbupdate' before starting MediaGoblin.")
        finally:
            # Always release the scoped session, even on sys.exit.
            Session.rollback()
            Session.remove()


if __name__ == '__main__':
    # Ad-hoc maintenance entry point: open a local sqlite database and
    # purge orphaned tags.
    from mediagoblin.db.open import setup_connection_and_db_from_config

    db = setup_connection_and_db_from_config(
        {'sql_engine': 'sqlite:///mediagoblin.db'})

    clean_orphan_tags()
def raw_alembic_cli(args):
    """Run alembic's own command line against the configured database."""
    global_config, app_config = setup_global_and_app_config(args.conf_file)
    FudgedCommandLine().main(
        args,
        setup_connection_and_db_from_config(app_config, migrations=False),
        global_config)