def setUpClass_method(self):
    """Initialize the test session and (optionally) rebuild the schema.

    When ``self.drop_all`` is set the whole schema is dropped, recreated
    and the transitive-closure machinery reinstalled via ``install_tc``;
    otherwise the database content is only cleaned.  Base test data is
    inserted in both cases.
    """
    from aiida.backends.sqlalchemy import get_scoped_session
    if self.test_session is None:
        # Should we use reset_session?
        self.test_session = get_scoped_session()
    if self.drop_all:
        # NOTE(review): ``Session.connection`` is normally a method in
        # SQLAlchemy — confirm the attribute access (no parentheses) is
        # intentional here.
        Base.metadata.drop_all(self.test_session.connection)
        Base.metadata.create_all(self.test_session.connection)
        install_tc(self.test_session.connection)
    else:
        self.clean_db()
    self.insert_data()
def setUpClass_method(self):
    """Bind a Session to a (lazily created) engine connection.

    The engine and connection are created only the first time; the
    resulting session is published as the global
    ``aiida.backends.sqlalchemy.session`` so module-level code sees it.
    Afterwards the schema is rebuilt or merely cleaned depending on
    ``self.drop_all``.
    """
    if self.test_session is None:
        if self.connection is None:
            config = get_profile_config(AIIDADB_PROFILE)
            engine = get_engine(config)
            # NOTE(review): this assignment is immediately overwritten by
            # ``Session(bind=...)`` below — looks like a leftover from an
            # older implementation; confirm before removing.
            self.test_session = get_session(engine=engine)
            self.connection = engine.connect()
        self.test_session = Session(bind=self.connection)
        aiida.backends.sqlalchemy.session = self.test_session
    if self.drop_all:
        Base.metadata.drop_all(self.connection)
        Base.metadata.create_all(self.connection)
        install_tc(self.connection)
    else:
        self.clean_db()
    self.insert_data()
def downgrade():
    """Migrations for the downgrade: recreate the ``db_dbpath`` table."""
    # Every payload column of db_dbpath has the same shape: a nullable,
    # non-autoincrementing INTEGER — build them in one pass.
    edge_column_names = ('parent_id', 'child_id', 'depth',
                         'entry_edge_id', 'direct_edge_id', 'exit_edge_id')
    table_args = [sa.Column('id', sa.INTEGER(), nullable=False)]
    table_args.extend(
        sa.Column(column_name, sa.INTEGER(),
                  autoincrement=False, nullable=True)
        for column_name in edge_column_names)
    table_args.extend([
        sa.ForeignKeyConstraint(['child_id'], ['db_dbnode.id'],
                                name='db_dbpath_child_id_fkey',
                                initially='DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['db_dbnode.id'],
                                name='db_dbpath_parent_id_fkey',
                                initially='DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name='db_dbpath_pkey'),
    ])
    op.create_table('db_dbpath', *table_args)

    # I get the session using the alembic connection
    # (Keep in mind that alembic uses the AiiDA SQLA
    # session)
    session = Session(bind=op.get_bind())
    install_tc(session)
def setUpClass(cls):
    """Set up a fresh SQLA test database with a default user and computer.

    Builds the engine URL from the ``tests`` profile, optionally drops
    the schema (``cls.drop_all``), recreates all tables, installs the
    transitive-closure triggers, and ensures a configured DbUser and a
    ``localhost`` DbComputer exist.
    """
    config = get_profile_config("tests")
    engine_url = ("postgresql://{AIIDADB_USER}:{AIIDADB_PASS}@"
                  "{AIIDADB_HOST}:{AIIDADB_PORT}/{AIIDADB_NAME}").format(
        **config)
    # Custom (de)serializers so JSONB columns round-trip AiiDA values.
    engine = create_engine(engine_url,
                           json_serializer=dumps_json,
                           json_deserializer=loads_json)
    cls.connection = engine.connect()
    session = Session(bind=cls.connection)
    # Publish the session on the module-level handle used elsewhere.
    sa.session = session

    if cls.drop_all:
        Base.metadata.drop_all(cls.connection)
    Base.metadata.create_all(cls.connection)
    install_tc(cls.connection)

    # Create the default test user if it is missing.
    email = get_configured_user_email()
    has_user = DbUser.query.filter(DbUser.email == email).first()
    if not has_user:
        user = DbUser(email, "foo", "bar", "tests")
        sa.session.add(user)
        sa.session.commit()
        sa.session.expire_all()

    # Create the default test computer if it is missing.
    has_computer = DbComputer.query.filter(
        DbComputer.hostname == 'localhost').first()
    if not has_computer:
        computer = SqlAlchemyTests._create_computer()
        computer.store()

    session.close()
def setup(profile, only_config, non_interactive=False, **kwargs): ''' setup an aiida profile and aiida user (and the aiida default user). :param profile: Profile name :param only_config: do not create a new user :param non_interactive: do not prompt for configuration values, fail if not all values are given as kwargs. :param backend: one of 'django', 'sqlalchemy' :param email: valid email address for the user :param db_host: hostname for the database :param db_port: port to connect to the database :param db_user: name of the db user :param db_pass: password of the db user ''' from aiida.common.setup import (create_base_dirs, create_configuration, set_default_profile, DEFAULT_UMASK, create_config_noninteractive) from aiida.backends.profile import BACKEND_SQLA, BACKEND_DJANGO from aiida.backends.utils import set_backend_type, get_backend_type from aiida.common.exceptions import InvalidOperation # ~ cmdline_args = list(args) # ~ only_user_config = False # ~ try: # ~ cmdline_args.remove('--only-config') # ~ only_user_config = True # ~ except ValueError: # ~ # Parameter not provided # ~ pass only_user_config = only_config # ~ if cmdline_args: # ~ print >> sys.stderr, "Unknown parameters on the command line: " # ~ print >> sys.stderr, ", ".join(cmdline_args) # ~ sys.exit(1) # create the directories to store the configuration files create_base_dirs() # gprofile = 'default' if profile is None else profile # ~ gprofile = profile if settings_profile.AIIDADB_PROFILE is None \ # ~ else settings_profile.AIIDADB_PROFILE if settings_profile.AIIDADB_PROFILE and profile: sys.exit( 'the profile argument cannot be used if verdi is called with -p option: {} and {}' .format(settings_profile.AIIDADB_PROFILE, profile)) gprofile = settings_profile.AIIDADB_PROFILE or profile if gprofile == profile: settings_profile.AIIDADB_PROFILE = profile if not settings_profile.AIIDADB_PROFILE: settings_profile.AIIDADB_PROFILE = 'default' # used internally later gprofile = 
settings_profile.AIIDADB_PROFILE created_conf = None # ask and store the configuration of the DB if non_interactive: try: created_conf = create_config_noninteractive( profile=gprofile, backend=kwargs['backend'], email=kwargs['email'], db_host=kwargs['db_host'], db_port=kwargs['db_port'], db_name=kwargs['db_name'], db_user=kwargs['db_user'], db_pass=kwargs.get('db_pass', ''), repo=kwargs['repo'], force_overwrite=kwargs.get('force_overwrite', False)) except ValueError as e: click.echo("Error during configuation: {}".format(e.message), err=True) sys.exit(1) except KeyError as e: sys.exit( "--non-interactive requires all values to be given on the commandline! {}" .format(e.message), err=True) else: try: created_conf = create_configuration(profile=gprofile) except ValueError as e: print >> sys.stderr, "Error during configuration: {}".format( e.message) sys.exit(1) # set default DB profiles set_default_profile('verdi', gprofile, force_rewrite=False) set_default_profile('daemon', gprofile, force_rewrite=False) if only_user_config: print( "Only user configuration requested, " "skipping the migrate command") else: print "Executing now a migrate command..." backend_choice = created_conf['AIIDADB_BACKEND'] if backend_choice == BACKEND_DJANGO: print("...for Django backend") # The correct profile is selected within load_dbenv. # Setting os.umask here since sqlite database gets created in # this step. 
old_umask = os.umask(DEFAULT_UMASK) # This check should be done more properly # try: # backend_type = get_backend_type() # except KeyError: # backend_type = None # # if backend_type is not None and backend_type != BACKEND_DJANGO: # raise InvalidOperation("An already existing database found" # "and a different than the selected" # "backend was used for its " # "management.") try: pass_to_django_manage([execname, 'migrate'], profile=gprofile) finally: os.umask(old_umask) set_backend_type(BACKEND_DJANGO) elif backend_choice == BACKEND_SQLA: print("...for SQLAlchemy backend") from aiida import is_dbenv_loaded, load_dbenv if not is_dbenv_loaded(): load_dbenv() from aiida.backends.sqlalchemy.models.base import Base from aiida.backends.sqlalchemy.utils import install_tc, reset_session from aiida.common.setup import get_profile_config # This check should be done more properly # try: # backend_type = get_backend_type() # except KeyError: # backend_type = None # # if backend_type is not None and backend_type != BACKEND_SQLA: # raise InvalidOperation("An already existing database found" # "and a different than the selected" # "backend was used for its " # "management.") # Those import are necessary for SQLAlchemy to correctly create # the needed database tables. 
from aiida.backends.sqlalchemy.models.authinfo import (DbAuthInfo) from aiida.backends.sqlalchemy.models.comment import DbComment from aiida.backends.sqlalchemy.models.computer import (DbComputer) from aiida.backends.sqlalchemy.models.group import ( DbGroup, table_groups_nodes) from aiida.backends.sqlalchemy.models.lock import DbLock from aiida.backends.sqlalchemy.models.log import DbLog from aiida.backends.sqlalchemy.models.node import (DbLink, DbNode, DbPath, DbCalcState) from aiida.backends.sqlalchemy.models.user import DbUser from aiida.backends.sqlalchemy.models.workflow import ( DbWorkflow, DbWorkflowData, DbWorkflowStep) from aiida.backends.sqlalchemy.models.settings import DbSetting reset_session(get_profile_config(gprofile)) from aiida.backends.sqlalchemy import get_scoped_session connection = get_scoped_session().connection() Base.metadata.create_all(connection) install_tc(connection) set_backend_type(BACKEND_SQLA) else: raise InvalidOperation("Not supported backend selected.") print "Database was created successfully" # I create here the default user print "Loading new environment..." if only_user_config: from aiida.backends.utils import load_dbenv, is_dbenv_loaded # db environment has not been loaded in this case if not is_dbenv_loaded(): load_dbenv() from aiida.common.setup import DEFAULT_AIIDA_USER from aiida.orm.user import User as AiiDAUser if not AiiDAUser.search_for_users(email=DEFAULT_AIIDA_USER): print "Installing default AiiDA user..." nuser = AiiDAUser(email=DEFAULT_AIIDA_USER) nuser.first_name = "AiiDA" nuser.last_name = "Daemon" nuser.is_staff = True nuser.is_active = True nuser.is_superuser = True nuser.force_save() from aiida.common.utils import get_configured_user_email email = get_configured_user_email() print "Starting user configuration for {}...".format(email) if email == DEFAULT_AIIDA_USER: print "You set up AiiDA using the default Daemon email ({}),".format( email) print "therefore no further user configuration will be asked." 
else: # Ask to configure the new user if not non_interactive: user.configure.main(args=[email]) else: # or don't ask user.do_configure(kwargs['email'], kwargs.get('first_name'), kwargs.get('last_name'), kwargs.get('institution'), True) print "Setup finished."
def upgrade():
    """Create the initial AiiDA schema for the SQLAlchemy backend.

    Tables are created in dependency order so every foreign key
    references an already-existing table; the transitive-closure
    machinery is installed at the very end via ``install_tc``.
    """
    # Users first: most other tables carry a user_id foreign key.
    op.create_table('db_dbuser',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('email', sa.VARCHAR(length=254),
                  autoincrement=False, nullable=True),
        sa.Column('password', sa.VARCHAR(length=128),
                  autoincrement=False, nullable=True),
        sa.Column('is_superuser', sa.BOOLEAN(),
                  autoincrement=False, nullable=False),
        sa.Column('first_name', sa.VARCHAR(length=254),
                  autoincrement=False, nullable=True),
        sa.Column('last_name', sa.VARCHAR(length=254),
                  autoincrement=False, nullable=True),
        sa.Column('institution', sa.VARCHAR(length=254),
                  autoincrement=False, nullable=True),
        sa.Column('is_staff', sa.BOOLEAN(),
                  autoincrement=False, nullable=True),
        sa.Column('is_active', sa.BOOLEAN(),
                  autoincrement=False, nullable=True),
        sa.Column('last_login', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('date_joined', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbuser_pkey'),
        postgresql_ignore_search_path=False
    )
    op.create_index('ix_db_dbuser_email', 'db_dbuser', ['email'],
                    unique=True)

    # Workflow tables.
    op.create_table('db_dbworkflow',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('uuid', postgresql.UUID(),
                  autoincrement=False, nullable=True),
        sa.Column('ctime', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('mtime', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('label', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('description', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('nodeversion', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('lastsyncedversion', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('state', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('report', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('module', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('module_class', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('script_path', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('script_md5', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['user_id'], [u'db_dbuser.id'],
                                name=u'db_dbworkflow_user_id_fkey'),
        sa.PrimaryKeyConstraint('id', name=u'db_dbworkflow_pkey'),
        postgresql_ignore_search_path=False
    )
    op.create_index('ix_db_dbworkflow_label', 'db_dbworkflow', ['label'])

    op.create_table('db_dbworkflowstep',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('parent_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('nextcall', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('state', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], [u'db_dbworkflow.id'],
                                name=u'db_dbworkflowstep_parent_id_fkey'),
        sa.ForeignKeyConstraint(['user_id'], [u'db_dbuser.id'],
                                name=u'db_dbworkflowstep_user_id_fkey'),
        sa.PrimaryKeyConstraint('id', name=u'db_dbworkflowstep_pkey'),
        sa.UniqueConstraint('parent_id', 'name',
                            name=u'db_dbworkflowstep_parent_id_name_key'),
        postgresql_ignore_search_path=False
    )

    # Computers and authorization info.
    op.create_table('db_dbcomputer',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('uuid', postgresql.UUID(),
                  autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=False),
        sa.Column('hostname', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('description', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('enabled', sa.BOOLEAN(),
                  autoincrement=False, nullable=True),
        sa.Column('transport_type', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('scheduler_type', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('transport_params', postgresql.JSONB(),
                  autoincrement=False, nullable=True),
        sa.Column('metadata', postgresql.JSONB(),
                  autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbcomputer_pkey'),
        sa.UniqueConstraint('name', name=u'db_dbcomputer_name_key')
    )

    op.create_table('db_dbauthinfo',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('aiidauser_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('dbcomputer_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('metadata', postgresql.JSONB(),
                  autoincrement=False, nullable=True),
        sa.Column('auth_params', postgresql.JSONB(),
                  autoincrement=False, nullable=True),
        sa.Column('enabled', sa.BOOLEAN(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['aiidauser_id'], [u'db_dbuser.id'],
                                name=u'db_dbauthinfo_aiidauser_id_fkey',
                                ondelete=u'CASCADE',
                                initially=u'DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['dbcomputer_id'], [u'db_dbcomputer.id'],
                                name=u'db_dbauthinfo_dbcomputer_id_fkey',
                                ondelete=u'CASCADE',
                                initially=u'DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbauthinfo_pkey'),
        sa.UniqueConstraint(
            'aiidauser_id', 'dbcomputer_id',
            name=u'db_dbauthinfo_aiidauser_id_dbcomputer_id_key')
    )

    # Groups and the group<->node membership table.
    op.create_table('db_dbgroup',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('uuid', postgresql.UUID(),
                  autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('type', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('description', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['user_id'], [u'db_dbuser.id'],
                                name=u'db_dbgroup_user_id_fkey',
                                ondelete=u'CASCADE',
                                initially=u'DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbgroup_pkey'),
        sa.UniqueConstraint('name', 'type',
                            name=u'db_dbgroup_name_type_key')
    )
    op.create_index('ix_db_dbgroup_name', 'db_dbgroup', ['name'])
    op.create_index('ix_db_dbgroup_type', 'db_dbgroup', ['type'])

    # Nodes — the central table of the provenance graph.
    op.create_table('db_dbnode',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('uuid', postgresql.UUID(),
                  autoincrement=False, nullable=True),
        sa.Column('type', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('label', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('description', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('ctime', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('mtime', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('nodeversion', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('public', sa.BOOLEAN(),
                  autoincrement=False, nullable=True),
        sa.Column('attributes', postgresql.JSONB(),
                  autoincrement=False, nullable=True),
        sa.Column('extras', postgresql.JSONB(),
                  autoincrement=False, nullable=True),
        sa.Column('dbcomputer_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(),
                  autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(['dbcomputer_id'], [u'db_dbcomputer.id'],
                                name=u'db_dbnode_dbcomputer_id_fkey',
                                ondelete=u'RESTRICT',
                                initially=u'DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['user_id'], [u'db_dbuser.id'],
                                name=u'db_dbnode_user_id_fkey',
                                ondelete=u'RESTRICT',
                                initially=u'DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbnode_pkey'),
        postgresql_ignore_search_path=False
    )
    op.create_index('ix_db_dbnode_label', 'db_dbnode', ['label'])
    op.create_index('ix_db_dbnode_type', 'db_dbnode', ['type'])

    op.create_table('db_dbgroup_dbnodes',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('dbnode_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('dbgroup_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['dbgroup_id'], [u'db_dbgroup.id'],
                                name=u'db_dbgroup_dbnodes_dbgroup_id_fkey',
                                initially=u'DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['dbnode_id'], [u'db_dbnode.id'],
                                name=u'db_dbgroup_dbnodes_dbnode_id_fkey',
                                initially=u'DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbgroup_dbnodes_pkey')
    )

    op.create_table('db_dblock',
        sa.Column('key', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=False),
        sa.Column('creation', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('timeout', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('owner', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('key', name=u'db_dblock_pkey')
    )

    op.create_table('db_dbworkflowdata',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('parent_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('data_type', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('value_type', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('json_value', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('aiida_obj_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ['aiida_obj_id'], [u'db_dbnode.id'],
            name=u'db_dbworkflowdata_aiida_obj_id_fkey'),
        sa.ForeignKeyConstraint(
            ['parent_id'], [u'db_dbworkflow.id'],
            name=u'db_dbworkflowdata_parent_id_fkey'),
        sa.PrimaryKeyConstraint('id', name=u'db_dbworkflowdata_pkey'),
        sa.UniqueConstraint(
            'parent_id', 'name', 'data_type',
            name=u'db_dbworkflowdata_parent_id_name_data_type_key')
    )

    # Links between nodes.
    op.create_table('db_dblink',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('input_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('output_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('label', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=False),
        sa.Column('type', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['input_id'], [u'db_dbnode.id'],
                                name=u'db_dblink_input_id_fkey',
                                initially=u'DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['output_id'], [u'db_dbnode.id'],
                                name=u'db_dblink_output_id_fkey',
                                ondelete=u'CASCADE',
                                initially=u'DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dblink_pkey'),
    )
    op.create_index('ix_db_dblink_label', 'db_dblink', ['label'])

    op.create_table('db_dbworkflowstep_calculations',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('dbworkflowstep_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('dbnode_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ['dbnode_id'], [u'db_dbnode.id'],
            name=u'db_dbworkflowstep_calculations_dbnode_id_fkey'),
        sa.ForeignKeyConstraint(
            ['dbworkflowstep_id'], [u'db_dbworkflowstep.id'],
            name=u'db_dbworkflowstep_calculations_dbworkflowstep_id_fkey'),
        sa.PrimaryKeyConstraint(
            'id', name=u'db_dbworkflowstep_calculations_pkey'),
        sa.UniqueConstraint(
            'dbworkflowstep_id', 'dbnode_id',
            name=u'db_dbworkflowstep_calculations_id_dbnode_id_key')
    )

    # Transitive-closure path table (maintained by the install_tc
    # triggers installed below).
    op.create_table('db_dbpath',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('parent_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('child_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('depth', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('entry_edge_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('direct_edge_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('exit_edge_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['child_id'], [u'db_dbnode.id'],
                                name=u'db_dbpath_child_id_fkey',
                                initially=u'DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['parent_id'], [u'db_dbnode.id'],
                                name=u'db_dbpath_parent_id_fkey',
                                initially=u'DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbpath_pkey')
    )

    op.create_table('db_dbcalcstate',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('dbnode_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('state', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['dbnode_id'], [u'db_dbnode.id'],
                                name=u'db_dbcalcstate_dbnode_id_fkey',
                                ondelete=u'CASCADE',
                                initially=u'DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbcalcstate_pkey'),
        sa.UniqueConstraint('dbnode_id', 'state',
                            name=u'db_dbcalcstate_dbnode_id_state_key')
    )
    op.create_index('ix_db_dbcalcstate_state', 'db_dbcalcstate', ['state'])

    op.create_table('db_dbsetting',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('key', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=False),
        sa.Column('val', postgresql.JSONB(),
                  autoincrement=False, nullable=True),
        sa.Column('description', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=False),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbsetting_pkey'),
        sa.UniqueConstraint('key', name=u'db_dbsetting_key_key')
    )
    op.create_index('ix_db_dbsetting_key', 'db_dbsetting', ['key'])

    op.create_table('db_dbcomment',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('uuid', postgresql.UUID(),
                  autoincrement=False, nullable=True),
        sa.Column('dbnode_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('ctime', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('mtime', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('content', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['dbnode_id'], [u'db_dbnode.id'],
                                name=u'db_dbcomment_dbnode_id_fkey',
                                ondelete=u'CASCADE',
                                initially=u'DEFERRED', deferrable=True),
        sa.ForeignKeyConstraint(['user_id'], [u'db_dbuser.id'],
                                name=u'db_dbcomment_user_id_fkey',
                                ondelete=u'CASCADE',
                                initially=u'DEFERRED', deferrable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dbcomment_pkey')
    )

    op.create_table('db_dblog',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('time', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('loggername', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('levelname', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('objname', sa.VARCHAR(length=255),
                  autoincrement=False, nullable=True),
        sa.Column('objpk', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('message', sa.TEXT(),
                  autoincrement=False, nullable=True),
        sa.Column('metadata', postgresql.JSONB(),
                  autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name=u'db_dblog_pkey')
    )
    op.create_index('ix_db_dblog_levelname', 'db_dblog', ['levelname'])
    op.create_index('ix_db_dblog_loggername', 'db_dblog', ['loggername'])
    op.create_index('ix_db_dblog_objname', 'db_dblog', ['objname'])
    op.create_index('ix_db_dblog_objpk', 'db_dblog', ['objpk'])

    op.create_table('db_dbworkflowstep_sub_workflows',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('dbworkflowstep_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.Column('dbworkflow_id', sa.INTEGER(),
                  autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ['dbworkflow_id'], [u'db_dbworkflow.id'],
            name=u'db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey'),
        sa.ForeignKeyConstraint(
            ['dbworkflowstep_id'], [u'db_dbworkflowstep.id'],
            name=u'db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey'),
        sa.PrimaryKeyConstraint(
            'id', name=u'db_dbworkflowstep_sub_workflows_pkey'),
        sa.UniqueConstraint(
            'dbworkflowstep_id', 'dbworkflow_id',
            name=u'db_dbworkflowstep_sub_workflows_id_dbworkflow__key')
    )

    # I get the session using the alembic connection
    # (Keep in mind that alembic uses the AiiDA SQLA
    # session)
    session = Session(bind=op.get_bind())
    install_tc(session)
def run(self, *args):
    """Interactive ``verdi install``-style command (legacy).

    Parses ``--only-config`` from the raw arguments, creates the base
    configuration directories, asks for (and stores) the DB
    configuration, runs the backend-specific migration, and finally
    creates/configures the AiiDA user.
    """
    from aiida.common.setup import (create_base_dirs, create_configuration,
                                    set_default_profile, DEFAULT_UMASK)
    from aiida.backends.profile import BACKEND_SQLA, BACKEND_DJANGO
    from aiida.backends.utils import set_backend_type, get_backend_type
    from aiida.common.exceptions import InvalidOperation

    cmdline_args = list(args)

    only_user_config = False
    try:
        cmdline_args.remove('--only-config')
        only_user_config = True
    except ValueError:
        # Parameter not provided
        pass

    if cmdline_args:
        print >> sys.stderr, "Unknown parameters on the command line: "
        print >> sys.stderr, ", ".join(cmdline_args)
        sys.exit(1)

    # create the directories to store the configuration files
    create_base_dirs()
    # gprofile = 'default' if profile is None else profile
    gprofile = 'default' if settings_profile.AIIDADB_PROFILE is None \
        else settings_profile.AIIDADB_PROFILE

    created_conf = None
    # ask and store the configuration of the DB
    try:
        created_conf = create_configuration(profile=gprofile)
    except ValueError as e:
        print >> sys.stderr, "Error during configuration: {}".format(
            e.message)
        sys.exit(1)

    # set default DB profiles
    set_default_profile('verdi', gprofile, force_rewrite=False)
    set_default_profile('daemon', gprofile, force_rewrite=False)

    if only_user_config:
        print("Only user configuration requested, "
              "skipping the migrate command")
    else:
        print "Executing now a migrate command..."
        backend_choice = created_conf['AIIDADB_BACKEND']
        if backend_choice == BACKEND_DJANGO:
            print("...for Django backend")
            # The correct profile is selected within load_dbenv.
            # Setting os.umask here since sqlite database gets created in
            # this step.
            old_umask = os.umask(DEFAULT_UMASK)

            # This check should be done more properly
            # try:
            #     backend_type = get_backend_type()
            # except KeyError:
            #     backend_type = None
            #
            # if backend_type is not None and backend_type != BACKEND_DJANGO:
            #     raise InvalidOperation("An already existing database found"
            #                            "and a different than the selected"
            #                            "backend was used for its "
            #                            "management.")

            try:
                pass_to_django_manage([execname, 'migrate'],
                                      profile=gprofile)
            finally:
                os.umask(old_umask)

            set_backend_type(BACKEND_DJANGO)

        elif backend_choice == BACKEND_SQLA:
            print("...for SQLAlchemy backend")
            from aiida.backends.sqlalchemy.models.base import Base
            from aiida.backends.sqlalchemy.utils import (get_engine,
                                                         install_tc)
            from aiida.common.setup import get_profile_config

            from aiida import is_dbenv_loaded, load_dbenv
            if not is_dbenv_loaded():
                load_dbenv()

            # This check should be done more properly
            # try:
            #     backend_type = get_backend_type()
            # except KeyError:
            #     backend_type = None
            #
            # if backend_type is not None and backend_type != BACKEND_SQLA:
            #     raise InvalidOperation("An already existing database found"
            #                            "and a different than the selected"
            #                            "backend was used for its "
            #                            "management.")

            # Those import are necessary for SQLAlchemy to correctly create
            # the needed database tables.
            from aiida.backends.sqlalchemy.models.authinfo import (
                DbAuthInfo)
            from aiida.backends.sqlalchemy.models.comment import DbComment
            from aiida.backends.sqlalchemy.models.computer import (
                DbComputer)
            from aiida.backends.sqlalchemy.models.group import (
                DbGroup, table_groups_nodes)
            from aiida.backends.sqlalchemy.models.lock import DbLock
            from aiida.backends.sqlalchemy.models.log import DbLog
            from aiida.backends.sqlalchemy.models.node import (
                DbLink, DbNode, DbPath, DbCalcState)
            from aiida.backends.sqlalchemy.models.user import DbUser
            from aiida.backends.sqlalchemy.models.workflow import (
                DbWorkflow, DbWorkflowData, DbWorkflowStep)
            from aiida.backends.sqlalchemy.models.settings import DbSetting

            connection = get_engine(get_profile_config(gprofile))
            Base.metadata.create_all(connection)
            install_tc(connection)

            set_backend_type(BACKEND_SQLA)
        else:
            raise InvalidOperation("Not supported backend selected.")

        print "Database was created successfully"

    # I create here the default user
    print "Loading new environment..."
    if only_user_config:
        from aiida.backends.utils import load_dbenv, is_dbenv_loaded
        # db environment has not been loaded in this case
        if not is_dbenv_loaded():
            load_dbenv()

    from aiida.common.setup import DEFAULT_AIIDA_USER
    from aiida.orm.user import User as AiiDAUser

    if not AiiDAUser.search_for_users(email=DEFAULT_AIIDA_USER):
        print "Installing default AiiDA user..."
        nuser = AiiDAUser(email=DEFAULT_AIIDA_USER)
        nuser.first_name = "AiiDA"
        nuser.last_name = "Daemon"
        nuser.is_staff = True
        nuser.is_active = True
        nuser.is_superuser = True
        nuser.force_save()

    from aiida.common.utils import get_configured_user_email
    email = get_configured_user_email()
    print "Starting user configuration for {}...".format(email)
    if email == DEFAULT_AIIDA_USER:
        print "You set up AiiDA using the default Daemon email ({}),".format(
            email)
        print "therefore no further user configuration will be asked."
    else:
        # Ask to configure the new user
        User().user_configure(email)

    print "Install finished."