def initialize_startup():
    """
    Force DB tables create, in case no data is already found.

    Inspects the DB; when no tables exist, (re)creates the versioning
    repository and all mapped tables, otherwise runs pending migrations.

    :returns: True when the DB was found empty and freshly initialized.
    """
    is_db_empty = False
    session = SA_SESSIONMAKER()
    try:
        # An empty table list on the live connection means a fresh DB.
        inspector = reflection.Inspector.from_engine(session.connection())
        if len(inspector.get_table_names()) < 1:
            LOGGER.debug("Database access exception, maybe DB is empty")
            is_db_empty = True
    finally:
        # Always release the inspection session, even when reflection fails
        # (the original leaked it on exception).
        session.close()
    if is_db_empty:
        LOGGER.info("Initializing Database")
        # Start from a clean versioning repository, then place the new DB
        # under version control at the current schema version.
        if os.path.exists(cfg.DB_VERSIONING_REPO):
            shutil.rmtree(cfg.DB_VERSIONING_REPO)
        migratesqlapi.create(cfg.DB_VERSIONING_REPO, os.path.split(cfg.DB_VERSIONING_REPO)[1])
        _update_sql_scripts()
        migratesqlapi.version_control(cfg.DB_URL, cfg.DB_VERSIONING_REPO, version=cfg.DB_CURRENT_VERSION)
        session = SA_SESSIONMAKER()
        try:
            model.Base.metadata.create_all(bind=session.connection())
            session.commit()
        finally:
            session.close()
        LOGGER.info("Database Default Tables created successfully!")
    else:
        # Existing data: only apply migration scripts up to the current version.
        _update_sql_scripts()
        migratesqlapi.upgrade(cfg.DB_URL, cfg.DB_VERSIONING_REPO, version=cfg.DB_CURRENT_VERSION)
        LOGGER.info("Database already has some data, will not be re-created!")
    return is_db_empty
def initialize_startup():
    """
    Force DB tables create, in case no data is already found.

    Inspects the DB; when no tables exist, (re)creates the versioning
    repository and all mapped tables, otherwise runs pending migrations.

    :returns: True when the DB was found empty and freshly initialized.
    """
    is_db_empty = False
    session = SA_SESSIONMAKER()
    try:
        # An empty table list on the live connection means a fresh DB.
        inspector = reflection.Inspector.from_engine(session.connection())
        if len(inspector.get_table_names()) < 1:
            LOGGER.debug("Database access exception, maybe DB is empty")
            is_db_empty = True
    finally:
        # Always release the inspection session, even when reflection fails
        # (the original leaked it on exception).
        session.close()
    versions_repo = TvbProfile.current.db.DB_VERSIONING_REPO
    if is_db_empty:
        LOGGER.info("Initializing Database")
        # Start from a clean versioning repository, then place the new DB
        # under version control at the current structure version.
        if os.path.exists(versions_repo):
            shutil.rmtree(versions_repo)
        migratesqlapi.create(versions_repo, os.path.split(versions_repo)[1])
        _update_sql_scripts()
        migratesqlapi.version_control(TvbProfile.current.db.DB_URL, versions_repo,
                                      version=TvbProfile.current.version.DB_STRUCTURE_VERSION)
        session = SA_SESSIONMAKER()
        try:
            model.Base.metadata.create_all(bind=session.connection())
            session.commit()
        finally:
            session.close()
        LOGGER.info("Database Default Tables created successfully!")
    else:
        # Existing data: only apply migration scripts up to the current version.
        _update_sql_scripts()
        migratesqlapi.upgrade(TvbProfile.current.db.DB_URL, versions_repo,
                              version=TvbProfile.current.version.DB_STRUCTURE_VERSION)
        LOGGER.info("Database already has some data, will not be re-created!")
    return is_db_empty
def initialize_startup():
    """
    Force DB tables create, in case no data is already found.

    When the DB is empty: create all mapped tables and stamp the schema at
    Alembic 'head'. Otherwise: upgrade the existing schema through Alembic
    migrations up to the configured structure version.

    :returns: True when the DB was found empty and freshly initialized.
    """
    is_db_empty = False
    session = SA_SESSIONMAKER()
    try:
        # An empty table list on the live connection means a fresh DB.
        inspector = reflection.Inspector.from_engine(session.connection())
        if len(inspector.get_table_names()) < 1:
            LOGGER.debug("Database access exception, maybe DB is empty")
            is_db_empty = True
    finally:
        # Release the inspection connection even on failure; the Session
        # object itself stays reusable (it opens a new connection below).
        session.close()
    versions_repo = TvbProfile.current.db.DB_VERSIONING_REPO
    alembic_cfg = Config()
    alembic_cfg.set_main_option('script_location', versions_repo)
    alembic_cfg.set_main_option('sqlalchemy.url', TvbProfile.current.db.DB_URL)
    if is_db_empty:
        LOGGER.info("Initializing Database")
        if os.path.exists(versions_repo):
            shutil.rmtree(versions_repo)
        _update_sql_scripts()
        session = SA_SESSIONMAKER()
        try:
            Base.metadata.create_all(bind=session.connection())
            session.commit()
        finally:
            # Original leaked this session when create_all raised.
            session.close()
        # Mark the freshly created schema as already at the latest revision.
        command.stamp(alembic_cfg, 'head')
        LOGGER.info("Database Default Tables created successfully!")
    else:
        _update_sql_scripts()
        # Hand Alembic our own connection so the upgrade runs on it directly.
        with session.connection() as connection:
            alembic_cfg.attributes['connection'] = connection
            command.upgrade(alembic_cfg, TvbProfile.current.version.DB_STRUCTURE_VERSION)
        LOGGER.info("Database already has some data, will not be re-created!")
    return is_db_empty
def test_db_mapping(self):
    """Verify that traited attributes survive a DB store/retrieve round-trip."""
    # Make sure all mapped tables exist before touching the DB.
    setup_session = SA_SESSIONMAKER()
    model.Base.metadata.create_all(bind=setup_session.connection())
    setup_session.commit()
    setup_session.close()
    # Reference values for every traited field under test.
    expected = {'dikt': {'a': 6},
                'tup': ('5', 9.348),
                'dtype': numpy.dtype(float),
                'json': {'a': 'asdf', 'b': {'23': '687568'}}}
    stored = MappedTestClass()
    for field_name, value in expected.items():
        # Deep-copy so later mutations by storage cannot touch the references.
        setattr(stored, field_name, copy.deepcopy(value))
    stored.set_operation_id(self.operation.id)
    stored = dao.store_entity(stored)
    # Reload by GID and compare each field against the original value.
    reloaded = dao.get_generic_entity(MappedTestClass, stored.gid, 'gid')[0]
    for field_name, value in expected.items():
        self.assertEqual(getattr(reloaded, field_name), value)
def reset_database():
    """
    Remove all tables in DB.

    Best-effort: each table is dropped individually and failures are logged,
    never raised to the caller.
    """
    LOGGER.warning("Your Database tables will be deleted.")
    # Create the session BEFORE the try-block: previously it was created
    # inside, so a failing SA_SESSIONMAKER() made `finally` raise NameError
    # on the unbound `session`, masking the original error.
    session = SA_SESSIONMAKER()
    try:
        LOGGER.debug("Delete connection initiated.")
        inspector = reflection.Inspector.from_engine(session.connection())
        for table in inspector.get_table_names():
            try:
                LOGGER.debug("Removing:" + table)
                # Quoted + CASCADE form first (handles dependent objects)...
                session.execute(text("DROP TABLE \"%s\" CASCADE" % table))
            except Exception:
                try:
                    # ... then the plain form for engines without CASCADE.
                    session.execute(text("DROP TABLE %s" % table))
                except Exception as excep1:
                    LOGGER.error("Could not drop table %s", table)
                    LOGGER.exception(excep1)
        session.commit()
        LOGGER.info("Database was cleaned up!")
    except Exception as excep:
        LOGGER.warning(excep)
    finally:
        session.close()
def reset_database():
    """
    Remove all tables in DB.

    Best-effort: each table is dropped individually and failures are logged,
    never raised to the caller.
    """
    LOGGER.warning("Your Database tables will be deleted.")
    # Create the session BEFORE the try-block: previously it was created
    # inside, so a failing SA_SESSIONMAKER() made `finally` raise NameError
    # on the unbound `session`, masking the original error.
    session = SA_SESSIONMAKER()
    try:
        LOGGER.debug("Delete connection initiated.")
        inspector = reflection.Inspector.from_engine(session.connection())
        for table in inspector.get_table_names():
            try:
                LOGGER.debug("Removing:" + table)
                # Quoted + CASCADE form first (handles dependent objects)...
                session.execute(text("DROP TABLE \"%s\" CASCADE" % table))
            except Exception:
                try:
                    # ... then the plain form for engines without CASCADE.
                    session.execute(text("DROP TABLE %s" % table))
                except Exception as excep1:
                    LOGGER.error("Could not drop table %s", table)
                    LOGGER.exception(excep1)
        session.commit()
        LOGGER.info("Database was cleaned up!")
    except Exception as excep:
        LOGGER.warning(excep)
    finally:
        session.close()
def test_db_mapping(self):
    """Verify that traited attributes survive a DB store/retrieve round-trip."""
    # Ensure all mapped tables exist before running the test body.
    setup_session = SA_SESSIONMAKER()
    model.Base.metadata.create_all(bind=setup_session.connection())
    setup_session.commit()
    setup_session.close()
    # Reference values for every traited field under test.
    expected = {'dikt': {'a': 6},
                'tup': ('5', 9.348),
                'dtype': numpy.dtype(float),
                'json': {'a': 'asdf', 'b': {'23': '687568'}}}
    entity = MappedTestClass()
    for field_name, value in expected.items():
        # Deep-copy so later mutations by storage cannot touch the references.
        setattr(entity, field_name, copy.deepcopy(value))
    entity.set_operation_id(self.operation.id)
    entity = dao.store_entity(entity)
    # Reload by GID and compare each field against the original value.
    loaded = dao.get_generic_entity(MappedTestClass, entity.gid, 'gid')[0]
    for field_name, value in expected.items():
        self.assertEqual(getattr(loaded, field_name), value)
def introspect(self, do_create):
    """
    Introspect a given module to:
    - create tables for custom DataType;
    - populate adapter algorithms references.

    :param do_create: when True, also import DataType classes and create
        their DB tables before registering adapters.
    """
    self.logger.debug("Introspection into module:" + self.module_name)
    module = __import__(self.module_name, globals(), locals(), ["__init__"])
    try:
        path_adapters = module.ADAPTERS
        self.path_types = module.DATATYPES_PATH
        self.removers_path = module.REMOVERS_PATH
        self.path_portlets = getattr(module, 'PORTLETS_PATH', [])
    except Exception as excep:
        self.logger.warning("Module " + self.module_name + " is not fully introspect compatible!")
        # Fix: BaseException has no `.message` attribute in Python 3, so the
        # original `excep.message` raised AttributeError here; use str().
        self.logger.warning(str(excep))
        return
    if do_create:
        self.logger.debug("Found Datatypes_Path=" + str(self.path_types))
        # DataTypes only need to be imported for adding to DB tables
        for path in self.path_types:
            self.__get_datatypes(path)
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
    # NOTE(review): nesting reconstructed from a collapsed source line; the
    # refresh branch (filter_category hit) is only reachable when this loop
    # also runs with do_create=False — confirm against upstream history.
    self.logger.debug("Found Adapters_Dict=" + str(path_adapters))
    for category_name in path_adapters:
        category_details = path_adapters[category_name]
        launchable = bool(category_details.get(LAUNCHABLE))
        rawinput = bool(category_details.get(RAWINPUT))
        display = bool(category_details.get(DISPLAYER))
        order_nr = category_details.get(ORDER, 999)
        category_instance = dao.filter_category(category_name, rawinput, display, launchable, order_nr)
        if category_instance is not None:
            # Known category: just refresh its introspection timestamp.
            category_instance.last_introspection_check = datetime.datetime.now()
            category_instance.removed = False
        else:
            category_state = category_details.get(STATE, '')
            category_instance = model.AlgorithmCategory(category_name, launchable, rawinput, display,
                                                        category_state, order_nr, datetime.datetime.now())
        category_instance = dao.store_entity(category_instance)
        for actual_module in path_adapters[category_name]['modules']:
            self.__read_adapters(category_instance.id, actual_module)
    for path in self.path_portlets:
        self.__get_portlets(path)
    ### Register Remover instances for current introspected module
    removers.update_dictionary(self.get_removers_dict())
def introspect(self, do_create):
    """
    Introspect a given module to:
    - create tables for custom DataType;
    - populate adapter algorithms references.

    :param do_create: when True, also import DataType classes and create
        their DB tables before registering adapters.
    """
    self.logger.debug("Introspection into module:" + self.module_name)
    module = __import__(self.module_name, globals(), locals(), ["__init__"])
    try:
        path_adapters = module.ADAPTERS
        self.path_types = module.DATATYPES_PATH
        self.removers_path = module.REMOVERS_PATH
        self.path_portlets = getattr(module, 'PORTLETS_PATH', [])
    except Exception as excep:
        self.logger.warning("Module " + self.module_name + " is not fully introspect compatible!")
        # Fix: BaseException has no `.message` attribute in Python 3, so the
        # original `excep.message` raised AttributeError here; use str().
        self.logger.warning(str(excep))
        return
    if do_create:
        self.logger.debug("Found Datatypes_Path=" + str(self.path_types))
        # DataTypes only need to be imported for adding to DB tables
        for path in self.path_types:
            self.__get_datatypes(path)
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
    # NOTE(review): nesting reconstructed from a collapsed source line; the
    # refresh branch (filter_category hit) is only reachable when this loop
    # also runs with do_create=False — confirm against upstream history.
    self.logger.debug("Found Adapters_Dict=" + str(path_adapters))
    for category_name in path_adapters:
        category_details = path_adapters[category_name]
        launchable = bool(category_details.get(LAUNCHABLE))
        rawinput = bool(category_details.get(RAWINPUT))
        display = bool(category_details.get(DISPLAYER))
        order_nr = category_details.get(ORDER, 999)
        category_instance = dao.filter_category(category_name, rawinput, display, launchable, order_nr)
        if category_instance is not None:
            # Known category: just refresh its introspection timestamp.
            category_instance.last_introspection_check = datetime.datetime.now()
            category_instance.removed = False
        else:
            category_state = category_details.get(STATE, '')
            category_instance = model.AlgorithmCategory(category_name, launchable, rawinput, display,
                                                        category_state, order_nr, datetime.datetime.now())
        category_instance = dao.store_entity(category_instance)
        for actual_module in path_adapters[category_name]['modules']:
            self.__read_adapters(category_instance.id, actual_module)
    for path in self.path_portlets:
        self.__get_portlets(path)
    ### Register Remover instances for current introspected module
    removers.update_dictionary(self.get_removers_dict())
def _ensure_datatype_tables_are_created():
    """Create every table mapped on Base.metadata, if not already present."""
    session = SA_SESSIONMAKER()
    try:
        session.commit() if False else None  # placeholder removed below
    finally:
        pass
else: self.path_portlets = [] except Exception, excep: self.logger.warning("Module " + self.module_name + " is not fully introspect compatible!") self.logger.warning(excep.message) return if do_create: self.logger.debug("Found Datatypes_Path=" + str(self.path_types)) # DataTypes only need to be imported for adding to DB tables for path in self.path_types: self.__get_datatypes(path) session = SA_SESSIONMAKER() model.Base.metadata.create_all(bind=session.connection()) session.commit() session.close() self.logger.debug("Found Adapters_Dict=" + str(path_adapters)) for category_name in path_adapters: category_details = path_adapters[category_name] launchable = (LAUNCHABLE in category_details and category_details[LAUNCHABLE]) rawinput = (RAWINPUT in category_details and category_details[RAWINPUT]) display = (DISPLAYER in category_details and category_details[DISPLAYER]) if ORDER in category_details: order_nr = category_details[ORDER] else:
self.path_types = module.DATATYPES_PATH self.removers_path = module.REMOVERS_PATH self.path_portlets = getattr(module, 'PORTLETS_PATH', []) except Exception, excep: self.logger.warning("Module " + self.module_name + " is not fully introspect compatible!") self.logger.warning(excep.message) return if do_create: self.logger.debug("Found Datatypes_Path=" + str(self.path_types)) # DataTypes only need to be imported for adding to DB tables for path in self.path_types: self.__get_datatypes(path) session = SA_SESSIONMAKER() model.Base.metadata.create_all(bind=session.connection()) session.commit() session.close() self.logger.debug("Found Adapters_Dict=" + str(path_adapters)) for category_name in path_adapters: category_details = path_adapters[category_name] launchable = bool(category_details.get(LAUNCHABLE)) rawinput = bool(category_details.get(RAWINPUT)) display = bool(category_details.get(DISPLAYER)) order_nr = category_details.get(ORDER, 999) category_instance = dao.filter_category(category_name, rawinput, display, launchable, order_nr) if category_instance is not None: category_instance.last_introspection_check = datetime.datetime.now() category_instance.removed = False else: