def setUp(self):
    """ Reset the database before each test. """
    # self.reset_database()
    self.flow_service = FlowService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(admin=self.test_user)

    # Seed the database: two categories, four algorithm groups and two
    # algorithm entries for the FlowService tests to query against.
    self.categ1 = dao.store_entity(model.AlgorithmCategory('one', True))
    self.categ2 = dao.store_entity(model.AlgorithmCategory('two', rawinput=True))

    self.algo1 = dao.store_entity(model.AlgorithmGroup("test_module1", "classname1", self.categ1.id))
    dao.store_entity(model.AlgorithmGroup("test_module2", "classname2", self.categ2.id))

    valid_group = dao.store_entity(model.AlgorithmGroup("tvb_test.core.services.flowservice_test",
                                                        "ValidTestAdapter", self.categ2.id))
    self.algo_inst = dao.store_entity(model.Algorithm(valid_group.id, 'ident', name='',
                                                      req_data='', param_name='', output=''))

    dao.store_entity(model.AlgorithmGroup("test_module3", "classname3", self.categ1.id))
    # NOTE(review): this overwrites the self.algo_inst stored just above —
    # looks intentional in the original, but worth confirming.
    self.algo_inst = dao.store_entity(model.Algorithm(self.algo1.id, 'id', name='',
                                                      req_data='', param_name='', output=''))
def __init__(self):
    """
    Build the fixtures needed for data-type creation: a user, a project,
    an algorithm and a finished operation, all persisted in the database.
    """
    # Unique suffix so repeated factory instantiations never collide on
    # unique columns (user name, project name).
    micro_postfix = "_%d" % int(time.time() * 1000000)
    self.files_helper = FilesHelper()

    # Persist the user that owns everything created below.
    stored_user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                             "*****@*****.**" + micro_postfix, True, "user")
    self.user = dao.store_entity(stored_user)

    # Persist a project owned by that user.
    project_data = dict(name='DatatypesFactoryProject' + micro_postfix,
                        description='test_desc', users=[])
    self.project = ProjectService().store_project(self.user, True, None, **project_data)

    # Persist an algorithm category and an algorithm entry in it.
    category = model.AlgorithmCategory('one', True)
    dao.store_entity(category)
    self.algorithm = dao.store_entity(model.Algorithm("test_module1", "classname1", category.id))

    # Persist a finished operation referencing the entities above.
    self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                 DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
    self.operation = dao.store_entity(model.Operation(self.user.id, self.project.id,
                                                      self.algorithm.id, 'test parameters',
                                                      meta=json.dumps(self.meta),
                                                      status=model.STATUS_FINISHED))
def _create_algo_for_upload():
    """
    Create and persist a fake upload algorithm: a raw-input category,
    a group inside it, and the algorithm entry itself.
    """
    category = model.AlgorithmCategory("upload_category", rawinput=True)
    category = dao.store_entity(category)
    group = model.AlgorithmGroup("module", "classname", category.id)
    group = dao.store_entity(group)
    return dao.store_entity(model.Algorithm(group.id, "algo"))
def __init_algorithmn(self):
    """ Insert some starting data in the database. """
    category = model.AlgorithmCategory('one', True)
    self.categ1 = dao.store_entity(category)
    algorithm = model.Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, category.id)
    self.algo_inst = dao.store_entity(algorithm)
def introspect(self, do_create):
    """
    Introspect a given module to:
      - create tables for custom DataType;
      - populate adapter algorithms references.

    :param do_create: when True, also import DataTypes and create their
        DB tables, then (re)register adapter categories and algorithms.
    """
    self.logger.debug("Introspection into module:" + self.module_name)
    module = __import__(self.module_name, globals(), locals(), ["__init__"])
    try:
        path_adapters = module.ADAPTERS
        self.path_types = module.DATATYPES_PATH
        self.removers_path = module.REMOVERS_PATH
        self.path_portlets = getattr(module, 'PORTLETS_PATH', [])
    except Exception as excep:
        # Module does not expose the expected introspection attributes.
        self.logger.warning("Module " + self.module_name + " is not fully introspect compatible!")
        # FIX: exceptions no longer have a .message attribute on Python 3;
        # str(excep) is the portable way to log the exception text.
        self.logger.warning(str(excep))
        return
    if do_create:
        self.logger.debug("Found Datatypes_Path=" + str(self.path_types))
        # DataTypes only need to be imported for adding to DB tables
        for path in self.path_types:
            self.__get_datatypes(path)
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
        self.logger.debug("Found Adapters_Dict=" + str(path_adapters))
        for category_name in path_adapters:
            category_details = path_adapters[category_name]
            launchable = bool(category_details.get(LAUNCHABLE))
            rawinput = bool(category_details.get(RAWINPUT))
            display = bool(category_details.get(DISPLAYER))
            order_nr = category_details.get(ORDER, 999)
            # Reuse an existing category row when one matches; otherwise
            # build a fresh one with the current timestamp.
            category_instance = dao.filter_category(category_name, rawinput, display,
                                                    launchable, order_nr)
            if category_instance is not None:
                category_instance.last_introspection_check = datetime.datetime.now()
                category_instance.removed = False
            else:
                category_state = category_details.get(STATE, '')
                category_instance = model.AlgorithmCategory(category_name, launchable, rawinput,
                                                            display, category_state, order_nr,
                                                            datetime.datetime.now())
            category_instance = dao.store_entity(category_instance)
            for actual_module in path_adapters[category_name]['modules']:
                self.__read_adapters(category_instance.id, actual_module)
        for path in self.path_portlets:
            self.__get_portlets(path)
    ### Register Remover instances for current introspected module
    removers.update_dictionary(self.get_removers_dict())
def __init_algorithmn(self):
    """ Insert some starting data in the database. """
    category = model.AlgorithmCategory('one', True)
    self.categ1 = dao.store_entity(category)
    group = dao.store_entity(model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE,
                                                  TEST_ADAPTER_VALID_CLASS, category.id))
    algorithm = model.Algorithm(group.id, 'ident', name='', req_data='', param_name='', output='')
    self.algo_inst = dao.store_entity(algorithm)
def __init_algorithmn(self):
    """ Insert some starting data in the database. """
    category = model.AlgorithmCategory('one', True)
    self.categ1 = dao.store_entity(category)
    group = dao.store_entity(model.AlgorithmGroup("tvb_test.core.services.flowservice_test",
                                                  "ValidTestAdapter", category.id))
    algorithm = model.Algorithm(group.id, 'ident', name='', req_data='', param_name='', output='')
    self.algo_inst = dao.store_entity(algorithm)
def setUp(self):
    """ Prepare some entities to work with during tests. """
    self.flow_service = FlowService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(admin=self.test_user)

    # Seed the database: two categories, three plain groups plus a group
    # pointing at a real adapter class, and two algorithms bound to them.
    self.categ1 = dao.store_entity(model.AlgorithmCategory('one', True))
    self.categ2 = dao.store_entity(model.AlgorithmCategory('two', rawinput=True))

    self.algo_group1 = dao.store_entity(model.AlgorithmGroup("test_module1", "classname1",
                                                             self.categ1.id))
    self.algo_group2 = dao.store_entity(model.AlgorithmGroup("test_module2", "classname2",
                                                             self.categ2.id))
    self.algo_group3 = dao.store_entity(model.AlgorithmGroup("test_module3", "classname3",
                                                             self.categ1.id))
    self.algo_group_v = dao.store_entity(model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE,
                                                              TEST_ADAPTER_VALID_CLASS,
                                                              self.categ2.id))
    self.algorithm_v = dao.store_entity(model.Algorithm(self.algo_group_v.id, 'ident', name='',
                                                        req_data='', param_name='', output=''))
    self.algorithm1 = dao.store_entity(model.Algorithm(self.algo_group1.id, 'id', name='',
                                                       req_data='', param_name='', output=''))
def test_get_linkable_projects(self):
    """ Test for retrieving the projects for a given user. """
    initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
    self.assertEqual(len(initial_projects), 0, "Database was not reseted!")

    # Four projects: the first three owned by the test user, the last by
    # a different user (so it must never be link-able for the test user).
    other_user = TestFactory.create_user("another_user")
    test_proj = [TestFactory.create_project(self.test_user if idx < 3 else other_user,
                                            'test_proj' + str(idx))
                 for idx in range(4)]
    project_storage = self.structure_helper.get_project_folder(test_proj[0])

    result_meta = {DataTypeMetaData.KEY_OPERATION_TYPE: "Upload",
                   DataTypeMetaData.KEY_AUTHOR: "John Doe",
                   DataTypeMetaData.KEY_SUBJECT: "subj1",
                   DataTypeMetaData.KEY_STATE: "test_state",
                   DataTypeMetaData.KEY_NODE_TYPE: "test_data",
                   DataTypeMetaData.KEY_DATE: "test_date",
                   DataTypeMetaData.KEY_GID: generate_guid()}

    # Store category -> group -> algorithm, then an operation using it.
    category = dao.store_entity(model.AlgorithmCategory("category"))
    group = dao.store_entity(model.AlgorithmGroup("module", "classname", category.id))
    algorithm = dao.store_entity(model.Algorithm(group.id, "algo"))
    operation = dao.store_entity(model.Operation(self.test_user.id, test_proj[0].id,
                                                 algorithm.id, ""))

    project_storage = os.path.join(project_storage, str(operation.id))
    os.makedirs(project_storage)
    meta_entity = DataTypeMetaData(result_meta)
    datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1",
                                               state="test_state", operation_id=operation.id))

    linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id,
                                                                   str(datatype.id))[0]
    self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
    proj_names = [project.name for project in linkable]
    self.assertIn(test_proj[1].name, proj_names)
    self.assertIn(test_proj[2].name, proj_names)
    self.assertNotIn(test_proj[3].name, proj_names)
def __init__(self):
    """
    Build the fixtures needed for data-type creation: a user, a project,
    an algorithm (category + group) and a finished operation.
    """
    # Microsecond suffix keeps user/project names unique across runs.
    micro_postfix = "_%d" % datetime.now().microsecond
    self.files_helper = FilesHelper()

    # Persist the user that owns everything created below.
    stored_user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                             "*****@*****.**" + micro_postfix, True, "user")
    self.user = dao.store_entity(stored_user)

    # Persist a project owned by that user.
    project_data = dict(name='DatatypesFactoryProject' + micro_postfix,
                        description='test_desc', users=[])
    self.project = ProjectService().store_project(self.user, True, None, **project_data)

    # Persist category -> group -> algorithm.
    category = model.AlgorithmCategory('one', True)
    dao.store_entity(category)
    group = model.AlgorithmGroup("test_module1", "classname1", category.id)
    dao.store_entity(group)
    self.algorithm = dao.store_entity(model.Algorithm(group.id, 'id', name='',
                                                      req_data='', param_name='', output=''))

    # Persist a finished operation referencing the entities above.
    self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                 DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
    self.operation = dao.store_entity(model.Operation(self.user.id, self.project.id,
                                                      self.algorithm.id, 'test parameters',
                                                      meta=json.dumps(self.meta),
                                                      status="FINISHED",
                                                      method_name=ABCAdapter.LAUNCH_METHOD))
def test_groups_for_categories(self):
    """ Test getting algorithms for specific categories. """
    groups_one = self.flow_service.get_groups_for_categories([self.categ1])
    groups_two = self.flow_service.get_groups_for_categories([self.categ2])
    # A category id that was never persisted must yield no groups.
    missing_category = model.AlgorithmCategory('dummy', rawinput=True)
    missing_category.id = 999
    groups_missing = self.flow_service.get_groups_for_categories([missing_category])

    self.assertEqual(len(groups_one), 2)
    for algorithm in groups_one:
        self.assertIn(algorithm.module, ["test_module1", "test_module3"],
                      "Some invalid data retrieved")
    for algorithm in groups_two:
        self.assertIn(algorithm.module,
                      ["test_module2", "tvb_test.core.services.flowservice_test"],
                      "Some invalid data retrieved")
    self.assertEqual(len(groups_two), 2)
    self.assertEqual(len(groups_missing), 0)
display = (DISPLAYER in category_details and category_details[DISPLAYER]) if ORDER in category_details: order_nr = category_details[ORDER] else: order_nr = 999 category_instance = dao.filter_category( category_name, rawinput, display, launchable, order_nr) if category_instance is not None: category_instance.last_introspection_check = datetime.datetime.now( ) else: category_state = category_details[ STATE] if STATE in category_details else '' category_instance = model.AlgorithmCategory( category_name, launchable, rawinput, display, category_state, order_nr, datetime.datetime.now()) category_instance = dao.store_entity(category_instance) for actual_module in path_adapters[category_name]['modules']: self.__populate_algorithms(category_instance.id, actual_module) for path in self.path_portlets: self.__get_portlets(path) ### Register Remover instances for current introspected module removers.update_dictionary(self.get_removers_dict()) def __get_portlets(self, path_portlets): """ Given a path in the form of a python package e.g.: "tvb.portlets', import the package, get it's folder and look for all the XML files defined