def setUp(self):
    """
    Reset the database before each test and seed it with a known set of
    algorithm categories, groups and algorithm instances.
    """
    self.flow_service = FlowService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(admin=self.test_user)

    ### Insert some starting data in the database.
    # Two categories: a plain one and one accepting raw input.
    plain_category = model.AlgorithmCategory('one', True)
    self.categ1 = dao.store_entity(plain_category)
    raw_category = model.AlgorithmCategory('two', rawinput=True)
    self.categ2 = dao.store_entity(raw_category)

    # Algorithm groups attached to the categories above.
    first_group = model.AlgorithmGroup("test_module1", "classname1", plain_category.id)
    self.algo1 = dao.store_entity(first_group)
    dao.store_entity(model.AlgorithmGroup("test_module2", "classname2", raw_category.id))

    valid_group = dao.store_entity(
        model.AlgorithmGroup("tvb_test.core.services.flowservice_test", "ValidTestAdapter", raw_category.id))
    self.algo_inst = dao.store_entity(
        model.Algorithm(valid_group.id, 'ident', name='', req_data='', param_name='', output=''))

    dao.store_entity(model.AlgorithmGroup("test_module3", "classname3", plain_category.id))
    # NOTE(review): this re-binds self.algo_inst, discarding the instance
    # stored above — kept as-is to preserve the original behavior.
    self.algo_inst = dao.store_entity(
        model.Algorithm(self.algo1.id, 'id', name='', req_data='', param_name='', output=''))
def __init__(self):
    """
    Build every entity needed later for data-type creation: a user,
    a project, an algorithm and a finished operation.
    """
    # Unique suffix so repeated factory instantiations do not collide.
    unique_suffix = "_%d" % int(time.time() * 1000000)
    self.files_helper = FilesHelper()

    # First create user
    new_user = model.User("datatype_factory_user" + unique_suffix, "test_pass",
                          "*****@*****.**" + unique_suffix, True, "user")
    self.user = dao.store_entity(new_user)

    # Now create a project owned by that user.
    project_data = dict(name='DatatypesFactoryProject' + unique_suffix,
                        description='test_desc', users=[])
    self.project = ProjectService().store_project(self.user, True, None, **project_data)

    # Create algorithm
    category = model.AlgorithmCategory('one', True)
    dao.store_entity(category)
    self.algorithm = dao.store_entity(model.Algorithm("test_module1", "classname1", category.id))

    # Create an operation already marked as finished.
    self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                 DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
    new_operation = model.Operation(self.user.id, self.project.id, self.algorithm.id,
                                    'test parameters', meta=json.dumps(self.meta),
                                    status=model.STATUS_FINISHED)
    self.operation = dao.store_entity(new_operation)
def _create_algo_for_upload():
    """
    Creates a fake algorithm for an upload category.
    """
    # Chain: raw-input category -> group in it -> algorithm in the group.
    upload_category = dao.store_entity(model.AlgorithmCategory("upload_category", rawinput=True))
    upload_group = dao.store_entity(model.AlgorithmGroup("module", "classname", upload_category.id))
    return dao.store_entity(model.Algorithm(upload_group.id, "algo"))
def __read_adapters(self, category_key, module_name):
    """
    Add or update lines into STORED_ADAPTERS table:
    One line for each custom class found which is extending from ABCAdapter.
    """
    # Each "adapters_file" is a sub-module name declared by the package.
    for adapters_file in Introspector.__read_module_variable(module_name):
        try:
            adapters_module = __import__(module_name + "." + adapters_file,
                                         globals(), locals(), [adapters_file])
            # Scan every attribute of the imported module for adapter classes.
            for ad_class in dir(adapters_module):
                ad_class = adapters_module.__dict__[ad_class]
                if Introspector._is_concrete_subclass(ad_class, ABCAdapter):
                    if ad_class.can_be_active():
                        # Build the Algorithm row from class-level metadata.
                        stored_adapter = model.Algorithm(ad_class.__module__, ad_class.__name__, category_key,
                                                         ad_class.get_group_name(), ad_class.get_group_description(),
                                                         ad_class.get_ui_name(), ad_class.get_ui_description(),
                                                         ad_class.get_ui_subsection(), datetime.datetime.now())
                        # Instantiate to inspect its declared input tree / outputs.
                        adapter_inst = ad_class()
                        in_params = adapter_inst.get_input_tree()
                        req_type, param_name, flt = self.__get_required_input(in_params)
                        stored_adapter.required_datatype = req_type
                        stored_adapter.parameter_name = param_name
                        stored_adapter.datatype_filter = flt
                        stored_adapter.outputlist = str(adapter_inst.get_output())
                        # If a row already exists for this module/class, update
                        # it in place by reusing its primary key.
                        inst_from_db = dao.get_algorithm_by_module(ad_class.__module__, ad_class.__name__)
                        if inst_from_db is not None:
                            stored_adapter.id = inst_from_db.id
                        stored_adapter = dao.store_entity(stored_adapter, inst_from_db is not None)
                        # Cache the stored row on the class for later lookups.
                        ad_class.stored_adapter = stored_adapter
                    else:
                        self.logger.warning("Skipped Adapter(probably because MATLAB not found):" + str(ad_class))
        except Exception:
            # Best-effort introspection: a broken file must not stop the scan.
            self.logger.exception("Could not introspect Adapters file:" + adapters_file)
def __init_algorithmn(self):
    """ Insert some starting data in the database. """
    # One category plus an algorithm registered under the valid test adapter.
    category = model.AlgorithmCategory('one', True)
    self.categ1 = dao.store_entity(category)
    algorithm = model.Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, category.id)
    self.algo_inst = dao.store_entity(algorithm)
def transactional_setup_method(self):
    """ Prepare some entities to work with during tests:"""
    self.flow_service = FlowService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(admin=self.test_user)
    # Register the valid test adapter under the first uploader category.
    uploader_category = dao.get_uploader_categories()[0]
    new_algorithm = model.Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS,
                                    uploader_category.id)
    self.algorithm = dao.store_entity(new_algorithm)
def __init_algorithmn(self):
    """ Insert some starting data in the database. """
    # Category -> group (valid test adapter) -> algorithm instance.
    category = model.AlgorithmCategory('one', True)
    self.categ1 = dao.store_entity(category)
    group = dao.store_entity(
        model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, category.id))
    self.algo_inst = dao.store_entity(
        model.Algorithm(group.id, 'ident', name='', req_data='', param_name='', output=''))
def __init_algorithmn(self):
    """ Insert some starting data in the database. """
    # Category -> group (hard-coded valid test adapter) -> algorithm instance.
    category = model.AlgorithmCategory('one', True)
    self.categ1 = dao.store_entity(category)
    group = dao.store_entity(
        model.AlgorithmGroup("tvb_test.core.services.flowservice_test", "ValidTestAdapter", category.id))
    self.algo_inst = dao.store_entity(
        model.Algorithm(group.id, 'ident', name='', req_data='', param_name='', output=''))
def ensure_db(self):
    """ Ensure algorithm exists in DB and add it if not """
    uploader_category = dao.get_uploader_categories()[0]
    module_name = self.__class__.__module__
    class_name = self.__class__.__name__
    # Look up the row keyed by this adapter's module/class; create on miss.
    algorithm = dao.get_algorithm_by_module(module_name, class_name)
    if algorithm is None:
        algorithm = dao.store_entity(
            model.Algorithm(module_name, class_name, uploader_category.id))
    self.stored_adapter = algorithm
def ensure_db(self):
    """ Ensure algorithm exists in DB and add it if not """
    uploader_category = dao.get_uploader_categories()[0]
    module_name = self.__class__.__module__
    class_name = self.__class__.__name__
    # Group rows are keyed by module/class; create group + algorithm on miss.
    group = dao.find_group(module_name, class_name)
    if group is None:
        group = dao.store_entity(
            model.AlgorithmGroup(module_name, class_name, uploader_category.id))
        dao.store_entity(model.Algorithm(group.id, class_name, class_name))
    self.algorithm_group = group
def setUp(self):
    """ Prepare some entities to work with during tests:"""
    self.flow_service = FlowService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(admin=self.test_user)

    ### Insert some starting data in the database.
    # Two categories: a plain one and one accepting raw input.
    self.categ1 = dao.store_entity(model.AlgorithmCategory('one', True))
    self.categ2 = dao.store_entity(model.AlgorithmCategory('two', rawinput=True))

    # Three dummy groups spread across the two categories.
    self.algo_group1 = dao.store_entity(
        model.AlgorithmGroup("test_module1", "classname1", self.categ1.id))
    self.algo_group2 = dao.store_entity(
        model.AlgorithmGroup("test_module2", "classname2", self.categ2.id))
    self.algo_group3 = dao.store_entity(
        model.AlgorithmGroup("test_module3", "classname3", self.categ1.id))

    # One group backed by the actually-importable valid test adapter.
    self.algo_group_v = dao.store_entity(
        model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, self.categ2.id))

    # Algorithm instances inside the valid group and the first dummy group.
    self.algorithm_v = dao.store_entity(
        model.Algorithm(self.algo_group_v.id, 'ident', name='', req_data='', param_name='', output=''))
    self.algorithm1 = dao.store_entity(
        model.Algorithm(self.algo_group1.id, 'id', name='', req_data='', param_name='', output=''))
def test_get_linkable_projects(self):
    """
    Test for retrieving the projects for a given user.
    """
    # Precondition: a clean database, no projects for the test user yet.
    initial_projects = self.project_service.retrieve_projects_for_user(
        self.test_user.id)[0]
    self.assertEqual(len(initial_projects), 0, "Database was not reseted!")
    test_proj = []
    user1 = TestFactory.create_user("another_user")
    # Projects 0-2 belong to the test user; project 3 belongs to another user.
    for i in range(4):
        test_proj.append(
            TestFactory.create_project(self.test_user if i < 3 else user1,
                                       'test_proj' + str(i)))
    project_storage = self.structure_helper.get_project_folder(
        test_proj[0])
    result_meta = {
        DataTypeMetaData.KEY_OPERATION_TYPE: "Upload",
        DataTypeMetaData.KEY_AUTHOR: "John Doe",
        DataTypeMetaData.KEY_SUBJECT: "subj1",
        DataTypeMetaData.KEY_STATE: "test_state",
        DataTypeMetaData.KEY_NODE_TYPE: "test_data",
        DataTypeMetaData.KEY_DATE: "test_date",
        DataTypeMetaData.KEY_GID: generate_guid()
    }
    # Build the chain category -> group -> algorithm, reusing the same
    # local variable for each freshly stored entity.
    entity = dao.store_entity(model.AlgorithmCategory("category"))
    entity = dao.store_entity(
        model.AlgorithmGroup("module", "classname", entity.id))
    entity = dao.store_entity(model.Algorithm(entity.id, "algo"))
    # Attach an operation in project 0 and create its storage folder on disk.
    operation = model.Operation(self.test_user.id, test_proj[0].id,
                                entity.id, "")
    operation = dao.store_entity(operation)
    project_storage = os.path.join(project_storage, str(operation.id))
    os.makedirs(project_storage)
    # NOTE(review): this DataTypeMetaData instance is never used afterwards;
    # the DataType below is built from explicit keyword arguments instead.
    entity = DataTypeMetaData(result_meta)
    # A datatype produced by the operation, hence living in project 0.
    datatype = dao.store_entity(
        model.DataType(module="test_data",
                       subject="subj1",
                       state="test_state",
                       operation_id=operation.id))
    # Link-able projects = the user's projects other than the datatype's own
    # (projects 1 and 2); another user's project 3 must be excluded.
    linkable = self.project_service.get_linkable_projects_for_user(
        self.test_user.id, str(datatype.id))[0]
    self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
    proj_names = [project.name for project in linkable]
    self.assertTrue(test_proj[1].name in proj_names)
    self.assertTrue(test_proj[2].name in proj_names)
    self.assertFalse(test_proj[3].name in proj_names)
def __init__(self):
    """
    Build every entity needed later for data-type creation: a user,
    a project, an algorithm group/instance and a finished operation.
    """
    # Microsecond-based suffix keeps entity names unique per instantiation.
    unique_suffix = "_%d" % datetime.now().microsecond
    self.files_helper = FilesHelper()

    # First create user
    new_user = model.User("datatype_factory_user" + unique_suffix, "test_pass",
                          "*****@*****.**" + unique_suffix, True, "user")
    self.user = dao.store_entity(new_user)

    # Now create a project owned by that user.
    project_data = dict(name='DatatypesFactoryProject' + unique_suffix,
                        description='test_desc', users=[])
    self.project = ProjectService().store_project(self.user, True, None, **project_data)

    # Create algorithm: category -> group -> algorithm instance.
    category = model.AlgorithmCategory('one', True)
    dao.store_entity(category)
    group = model.AlgorithmGroup("test_module1", "classname1", category.id)
    dao.store_entity(group)
    self.algorithm = dao.store_entity(
        model.Algorithm(group.id, 'id', name='', req_data='', param_name='', output=''))

    # Create an operation already marked as finished.
    self.meta = {
        DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
        DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE
    }
    new_operation = model.Operation(self.user.id, self.project.id, self.algorithm.id,
                                    'test parameters', meta=json.dumps(self.meta),
                                    status="FINISHED", method_name=ABCAdapter.LAUNCH_METHOD)
    self.operation = dao.store_entity(new_operation)
def __store_algorithms_for_group(self, group, adapter, has_sub_algorithms):
    """
    For the group passed as parameter do the following:
    If it has sub-algorithms, get the list of them, add sub-algorithm
    references into the DB with all the required fields.
    If it is not a GroupAdapter add a single algorithm into the DB with
    an empty identifier.
    """
    if has_sub_algorithms:
        # One Algorithm row per sub-algorithm identifier.
        algos = adapter.get_algorithms_dictionary()
        for algo_ident in algos:
            in_params = adapter.get_input_for_algorithm(algo_ident)
            req_type, param_name, flt = self.__get_required_input(
                in_params)
            outputs = adapter.get_output_for_algorithm(algo_ident)
            algo_description = ""
            # MATLAB-backed adapters get their description extracted from
            # the corresponding .m file's doc string, when one exists.
            if self.__is_matlab_parent(
                    inspect.getclasstree([adapter.__class__])):
                root_folder = adapter.get_matlab_file_root()
                file_name = adapter.get_matlab_file(algo_ident)
                if file_name:
                    algo_description = self.extract_matlab_doc_string(
                        os.path.join(root_folder, file_name))
            # Upsert: reuse the existing row for this (group, ident) if any.
            algorithm = dao.get_algorithm_by_group(group.id, algo_ident)
            if algorithm is None:
                #Create new
                algorithm = model.Algorithm(group.id, algo_ident,
                                            algos[algo_ident][ATT_NAME],
                                            req_type, param_name,
                                            str(outputs), flt,
                                            description=algo_description)
            else:
                #Edit previous
                algorithm.name = algos[algo_ident][ATT_NAME]
                algorithm.required_datatype = req_type
                algorithm.parameter_name = param_name
                algorithm.outputlist = str(outputs)
                algorithm.datatype_filter = flt
                algorithm.description = algo_description
            dao.store_entity(algorithm)
    else:
        # Single algorithm for the whole group, stored with identifier None.
        input_tree = adapter.get_input_tree()
        req_type, param_name, flt = self.__get_required_input(input_tree)
        outputs = str(adapter.get_output())
        algorithm = dao.get_algorithm_by_group(group.id, None)
        # Prefer an explicit UI name on the adapter, fall back to class name.
        if hasattr(adapter, '_ui_name'):
            algo_name = getattr(adapter, '_ui_name')
        else:
            algo_name = adapter.__class__.__name__
        if algorithm is None:
            #Create new
            algorithm = model.Algorithm(group.id, None, algo_name, req_type,
                                        param_name, outputs, flt)
        else:
            #Edit previous
            algorithm.name = algo_name
            algorithm.required_datatype = req_type
            algorithm.parameter_name = param_name
            algorithm.outputlist = str(outputs)
            algorithm.datatype_filter = flt
        dao.store_entity(algorithm)