def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
    """
    Find all DataTypes (including the linked ones and the groups) relevant for the current project.
    In case of a problem, will return an empty list.

    :param project: current Project entity; its id scopes the DAO query and drives link detection
    :param visibility_filter: filter forwarded to the DAO to restrict the returned DataTypes
    :param first_level: attribute key forwarded to StructureNode.metadata2tree as the first tree level
    :param second_level: attribute key forwarded to StructureNode.metadata2tree as the second tree level
    :param filter_value: filter value forwarded to the DAO query
    :returns: result of StructureNode.metadata2tree over the collected DataTypeMetaData entries
    """
    metadata_list = []
    dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)
    for dt in dt_list:
        # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
        data = {}
        is_group = False
        group_op = None
        dt_entity = dao.get_datatype_by_gid(dt.gid)
        if dt_entity is None:
            # DB row points at a DataType that can no longer be resolved by GID;
            # skip it instead of failing the whole tree build.
            self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
            continue
        # Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
        if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
            is_group = True
            group_op = dt.parent_operation.operation_group

        # All these fields are necessary here for dynamic Tree levels.
        data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
        data[DataTypeMetaData.KEY_GID] = dt.gid
        data[DataTypeMetaData.KEY_NODE_TYPE] = dt.display_type
        data[DataTypeMetaData.KEY_STATE] = dt.state
        data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
        data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
        data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
        # An entity counts as a Link when its producing operation was launched in a different project.
        data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id
        # User tags default to empty strings so the tree never displays None.
        data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
        data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
        data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
        data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
        data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

        # Operation related fields:
        operation_name = CommonDetails.compute_operation_name(
            dt.parent_operation.algorithm.algorithm_category.displayname,
            dt.parent_operation.algorithm.displayname)
        data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
        data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
        data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
        # For groups the tag/id come from the OperationGroup, otherwise from the single parent operation.
        data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
        data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

        # Completion date may be missing (e.g. unfinished operation) — guard every formatting call.
        completion_date = dt.parent_operation.completion_date
        string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
        string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
        data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
        data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
        data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month
        data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

        metadata_list.append(DataTypeMetaData(data, dt.invalid))
    return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)
def test_get_linkable_projects(self):
    """
    Test for retrieving the projects for a given user.
    Verifies that a DataType stored in one project can be linked only into the
    current user's other projects, and not into another user's project.
    """
    # Precondition: the test DB must start with no projects for this user.
    initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
    self.assertEqual(len(initial_projects), 0, "Database was not reseted!")
    test_proj = []
    user1 = TestFactory.create_user("another_user")
    # Projects 0-2 belong to the test user; project 3 belongs to a different user.
    for i in range(4):
        test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1, 'test_proj' + str(i)))
    project_storage = self.structure_helper.get_project_folder(test_proj[0])

    result_meta = {DataTypeMetaData.KEY_OPERATION_TYPE: "Upload",
                   DataTypeMetaData.KEY_AUTHOR: "John Doe",
                   DataTypeMetaData.KEY_SUBJECT: "subj1",
                   DataTypeMetaData.KEY_STATE: "test_state",
                   DataTypeMetaData.KEY_NODE_TYPE: "test_data",
                   DataTypeMetaData.KEY_DATE: "test_date",
                   DataTypeMetaData.KEY_GID: generate_guid()}

    # Build the minimal algorithm chain (category -> group -> algorithm) required to store an operation.
    entity = dao.store_entity(model.AlgorithmCategory("category"))
    entity = dao.store_entity(model.AlgorithmGroup("module", "classname", entity.id))
    entity = dao.store_entity(model.Algorithm(entity.id, "algo"))
    operation = model.Operation(self.test_user.id, test_proj[0].id, entity.id, "")
    operation = dao.store_entity(operation)
    # The operation needs a storage folder on disk before a DataType can reference it.
    project_storage = os.path.join(project_storage, str(operation.id))
    os.makedirs(project_storage)
    entity = DataTypeMetaData(result_meta)
    datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1",
                                               state="test_state", operation_id=operation.id))

    linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]
    # Expected: projects 1 and 2 only — project 0 already holds the DataType, project 3 is not ours.
    self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
    proj_names = [project.name for project in linkable]
    self.assertTrue(test_proj[1].name in proj_names)
    self.assertTrue(test_proj[2].name in proj_names)
    self.assertFalse(test_proj[3].name in proj_names)
def load_datatype_from_file(self, storage_folder, file_name, op_id, datatype_group=None, move=True):
    """
    Creates an instance of datatype from storage / H5 file

    :param storage_folder: folder currently holding the H5 file
    :param file_name: name of the H5 file to load
    :param op_id: id of the parent operation to attach to the new instance
    :param datatype_group: optional DataTypeGroup to link the instance into
    :param move: when True, relocate the H5 file to the instance's canonical storage path
    :returns: datatype
    """
    self.logger.debug("Loading datatType from file: %s" % file_name)
    hdf5_manager = HDF5StorageManager(storage_folder, file_name)
    all_meta = hdf5_manager.get_metadata()
    meta_wrapper = DataTypeMetaData(all_meta)

    # Resolve the concrete DataType class recorded inside the H5 meta-data.
    target_class = meta_wrapper[DataTypeMetaData.KEY_CLASS_NAME]
    target_module = meta_wrapper[DataTypeMetaData.KEY_MODULE]
    module_ref = __import__(target_module, globals(), locals(), [target_class])
    dt_class = getattr(module_ref, target_class)
    result = manager_of_class(dt_class).new_instance()

    # Stamp identifying fields, then populate the rest from the meta-data dictionary.
    result.type = str(result.__class__.__name__)
    result.module = str(result.__module__)
    result.load_from_metadata(all_meta)

    # Attach required relations: optional group membership plus the parent operation.
    if datatype_group is not None:
        result.fk_datatype_group = datatype_group.id
    result.set_operation_id(op_id)

    # Relocate the H5 file into its canonical storage location when requested.
    source_path = os.path.join(storage_folder, file_name)
    target_path = result.get_storage_file_path()
    if move and target_path != source_path:
        shutil.move(source_path, target_path)
    return result
def get_filterable_meta():
    """
    Contains all the attributes by which the user can structure the tree of DataTypes.
    Delegates entirely to DataTypeMetaData.
    """
    filterable_attributes = DataTypeMetaData.get_filterable_meta()
    return filterable_attributes
def get_filterable_meta():
    """
    Return all the attributes by which the user can structure the tree of DataTypes.
    Thin pass-through to DataTypeMetaData.get_filterable_meta().
    """
    return DataTypeMetaData.get_filterable_meta()